code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def setpassword(self, password):
"""Sets the password to use when extracting.
"""
self._password = password
if self._file_parser:
if self._file_parser.has_header_encryption():
self._file_parser = None
if not self._file_parser:
self._parse()
else:
self._file_parser.setpassword(self._password)
|
def function[setpassword, parameter[self, password]]:
constant[Sets the password to use when extracting.
]
name[self]._password assign[=] name[password]
if name[self]._file_parser begin[:]
if call[name[self]._file_parser.has_header_encryption, parameter[]] begin[:]
name[self]._file_parser assign[=] constant[None]
if <ast.UnaryOp object at 0x7da18f813940> begin[:]
call[name[self]._parse, parameter[]]
|
keyword[def] identifier[setpassword] ( identifier[self] , identifier[password] ):
literal[string]
identifier[self] . identifier[_password] = identifier[password]
keyword[if] identifier[self] . identifier[_file_parser] :
keyword[if] identifier[self] . identifier[_file_parser] . identifier[has_header_encryption] ():
identifier[self] . identifier[_file_parser] = keyword[None]
keyword[if] keyword[not] identifier[self] . identifier[_file_parser] :
identifier[self] . identifier[_parse] ()
keyword[else] :
identifier[self] . identifier[_file_parser] . identifier[setpassword] ( identifier[self] . identifier[_password] )
|
def setpassword(self, password):
"""Sets the password to use when extracting.
"""
self._password = password
if self._file_parser:
if self._file_parser.has_header_encryption():
self._file_parser = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not self._file_parser:
self._parse() # depends on [control=['if'], data=[]]
else:
self._file_parser.setpassword(self._password)
|
def get_cache_path(self, archive_name, names=()):
"""Return absolute location in cache for `archive_name` and `names`
The parent directory of the resulting path will be created if it does
not already exist. `archive_name` should be the base filename of the
enclosing egg (which may not be the name of the enclosing zipfile!),
including its ".egg" extension. `names`, if provided, should be a
sequence of path name parts "under" the egg's extraction location.
This method should only be called by resource providers that need to
obtain an extraction location, and only for names they intend to
extract, as it tracks the generated names for possible cleanup later.
"""
extract_path = self.extraction_path or get_default_cache()
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
try:
_bypass_ensure_directory(target_path)
except:
self.extraction_error()
self.cached_files[target_path] = 1
return target_path
|
def function[get_cache_path, parameter[self, archive_name, names]]:
constant[Return absolute location in cache for `archive_name` and `names`
The parent directory of the resulting path will be created if it does
not already exist. `archive_name` should be the base filename of the
enclosing egg (which may not be the name of the enclosing zipfile!),
including its ".egg" extension. `names`, if provided, should be a
sequence of path name parts "under" the egg's extraction location.
This method should only be called by resource providers that need to
obtain an extraction location, and only for names they intend to
extract, as it tracks the generated names for possible cleanup later.
]
variable[extract_path] assign[=] <ast.BoolOp object at 0x7da18fe918d0>
variable[target_path] assign[=] call[name[os].path.join, parameter[name[extract_path], binary_operation[name[archive_name] + constant[-tmp]], <ast.Starred object at 0x7da18fe91e40>]]
<ast.Try object at 0x7da18fe904f0>
call[name[self].cached_files][name[target_path]] assign[=] constant[1]
return[name[target_path]]
|
keyword[def] identifier[get_cache_path] ( identifier[self] , identifier[archive_name] , identifier[names] =()):
literal[string]
identifier[extract_path] = identifier[self] . identifier[extraction_path] keyword[or] identifier[get_default_cache] ()
identifier[target_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[extract_path] , identifier[archive_name] + literal[string] ,* identifier[names] )
keyword[try] :
identifier[_bypass_ensure_directory] ( identifier[target_path] )
keyword[except] :
identifier[self] . identifier[extraction_error] ()
identifier[self] . identifier[cached_files] [ identifier[target_path] ]= literal[int]
keyword[return] identifier[target_path]
|
def get_cache_path(self, archive_name, names=()):
"""Return absolute location in cache for `archive_name` and `names`
The parent directory of the resulting path will be created if it does
not already exist. `archive_name` should be the base filename of the
enclosing egg (which may not be the name of the enclosing zipfile!),
including its ".egg" extension. `names`, if provided, should be a
sequence of path name parts "under" the egg's extraction location.
This method should only be called by resource providers that need to
obtain an extraction location, and only for names they intend to
extract, as it tracks the generated names for possible cleanup later.
"""
extract_path = self.extraction_path or get_default_cache()
target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
try:
_bypass_ensure_directory(target_path) # depends on [control=['try'], data=[]]
except:
self.extraction_error() # depends on [control=['except'], data=[]]
self.cached_files[target_path] = 1
return target_path
|
def setop(args):
"""
%prog setop "fileA & fileB" > newfile
Perform set operations, except on files. The files (fileA and fileB) contain
list of ids. The operator is one of the four:
|: union (elements found in either file)
&: intersection (elements found in both)
-: difference (elements in fileA but not in fileB)
^: symmetric difference (elementes found in either set but not both)
Please quote the argument to avoid shell interpreting | and &.
"""
from jcvi.utils.natsort import natsorted
p = OptionParser(setop.__doc__)
p.add_option("--column", default=0, type="int",
help="The column to extract, 0-based, -1 to disable [default: %default]")
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
statement, = args
fa, op, fb = statement.split()
assert op in ('|', '&', '-', '^')
column = opts.column
fa = SetFile(fa, column=column)
fb = SetFile(fb, column=column)
if op == '|':
t = fa | fb
elif op == '&':
t = fa & fb
elif op == '-':
t = fa - fb
elif op == '^':
t = fa ^ fb
for x in natsorted(t):
print(x)
|
def function[setop, parameter[args]]:
constant[
%prog setop "fileA & fileB" > newfile
Perform set operations, except on files. The files (fileA and fileB) contain
list of ids. The operator is one of the four:
|: union (elements found in either file)
&: intersection (elements found in both)
-: difference (elements in fileA but not in fileB)
^: symmetric difference (elementes found in either set but not both)
Please quote the argument to avoid shell interpreting | and &.
]
from relative_module[jcvi.utils.natsort] import module[natsorted]
variable[p] assign[=] call[name[OptionParser], parameter[name[setop].__doc__]]
call[name[p].add_option, parameter[constant[--column]]]
<ast.Tuple object at 0x7da20e963550> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da20e960f40>]]
<ast.Tuple object at 0x7da20e9610f0> assign[=] name[args]
<ast.Tuple object at 0x7da20e961ed0> assign[=] call[name[statement].split, parameter[]]
assert[compare[name[op] in tuple[[<ast.Constant object at 0x7da20e963d60>, <ast.Constant object at 0x7da20e960ac0>, <ast.Constant object at 0x7da20e9609d0>, <ast.Constant object at 0x7da20e962ef0>]]]]
variable[column] assign[=] name[opts].column
variable[fa] assign[=] call[name[SetFile], parameter[name[fa]]]
variable[fb] assign[=] call[name[SetFile], parameter[name[fb]]]
if compare[name[op] equal[==] constant[|]] begin[:]
variable[t] assign[=] binary_operation[name[fa] <ast.BitOr object at 0x7da2590d6aa0> name[fb]]
for taget[name[x]] in starred[call[name[natsorted], parameter[name[t]]]] begin[:]
call[name[print], parameter[name[x]]]
|
keyword[def] identifier[setop] ( identifier[args] ):
literal[string]
keyword[from] identifier[jcvi] . identifier[utils] . identifier[natsort] keyword[import] identifier[natsorted]
identifier[p] = identifier[OptionParser] ( identifier[setop] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[int] , identifier[type] = literal[string] ,
identifier[help] = literal[string] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[statement] ,= identifier[args]
identifier[fa] , identifier[op] , identifier[fb] = identifier[statement] . identifier[split] ()
keyword[assert] identifier[op] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] )
identifier[column] = identifier[opts] . identifier[column]
identifier[fa] = identifier[SetFile] ( identifier[fa] , identifier[column] = identifier[column] )
identifier[fb] = identifier[SetFile] ( identifier[fb] , identifier[column] = identifier[column] )
keyword[if] identifier[op] == literal[string] :
identifier[t] = identifier[fa] | identifier[fb]
keyword[elif] identifier[op] == literal[string] :
identifier[t] = identifier[fa] & identifier[fb]
keyword[elif] identifier[op] == literal[string] :
identifier[t] = identifier[fa] - identifier[fb]
keyword[elif] identifier[op] == literal[string] :
identifier[t] = identifier[fa] ^ identifier[fb]
keyword[for] identifier[x] keyword[in] identifier[natsorted] ( identifier[t] ):
identifier[print] ( identifier[x] )
|
def setop(args):
"""
%prog setop "fileA & fileB" > newfile
Perform set operations, except on files. The files (fileA and fileB) contain
list of ids. The operator is one of the four:
|: union (elements found in either file)
&: intersection (elements found in both)
-: difference (elements in fileA but not in fileB)
^: symmetric difference (elementes found in either set but not both)
Please quote the argument to avoid shell interpreting | and &.
"""
from jcvi.utils.natsort import natsorted
p = OptionParser(setop.__doc__)
p.add_option('--column', default=0, type='int', help='The column to extract, 0-based, -1 to disable [default: %default]')
(opts, args) = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(statement,) = args
(fa, op, fb) = statement.split()
assert op in ('|', '&', '-', '^')
column = opts.column
fa = SetFile(fa, column=column)
fb = SetFile(fb, column=column)
if op == '|':
t = fa | fb # depends on [control=['if'], data=[]]
elif op == '&':
t = fa & fb # depends on [control=['if'], data=[]]
elif op == '-':
t = fa - fb # depends on [control=['if'], data=[]]
elif op == '^':
t = fa ^ fb # depends on [control=['if'], data=[]]
for x in natsorted(t):
print(x) # depends on [control=['for'], data=['x']]
|
def close(self):
"""Close the plot and release its memory.
"""
from matplotlib.pyplot import close
for ax in self.axes[::-1]:
# avoid matplotlib/matplotlib#9970
ax.set_xscale('linear')
ax.set_yscale('linear')
# clear the axes
ax.cla()
# close the figure
close(self)
|
def function[close, parameter[self]]:
constant[Close the plot and release its memory.
]
from relative_module[matplotlib.pyplot] import module[close]
for taget[name[ax]] in starred[call[name[self].axes][<ast.Slice object at 0x7da1b0608370>]] begin[:]
call[name[ax].set_xscale, parameter[constant[linear]]]
call[name[ax].set_yscale, parameter[constant[linear]]]
call[name[ax].cla, parameter[]]
call[name[close], parameter[name[self]]]
|
keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[from] identifier[matplotlib] . identifier[pyplot] keyword[import] identifier[close]
keyword[for] identifier[ax] keyword[in] identifier[self] . identifier[axes] [::- literal[int] ]:
identifier[ax] . identifier[set_xscale] ( literal[string] )
identifier[ax] . identifier[set_yscale] ( literal[string] )
identifier[ax] . identifier[cla] ()
identifier[close] ( identifier[self] )
|
def close(self):
"""Close the plot and release its memory.
"""
from matplotlib.pyplot import close
for ax in self.axes[::-1]:
# avoid matplotlib/matplotlib#9970
ax.set_xscale('linear')
ax.set_yscale('linear')
# clear the axes
ax.cla() # depends on [control=['for'], data=['ax']]
# close the figure
close(self)
|
def _is_old_database(db_dir, args):
"""Check for old database versions, supported in snpEff 4.1.
"""
snpeff_version = effects.snpeff_version(args)
if LooseVersion(snpeff_version) >= LooseVersion("4.1"):
pred_file = os.path.join(db_dir, "snpEffectPredictor.bin")
if not utils.file_exists(pred_file):
return True
with utils.open_gzipsafe(pred_file, is_gz=True) as in_handle:
version_info = in_handle.readline().strip().split("\t")
program, version = version_info[:2]
if not program.lower() == "snpeff" or LooseVersion(snpeff_version) > LooseVersion(version):
return True
return False
|
def function[_is_old_database, parameter[db_dir, args]]:
constant[Check for old database versions, supported in snpEff 4.1.
]
variable[snpeff_version] assign[=] call[name[effects].snpeff_version, parameter[name[args]]]
if compare[call[name[LooseVersion], parameter[name[snpeff_version]]] greater_or_equal[>=] call[name[LooseVersion], parameter[constant[4.1]]]] begin[:]
variable[pred_file] assign[=] call[name[os].path.join, parameter[name[db_dir], constant[snpEffectPredictor.bin]]]
if <ast.UnaryOp object at 0x7da1b19d9510> begin[:]
return[constant[True]]
with call[name[utils].open_gzipsafe, parameter[name[pred_file]]] begin[:]
variable[version_info] assign[=] call[call[call[name[in_handle].readline, parameter[]].strip, parameter[]].split, parameter[constant[ ]]]
<ast.Tuple object at 0x7da1b19bbc40> assign[=] call[name[version_info]][<ast.Slice object at 0x7da1b19b90c0>]
if <ast.BoolOp object at 0x7da1b19babc0> begin[:]
return[constant[True]]
return[constant[False]]
|
keyword[def] identifier[_is_old_database] ( identifier[db_dir] , identifier[args] ):
literal[string]
identifier[snpeff_version] = identifier[effects] . identifier[snpeff_version] ( identifier[args] )
keyword[if] identifier[LooseVersion] ( identifier[snpeff_version] )>= identifier[LooseVersion] ( literal[string] ):
identifier[pred_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[db_dir] , literal[string] )
keyword[if] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[pred_file] ):
keyword[return] keyword[True]
keyword[with] identifier[utils] . identifier[open_gzipsafe] ( identifier[pred_file] , identifier[is_gz] = keyword[True] ) keyword[as] identifier[in_handle] :
identifier[version_info] = identifier[in_handle] . identifier[readline] (). identifier[strip] (). identifier[split] ( literal[string] )
identifier[program] , identifier[version] = identifier[version_info] [: literal[int] ]
keyword[if] keyword[not] identifier[program] . identifier[lower] ()== literal[string] keyword[or] identifier[LooseVersion] ( identifier[snpeff_version] )> identifier[LooseVersion] ( identifier[version] ):
keyword[return] keyword[True]
keyword[return] keyword[False]
|
def _is_old_database(db_dir, args):
"""Check for old database versions, supported in snpEff 4.1.
"""
snpeff_version = effects.snpeff_version(args)
if LooseVersion(snpeff_version) >= LooseVersion('4.1'):
pred_file = os.path.join(db_dir, 'snpEffectPredictor.bin')
if not utils.file_exists(pred_file):
return True # depends on [control=['if'], data=[]]
with utils.open_gzipsafe(pred_file, is_gz=True) as in_handle:
version_info = in_handle.readline().strip().split('\t') # depends on [control=['with'], data=['in_handle']]
(program, version) = version_info[:2]
if not program.lower() == 'snpeff' or LooseVersion(snpeff_version) > LooseVersion(version):
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return False
|
def get_for_targets(self, targets):
"""Gets the classpath products for the given targets.
Products are returned in order, respecting target excludes.
:param targets: The targets to lookup classpath products for.
:returns: The ordered (conf, path) tuples, with paths being either classfile directories or
jars.
:rtype: list of (string, string)
"""
cp_entries = self.get_classpath_entries_for_targets(targets)
return [(conf, cp_entry.path) for conf, cp_entry in cp_entries]
|
def function[get_for_targets, parameter[self, targets]]:
constant[Gets the classpath products for the given targets.
Products are returned in order, respecting target excludes.
:param targets: The targets to lookup classpath products for.
:returns: The ordered (conf, path) tuples, with paths being either classfile directories or
jars.
:rtype: list of (string, string)
]
variable[cp_entries] assign[=] call[name[self].get_classpath_entries_for_targets, parameter[name[targets]]]
return[<ast.ListComp object at 0x7da1b22a67a0>]
|
keyword[def] identifier[get_for_targets] ( identifier[self] , identifier[targets] ):
literal[string]
identifier[cp_entries] = identifier[self] . identifier[get_classpath_entries_for_targets] ( identifier[targets] )
keyword[return] [( identifier[conf] , identifier[cp_entry] . identifier[path] ) keyword[for] identifier[conf] , identifier[cp_entry] keyword[in] identifier[cp_entries] ]
|
def get_for_targets(self, targets):
"""Gets the classpath products for the given targets.
Products are returned in order, respecting target excludes.
:param targets: The targets to lookup classpath products for.
:returns: The ordered (conf, path) tuples, with paths being either classfile directories or
jars.
:rtype: list of (string, string)
"""
cp_entries = self.get_classpath_entries_for_targets(targets)
return [(conf, cp_entry.path) for (conf, cp_entry) in cp_entries]
|
def load_file_or_hdu(filename):
"""
Load a file from disk and return an HDUList
If filename is already an HDUList return that instead
Parameters
----------
filename : str or HDUList
File or HDU to be loaded
Returns
-------
hdulist : HDUList
"""
if isinstance(filename, fits.HDUList):
hdulist = filename
else:
hdulist = fits.open(filename, ignore_missing_end=True)
return hdulist
|
def function[load_file_or_hdu, parameter[filename]]:
constant[
Load a file from disk and return an HDUList
If filename is already an HDUList return that instead
Parameters
----------
filename : str or HDUList
File or HDU to be loaded
Returns
-------
hdulist : HDUList
]
if call[name[isinstance], parameter[name[filename], name[fits].HDUList]] begin[:]
variable[hdulist] assign[=] name[filename]
return[name[hdulist]]
|
keyword[def] identifier[load_file_or_hdu] ( identifier[filename] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[filename] , identifier[fits] . identifier[HDUList] ):
identifier[hdulist] = identifier[filename]
keyword[else] :
identifier[hdulist] = identifier[fits] . identifier[open] ( identifier[filename] , identifier[ignore_missing_end] = keyword[True] )
keyword[return] identifier[hdulist]
|
def load_file_or_hdu(filename):
"""
Load a file from disk and return an HDUList
If filename is already an HDUList return that instead
Parameters
----------
filename : str or HDUList
File or HDU to be loaded
Returns
-------
hdulist : HDUList
"""
if isinstance(filename, fits.HDUList):
hdulist = filename # depends on [control=['if'], data=[]]
else:
hdulist = fits.open(filename, ignore_missing_end=True)
return hdulist
|
def dag_paused(dag_id, paused):
"""(Un)pauses a dag"""
DagModel = models.DagModel
with create_session() as session:
orm_dag = (
session.query(DagModel)
.filter(DagModel.dag_id == dag_id).first()
)
if paused == 'true':
orm_dag.is_paused = True
else:
orm_dag.is_paused = False
session.merge(orm_dag)
session.commit()
return jsonify({'response': 'ok'})
|
def function[dag_paused, parameter[dag_id, paused]]:
constant[(Un)pauses a dag]
variable[DagModel] assign[=] name[models].DagModel
with call[name[create_session], parameter[]] begin[:]
variable[orm_dag] assign[=] call[call[call[name[session].query, parameter[name[DagModel]]].filter, parameter[compare[name[DagModel].dag_id equal[==] name[dag_id]]]].first, parameter[]]
if compare[name[paused] equal[==] constant[true]] begin[:]
name[orm_dag].is_paused assign[=] constant[True]
call[name[session].merge, parameter[name[orm_dag]]]
call[name[session].commit, parameter[]]
return[call[name[jsonify], parameter[dictionary[[<ast.Constant object at 0x7da1b05bdc90>], [<ast.Constant object at 0x7da1b05bc730>]]]]]
|
keyword[def] identifier[dag_paused] ( identifier[dag_id] , identifier[paused] ):
literal[string]
identifier[DagModel] = identifier[models] . identifier[DagModel]
keyword[with] identifier[create_session] () keyword[as] identifier[session] :
identifier[orm_dag] =(
identifier[session] . identifier[query] ( identifier[DagModel] )
. identifier[filter] ( identifier[DagModel] . identifier[dag_id] == identifier[dag_id] ). identifier[first] ()
)
keyword[if] identifier[paused] == literal[string] :
identifier[orm_dag] . identifier[is_paused] = keyword[True]
keyword[else] :
identifier[orm_dag] . identifier[is_paused] = keyword[False]
identifier[session] . identifier[merge] ( identifier[orm_dag] )
identifier[session] . identifier[commit] ()
keyword[return] identifier[jsonify] ({ literal[string] : literal[string] })
|
def dag_paused(dag_id, paused):
"""(Un)pauses a dag"""
DagModel = models.DagModel
with create_session() as session:
orm_dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).first()
if paused == 'true':
orm_dag.is_paused = True # depends on [control=['if'], data=[]]
else:
orm_dag.is_paused = False
session.merge(orm_dag)
session.commit() # depends on [control=['with'], data=['session']]
return jsonify({'response': 'ok'})
|
def _setup_evp_encrypt_decrypt(cipher, data):
"""
Creates an EVP_CIPHER pointer object and determines the buffer size
necessary for the parameter specified.
:param evp_cipher_ctx:
An EVP_CIPHER_CTX pointer
:param cipher:
A unicode string of "aes128", "aes192", "aes256", "des",
"tripledes_2key", "tripledes_3key", "rc2", "rc4"
:param key:
The key byte string
:param data:
The plaintext or ciphertext as a byte string
:param padding:
If padding is to be used
:return:
A 2-element tuple with the first element being an EVP_CIPHER pointer
and the second being an integer that is the required buffer size
"""
evp_cipher = {
'aes128': libcrypto.EVP_aes_128_cbc,
'aes192': libcrypto.EVP_aes_192_cbc,
'aes256': libcrypto.EVP_aes_256_cbc,
'rc2': libcrypto.EVP_rc2_cbc,
'rc4': libcrypto.EVP_rc4,
'des': libcrypto.EVP_des_cbc,
'tripledes_2key': libcrypto.EVP_des_ede_cbc,
'tripledes_3key': libcrypto.EVP_des_ede3_cbc,
}[cipher]()
if cipher == 'rc4':
buffer_size = len(data)
else:
block_size = {
'aes128': 16,
'aes192': 16,
'aes256': 16,
'rc2': 8,
'des': 8,
'tripledes_2key': 8,
'tripledes_3key': 8,
}[cipher]
buffer_size = block_size * int(math.ceil(len(data) / block_size))
return (evp_cipher, buffer_size)
|
def function[_setup_evp_encrypt_decrypt, parameter[cipher, data]]:
constant[
Creates an EVP_CIPHER pointer object and determines the buffer size
necessary for the parameter specified.
:param evp_cipher_ctx:
An EVP_CIPHER_CTX pointer
:param cipher:
A unicode string of "aes128", "aes192", "aes256", "des",
"tripledes_2key", "tripledes_3key", "rc2", "rc4"
:param key:
The key byte string
:param data:
The plaintext or ciphertext as a byte string
:param padding:
If padding is to be used
:return:
A 2-element tuple with the first element being an EVP_CIPHER pointer
and the second being an integer that is the required buffer size
]
variable[evp_cipher] assign[=] call[call[dictionary[[<ast.Constant object at 0x7da1b000fc10>, <ast.Constant object at 0x7da1b000d780>, <ast.Constant object at 0x7da1b000f2b0>, <ast.Constant object at 0x7da1b000c640>, <ast.Constant object at 0x7da1b000fd30>, <ast.Constant object at 0x7da1b000c1f0>, <ast.Constant object at 0x7da1b000dfc0>, <ast.Constant object at 0x7da1b000fee0>], [<ast.Attribute object at 0x7da1b000cf40>, <ast.Attribute object at 0x7da1b000c220>, <ast.Attribute object at 0x7da1b000e680>, <ast.Attribute object at 0x7da1b000f250>, <ast.Attribute object at 0x7da1b000d9c0>, <ast.Attribute object at 0x7da1b000cf70>, <ast.Attribute object at 0x7da1b000e7a0>, <ast.Attribute object at 0x7da1b000ca30>]]][name[cipher]], parameter[]]
if compare[name[cipher] equal[==] constant[rc4]] begin[:]
variable[buffer_size] assign[=] call[name[len], parameter[name[data]]]
return[tuple[[<ast.Name object at 0x7da1b00d60e0>, <ast.Name object at 0x7da1b00d7400>]]]
|
keyword[def] identifier[_setup_evp_encrypt_decrypt] ( identifier[cipher] , identifier[data] ):
literal[string]
identifier[evp_cipher] ={
literal[string] : identifier[libcrypto] . identifier[EVP_aes_128_cbc] ,
literal[string] : identifier[libcrypto] . identifier[EVP_aes_192_cbc] ,
literal[string] : identifier[libcrypto] . identifier[EVP_aes_256_cbc] ,
literal[string] : identifier[libcrypto] . identifier[EVP_rc2_cbc] ,
literal[string] : identifier[libcrypto] . identifier[EVP_rc4] ,
literal[string] : identifier[libcrypto] . identifier[EVP_des_cbc] ,
literal[string] : identifier[libcrypto] . identifier[EVP_des_ede_cbc] ,
literal[string] : identifier[libcrypto] . identifier[EVP_des_ede3_cbc] ,
}[ identifier[cipher] ]()
keyword[if] identifier[cipher] == literal[string] :
identifier[buffer_size] = identifier[len] ( identifier[data] )
keyword[else] :
identifier[block_size] ={
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
}[ identifier[cipher] ]
identifier[buffer_size] = identifier[block_size] * identifier[int] ( identifier[math] . identifier[ceil] ( identifier[len] ( identifier[data] )/ identifier[block_size] ))
keyword[return] ( identifier[evp_cipher] , identifier[buffer_size] )
|
def _setup_evp_encrypt_decrypt(cipher, data):
"""
Creates an EVP_CIPHER pointer object and determines the buffer size
necessary for the parameter specified.
:param evp_cipher_ctx:
An EVP_CIPHER_CTX pointer
:param cipher:
A unicode string of "aes128", "aes192", "aes256", "des",
"tripledes_2key", "tripledes_3key", "rc2", "rc4"
:param key:
The key byte string
:param data:
The plaintext or ciphertext as a byte string
:param padding:
If padding is to be used
:return:
A 2-element tuple with the first element being an EVP_CIPHER pointer
and the second being an integer that is the required buffer size
"""
evp_cipher = {'aes128': libcrypto.EVP_aes_128_cbc, 'aes192': libcrypto.EVP_aes_192_cbc, 'aes256': libcrypto.EVP_aes_256_cbc, 'rc2': libcrypto.EVP_rc2_cbc, 'rc4': libcrypto.EVP_rc4, 'des': libcrypto.EVP_des_cbc, 'tripledes_2key': libcrypto.EVP_des_ede_cbc, 'tripledes_3key': libcrypto.EVP_des_ede3_cbc}[cipher]()
if cipher == 'rc4':
buffer_size = len(data) # depends on [control=['if'], data=[]]
else:
block_size = {'aes128': 16, 'aes192': 16, 'aes256': 16, 'rc2': 8, 'des': 8, 'tripledes_2key': 8, 'tripledes_3key': 8}[cipher]
buffer_size = block_size * int(math.ceil(len(data) / block_size))
return (evp_cipher, buffer_size)
|
def files_log_graph(self, stream):
''' Build up a graph (nodes and edges from a Bro dns.log) '''
for row in list(stream):
# dataframes['files_log'][['md5','mime_type','missing_bytes','rx_hosts','source','tx_hosts']]
# If the mime-type is interesting add the uri and the host->uri->host relationships
if row['mime_type'] not in self.exclude_mime_types:
# Check for weird conditions
if (row['total_bytes'] == '-'):
continue
if ('-' in row['md5']):
continue
# Check for missing bytes and small file
if row['missing_bytes']:
labels = ['missing', 'file']
elif row['total_bytes'] < 50*1024:
labels = ['small','file']
else:
labels = ['file']
# Make the file node name kewl
name = '%6s %s %.0f-KB' % (row['md5'][:6], row['mime_type'], row['total_bytes']/1024.0)
if row['missing_bytes']:
name += '*'
name = name.replace('application/','')
# Add the file node
self.add_node(row['md5'], name, labels)
# Add the tx_host
self.add_node(row['tx_hosts'], row['tx_hosts'], ['host'])
# Add the file->tx_host relationship
self.add_rel(row['tx_hosts'], row['md5'], 'file')
|
def function[files_log_graph, parameter[self, stream]]:
constant[ Build up a graph (nodes and edges from a Bro dns.log) ]
for taget[name[row]] in starred[call[name[list], parameter[name[stream]]]] begin[:]
if compare[call[name[row]][constant[mime_type]] <ast.NotIn object at 0x7da2590d7190> name[self].exclude_mime_types] begin[:]
if compare[call[name[row]][constant[total_bytes]] equal[==] constant[-]] begin[:]
continue
if compare[constant[-] in call[name[row]][constant[md5]]] begin[:]
continue
if call[name[row]][constant[missing_bytes]] begin[:]
variable[labels] assign[=] list[[<ast.Constant object at 0x7da18dc04dc0>, <ast.Constant object at 0x7da18dc071c0>]]
variable[name] assign[=] binary_operation[constant[%6s %s %.0f-KB] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da18dc07dc0>, <ast.Subscript object at 0x7da18dc05e10>, <ast.BinOp object at 0x7da18dc07040>]]]
if call[name[row]][constant[missing_bytes]] begin[:]
<ast.AugAssign object at 0x7da18dc05ba0>
variable[name] assign[=] call[name[name].replace, parameter[constant[application/], constant[]]]
call[name[self].add_node, parameter[call[name[row]][constant[md5]], name[name], name[labels]]]
call[name[self].add_node, parameter[call[name[row]][constant[tx_hosts]], call[name[row]][constant[tx_hosts]], list[[<ast.Constant object at 0x7da18dc05fc0>]]]]
call[name[self].add_rel, parameter[call[name[row]][constant[tx_hosts]], call[name[row]][constant[md5]], constant[file]]]
|
keyword[def] identifier[files_log_graph] ( identifier[self] , identifier[stream] ):
literal[string]
keyword[for] identifier[row] keyword[in] identifier[list] ( identifier[stream] ):
keyword[if] identifier[row] [ literal[string] ] keyword[not] keyword[in] identifier[self] . identifier[exclude_mime_types] :
keyword[if] ( identifier[row] [ literal[string] ]== literal[string] ):
keyword[continue]
keyword[if] ( literal[string] keyword[in] identifier[row] [ literal[string] ]):
keyword[continue]
keyword[if] identifier[row] [ literal[string] ]:
identifier[labels] =[ literal[string] , literal[string] ]
keyword[elif] identifier[row] [ literal[string] ]< literal[int] * literal[int] :
identifier[labels] =[ literal[string] , literal[string] ]
keyword[else] :
identifier[labels] =[ literal[string] ]
identifier[name] = literal[string] %( identifier[row] [ literal[string] ][: literal[int] ], identifier[row] [ literal[string] ], identifier[row] [ literal[string] ]/ literal[int] )
keyword[if] identifier[row] [ literal[string] ]:
identifier[name] += literal[string]
identifier[name] = identifier[name] . identifier[replace] ( literal[string] , literal[string] )
identifier[self] . identifier[add_node] ( identifier[row] [ literal[string] ], identifier[name] , identifier[labels] )
identifier[self] . identifier[add_node] ( identifier[row] [ literal[string] ], identifier[row] [ literal[string] ],[ literal[string] ])
identifier[self] . identifier[add_rel] ( identifier[row] [ literal[string] ], identifier[row] [ literal[string] ], literal[string] )
|
def files_log_graph(self, stream):
""" Build up a graph (nodes and edges from a Bro dns.log) """
for row in list(stream):
# dataframes['files_log'][['md5','mime_type','missing_bytes','rx_hosts','source','tx_hosts']]
# If the mime-type is interesting add the uri and the host->uri->host relationships
if row['mime_type'] not in self.exclude_mime_types:
# Check for weird conditions
if row['total_bytes'] == '-':
continue # depends on [control=['if'], data=[]]
if '-' in row['md5']:
continue # depends on [control=['if'], data=[]]
# Check for missing bytes and small file
if row['missing_bytes']:
labels = ['missing', 'file'] # depends on [control=['if'], data=[]]
elif row['total_bytes'] < 50 * 1024:
labels = ['small', 'file'] # depends on [control=['if'], data=[]]
else:
labels = ['file']
# Make the file node name kewl
name = '%6s %s %.0f-KB' % (row['md5'][:6], row['mime_type'], row['total_bytes'] / 1024.0)
if row['missing_bytes']:
name += '*' # depends on [control=['if'], data=[]]
name = name.replace('application/', '')
# Add the file node
self.add_node(row['md5'], name, labels)
# Add the tx_host
self.add_node(row['tx_hosts'], row['tx_hosts'], ['host'])
# Add the file->tx_host relationship
self.add_rel(row['tx_hosts'], row['md5'], 'file') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']]
|
def match_abstract_str(cls):
"""
For a given abstract or match rule meta-class returns a nice string
representation for the body.
"""
def r(s):
if s.root:
if s in visited or s.rule_name in ALL_TYPE_NAMES or \
(hasattr(s, '_tx_class') and
s._tx_class._tx_type is not RULE_MATCH):
return s.rule_name
visited.add(s)
if isinstance(s, Match):
result = text(s)
elif isinstance(s, OrderedChoice):
result = "|".join([r(x) for x in s.nodes])
elif isinstance(s, Sequence):
result = " ".join([r(x) for x in s.nodes])
elif isinstance(s, ZeroOrMore):
result = "({})*".format(r(s.nodes[0]))
elif isinstance(s, OneOrMore):
result = "({})+".format(r(s.nodes[0]))
elif isinstance(s, Optional):
result = "{}?".format(r(s.nodes[0]))
elif isinstance(s, SyntaxPredicate):
result = ""
return "{}{}".format(result, "-" if s.suppress else "")
mstr = ""
if cls.__name__ not in ALL_TYPE_NAMES and \
not (cls._tx_type is RULE_ABSTRACT and
cls.__name__ != cls._tx_peg_rule.rule_name):
e = cls._tx_peg_rule
visited = set()
if not isinstance(e, Match):
visited.add(e)
if isinstance(e, OrderedChoice):
mstr = "|".join([r(x) for x in e.nodes
if x.rule_name in BASE_TYPE_NAMES or not x.root])
elif isinstance(e, Sequence):
mstr = " ".join([r(x) for x in e.nodes])
else:
mstr = r(e)
mstr = dot_escape(mstr)
return mstr
|
def function[match_abstract_str, parameter[cls]]:
constant[
For a given abstract or match rule meta-class returns a nice string
representation for the body.
]
def function[r, parameter[s]]:
if name[s].root begin[:]
if <ast.BoolOp object at 0x7da20e961d80> begin[:]
return[name[s].rule_name]
call[name[visited].add, parameter[name[s]]]
if call[name[isinstance], parameter[name[s], name[Match]]] begin[:]
variable[result] assign[=] call[name[text], parameter[name[s]]]
return[call[constant[{}{}].format, parameter[name[result], <ast.IfExp object at 0x7da18fe90070>]]]
variable[mstr] assign[=] constant[]
if <ast.BoolOp object at 0x7da18fe913c0> begin[:]
variable[e] assign[=] name[cls]._tx_peg_rule
variable[visited] assign[=] call[name[set], parameter[]]
if <ast.UnaryOp object at 0x7da18fe92b30> begin[:]
call[name[visited].add, parameter[name[e]]]
if call[name[isinstance], parameter[name[e], name[OrderedChoice]]] begin[:]
variable[mstr] assign[=] call[constant[|].join, parameter[<ast.ListComp object at 0x7da18fe92800>]]
variable[mstr] assign[=] call[name[dot_escape], parameter[name[mstr]]]
return[name[mstr]]
|
keyword[def] identifier[match_abstract_str] ( identifier[cls] ):
literal[string]
keyword[def] identifier[r] ( identifier[s] ):
keyword[if] identifier[s] . identifier[root] :
keyword[if] identifier[s] keyword[in] identifier[visited] keyword[or] identifier[s] . identifier[rule_name] keyword[in] identifier[ALL_TYPE_NAMES] keyword[or] ( identifier[hasattr] ( identifier[s] , literal[string] ) keyword[and]
identifier[s] . identifier[_tx_class] . identifier[_tx_type] keyword[is] keyword[not] identifier[RULE_MATCH] ):
keyword[return] identifier[s] . identifier[rule_name]
identifier[visited] . identifier[add] ( identifier[s] )
keyword[if] identifier[isinstance] ( identifier[s] , identifier[Match] ):
identifier[result] = identifier[text] ( identifier[s] )
keyword[elif] identifier[isinstance] ( identifier[s] , identifier[OrderedChoice] ):
identifier[result] = literal[string] . identifier[join] ([ identifier[r] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[s] . identifier[nodes] ])
keyword[elif] identifier[isinstance] ( identifier[s] , identifier[Sequence] ):
identifier[result] = literal[string] . identifier[join] ([ identifier[r] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[s] . identifier[nodes] ])
keyword[elif] identifier[isinstance] ( identifier[s] , identifier[ZeroOrMore] ):
identifier[result] = literal[string] . identifier[format] ( identifier[r] ( identifier[s] . identifier[nodes] [ literal[int] ]))
keyword[elif] identifier[isinstance] ( identifier[s] , identifier[OneOrMore] ):
identifier[result] = literal[string] . identifier[format] ( identifier[r] ( identifier[s] . identifier[nodes] [ literal[int] ]))
keyword[elif] identifier[isinstance] ( identifier[s] , identifier[Optional] ):
identifier[result] = literal[string] . identifier[format] ( identifier[r] ( identifier[s] . identifier[nodes] [ literal[int] ]))
keyword[elif] identifier[isinstance] ( identifier[s] , identifier[SyntaxPredicate] ):
identifier[result] = literal[string]
keyword[return] literal[string] . identifier[format] ( identifier[result] , literal[string] keyword[if] identifier[s] . identifier[suppress] keyword[else] literal[string] )
identifier[mstr] = literal[string]
keyword[if] identifier[cls] . identifier[__name__] keyword[not] keyword[in] identifier[ALL_TYPE_NAMES] keyword[and] keyword[not] ( identifier[cls] . identifier[_tx_type] keyword[is] identifier[RULE_ABSTRACT] keyword[and]
identifier[cls] . identifier[__name__] != identifier[cls] . identifier[_tx_peg_rule] . identifier[rule_name] ):
identifier[e] = identifier[cls] . identifier[_tx_peg_rule]
identifier[visited] = identifier[set] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[e] , identifier[Match] ):
identifier[visited] . identifier[add] ( identifier[e] )
keyword[if] identifier[isinstance] ( identifier[e] , identifier[OrderedChoice] ):
identifier[mstr] = literal[string] . identifier[join] ([ identifier[r] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[e] . identifier[nodes]
keyword[if] identifier[x] . identifier[rule_name] keyword[in] identifier[BASE_TYPE_NAMES] keyword[or] keyword[not] identifier[x] . identifier[root] ])
keyword[elif] identifier[isinstance] ( identifier[e] , identifier[Sequence] ):
identifier[mstr] = literal[string] . identifier[join] ([ identifier[r] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[e] . identifier[nodes] ])
keyword[else] :
identifier[mstr] = identifier[r] ( identifier[e] )
identifier[mstr] = identifier[dot_escape] ( identifier[mstr] )
keyword[return] identifier[mstr]
|
def match_abstract_str(cls):
"""
For a given abstract or match rule meta-class returns a nice string
representation for the body.
"""
def r(s):
if s.root:
if s in visited or s.rule_name in ALL_TYPE_NAMES or (hasattr(s, '_tx_class') and s._tx_class._tx_type is not RULE_MATCH):
return s.rule_name # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
visited.add(s)
if isinstance(s, Match):
result = text(s) # depends on [control=['if'], data=[]]
elif isinstance(s, OrderedChoice):
result = '|'.join([r(x) for x in s.nodes]) # depends on [control=['if'], data=[]]
elif isinstance(s, Sequence):
result = ' '.join([r(x) for x in s.nodes]) # depends on [control=['if'], data=[]]
elif isinstance(s, ZeroOrMore):
result = '({})*'.format(r(s.nodes[0])) # depends on [control=['if'], data=[]]
elif isinstance(s, OneOrMore):
result = '({})+'.format(r(s.nodes[0])) # depends on [control=['if'], data=[]]
elif isinstance(s, Optional):
result = '{}?'.format(r(s.nodes[0])) # depends on [control=['if'], data=[]]
elif isinstance(s, SyntaxPredicate):
result = '' # depends on [control=['if'], data=[]]
return '{}{}'.format(result, '-' if s.suppress else '')
mstr = ''
if cls.__name__ not in ALL_TYPE_NAMES and (not (cls._tx_type is RULE_ABSTRACT and cls.__name__ != cls._tx_peg_rule.rule_name)):
e = cls._tx_peg_rule
visited = set()
if not isinstance(e, Match):
visited.add(e) # depends on [control=['if'], data=[]]
if isinstance(e, OrderedChoice):
mstr = '|'.join([r(x) for x in e.nodes if x.rule_name in BASE_TYPE_NAMES or not x.root]) # depends on [control=['if'], data=[]]
elif isinstance(e, Sequence):
mstr = ' '.join([r(x) for x in e.nodes]) # depends on [control=['if'], data=[]]
else:
mstr = r(e)
mstr = dot_escape(mstr) # depends on [control=['if'], data=[]]
return mstr
|
def tidy_nlm_references(document):
"""Remove punctuation around references like brackets, commas, hyphens."""
def strip_preceding(text):
stext = text.rstrip()
if stext.endswith('[') or stext.endswith('('):
#log.debug('%s -> %s' % (text, stext[:-1]))
return stext[:-1]
return text
def strip_between(text):
stext = text.strip()
if stext in {',', '-', '\u2013', '\u2212'}:
#log.debug('%s -> %s' % (text, ''))
return ''
return text
def strip_following(text):
stext = text.lstrip()
if stext.startswith(']') or stext.startswith(')'):
#log.debug('%s -> %s' % (text, stext[1:]))
return stext[1:]
return text
for ref in document.xpath('.//xref[@ref-type="bibr"]'):
parent = ref.getparent()
previous = ref.getprevious()
next = ref.getnext()
if previous is None:
parent.text = strip_preceding(parent.text or '')
else:
previous.tail = strip_preceding(previous.tail or '')
if next is not None and next.tag == 'xref' and next.get('ref-type') == 'bibr':
ref.tail = strip_between(ref.tail or '')
ref.tail = strip_following(ref.tail or '')
return document
|
def function[tidy_nlm_references, parameter[document]]:
constant[Remove punctuation around references like brackets, commas, hyphens.]
def function[strip_preceding, parameter[text]]:
variable[stext] assign[=] call[name[text].rstrip, parameter[]]
if <ast.BoolOp object at 0x7da18fe90910> begin[:]
return[call[name[stext]][<ast.Slice object at 0x7da20e9572e0>]]
return[name[text]]
def function[strip_between, parameter[text]]:
variable[stext] assign[=] call[name[text].strip, parameter[]]
if compare[name[stext] in <ast.Set object at 0x7da1b152a110>] begin[:]
return[constant[]]
return[name[text]]
def function[strip_following, parameter[text]]:
variable[stext] assign[=] call[name[text].lstrip, parameter[]]
if <ast.BoolOp object at 0x7da1b15287f0> begin[:]
return[call[name[stext]][<ast.Slice object at 0x7da20c9920b0>]]
return[name[text]]
for taget[name[ref]] in starred[call[name[document].xpath, parameter[constant[.//xref[@ref-type="bibr"]]]]] begin[:]
variable[parent] assign[=] call[name[ref].getparent, parameter[]]
variable[previous] assign[=] call[name[ref].getprevious, parameter[]]
variable[next] assign[=] call[name[ref].getnext, parameter[]]
if compare[name[previous] is constant[None]] begin[:]
name[parent].text assign[=] call[name[strip_preceding], parameter[<ast.BoolOp object at 0x7da20c9905b0>]]
if <ast.BoolOp object at 0x7da20c993130> begin[:]
name[ref].tail assign[=] call[name[strip_between], parameter[<ast.BoolOp object at 0x7da20c9900a0>]]
name[ref].tail assign[=] call[name[strip_following], parameter[<ast.BoolOp object at 0x7da20c992f50>]]
return[name[document]]
|
keyword[def] identifier[tidy_nlm_references] ( identifier[document] ):
literal[string]
keyword[def] identifier[strip_preceding] ( identifier[text] ):
identifier[stext] = identifier[text] . identifier[rstrip] ()
keyword[if] identifier[stext] . identifier[endswith] ( literal[string] ) keyword[or] identifier[stext] . identifier[endswith] ( literal[string] ):
keyword[return] identifier[stext] [:- literal[int] ]
keyword[return] identifier[text]
keyword[def] identifier[strip_between] ( identifier[text] ):
identifier[stext] = identifier[text] . identifier[strip] ()
keyword[if] identifier[stext] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] }:
keyword[return] literal[string]
keyword[return] identifier[text]
keyword[def] identifier[strip_following] ( identifier[text] ):
identifier[stext] = identifier[text] . identifier[lstrip] ()
keyword[if] identifier[stext] . identifier[startswith] ( literal[string] ) keyword[or] identifier[stext] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[stext] [ literal[int] :]
keyword[return] identifier[text]
keyword[for] identifier[ref] keyword[in] identifier[document] . identifier[xpath] ( literal[string] ):
identifier[parent] = identifier[ref] . identifier[getparent] ()
identifier[previous] = identifier[ref] . identifier[getprevious] ()
identifier[next] = identifier[ref] . identifier[getnext] ()
keyword[if] identifier[previous] keyword[is] keyword[None] :
identifier[parent] . identifier[text] = identifier[strip_preceding] ( identifier[parent] . identifier[text] keyword[or] literal[string] )
keyword[else] :
identifier[previous] . identifier[tail] = identifier[strip_preceding] ( identifier[previous] . identifier[tail] keyword[or] literal[string] )
keyword[if] identifier[next] keyword[is] keyword[not] keyword[None] keyword[and] identifier[next] . identifier[tag] == literal[string] keyword[and] identifier[next] . identifier[get] ( literal[string] )== literal[string] :
identifier[ref] . identifier[tail] = identifier[strip_between] ( identifier[ref] . identifier[tail] keyword[or] literal[string] )
identifier[ref] . identifier[tail] = identifier[strip_following] ( identifier[ref] . identifier[tail] keyword[or] literal[string] )
keyword[return] identifier[document]
|
def tidy_nlm_references(document):
"""Remove punctuation around references like brackets, commas, hyphens."""
def strip_preceding(text):
stext = text.rstrip()
if stext.endswith('[') or stext.endswith('('):
#log.debug('%s -> %s' % (text, stext[:-1]))
return stext[:-1] # depends on [control=['if'], data=[]]
return text
def strip_between(text):
stext = text.strip()
if stext in {',', '-', '–', '−'}:
#log.debug('%s -> %s' % (text, ''))
return '' # depends on [control=['if'], data=[]]
return text
def strip_following(text):
stext = text.lstrip()
if stext.startswith(']') or stext.startswith(')'):
#log.debug('%s -> %s' % (text, stext[1:]))
return stext[1:] # depends on [control=['if'], data=[]]
return text
for ref in document.xpath('.//xref[@ref-type="bibr"]'):
parent = ref.getparent()
previous = ref.getprevious()
next = ref.getnext()
if previous is None:
parent.text = strip_preceding(parent.text or '') # depends on [control=['if'], data=[]]
else:
previous.tail = strip_preceding(previous.tail or '')
if next is not None and next.tag == 'xref' and (next.get('ref-type') == 'bibr'):
ref.tail = strip_between(ref.tail or '') # depends on [control=['if'], data=[]]
ref.tail = strip_following(ref.tail or '') # depends on [control=['for'], data=['ref']]
return document
|
def transfer(self, user):
"""Transfers app to given username's account."""
r = self._h._http_resource(
method='PUT',
resource=('apps', self.name),
data={'app[transfer_owner]': user}
)
return r.ok
|
def function[transfer, parameter[self, user]]:
constant[Transfers app to given username's account.]
variable[r] assign[=] call[name[self]._h._http_resource, parameter[]]
return[name[r].ok]
|
keyword[def] identifier[transfer] ( identifier[self] , identifier[user] ):
literal[string]
identifier[r] = identifier[self] . identifier[_h] . identifier[_http_resource] (
identifier[method] = literal[string] ,
identifier[resource] =( literal[string] , identifier[self] . identifier[name] ),
identifier[data] ={ literal[string] : identifier[user] }
)
keyword[return] identifier[r] . identifier[ok]
|
def transfer(self, user):
"""Transfers app to given username's account."""
r = self._h._http_resource(method='PUT', resource=('apps', self.name), data={'app[transfer_owner]': user})
return r.ok
|
def connect_ses(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ses.SESConnection`
:return: A connection to Amazon's SES
"""
from boto.ses import SESConnection
return SESConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
|
def function[connect_ses, parameter[aws_access_key_id, aws_secret_access_key]]:
constant[
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ses.SESConnection`
:return: A connection to Amazon's SES
]
from relative_module[boto.ses] import module[SESConnection]
return[call[name[SESConnection], parameter[name[aws_access_key_id], name[aws_secret_access_key]]]]
|
keyword[def] identifier[connect_ses] ( identifier[aws_access_key_id] = keyword[None] , identifier[aws_secret_access_key] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[from] identifier[boto] . identifier[ses] keyword[import] identifier[SESConnection]
keyword[return] identifier[SESConnection] ( identifier[aws_access_key_id] , identifier[aws_secret_access_key] ,** identifier[kwargs] )
|
def connect_ses(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ses.SESConnection`
:return: A connection to Amazon's SES
"""
from boto.ses import SESConnection
return SESConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
|
def play_tone(self, pin, tone_command, frequency, duration=None):
"""
This method will call the Tone library for the selected pin.
It requires FirmataPlus to be loaded onto the arduino
If the tone command is set to TONE_TONE, then the specified
tone will be played.
Else, if the tone command is TONE_NO_TONE, then any currently
playing tone will be disabled.
:param pin: Pin number
:param tone_command: Either TONE_TONE, or TONE_NO_TONE
:param frequency: Frequency of tone
:param duration: Duration of tone in milliseconds
:returns: No return value
"""
task = asyncio.ensure_future(self.core.play_tone(pin, tone_command,
frequency, duration))
self.loop.run_until_complete(task)
|
def function[play_tone, parameter[self, pin, tone_command, frequency, duration]]:
constant[
This method will call the Tone library for the selected pin.
It requires FirmataPlus to be loaded onto the arduino
If the tone command is set to TONE_TONE, then the specified
tone will be played.
Else, if the tone command is TONE_NO_TONE, then any currently
playing tone will be disabled.
:param pin: Pin number
:param tone_command: Either TONE_TONE, or TONE_NO_TONE
:param frequency: Frequency of tone
:param duration: Duration of tone in milliseconds
:returns: No return value
]
variable[task] assign[=] call[name[asyncio].ensure_future, parameter[call[name[self].core.play_tone, parameter[name[pin], name[tone_command], name[frequency], name[duration]]]]]
call[name[self].loop.run_until_complete, parameter[name[task]]]
|
keyword[def] identifier[play_tone] ( identifier[self] , identifier[pin] , identifier[tone_command] , identifier[frequency] , identifier[duration] = keyword[None] ):
literal[string]
identifier[task] = identifier[asyncio] . identifier[ensure_future] ( identifier[self] . identifier[core] . identifier[play_tone] ( identifier[pin] , identifier[tone_command] ,
identifier[frequency] , identifier[duration] ))
identifier[self] . identifier[loop] . identifier[run_until_complete] ( identifier[task] )
|
def play_tone(self, pin, tone_command, frequency, duration=None):
"""
This method will call the Tone library for the selected pin.
It requires FirmataPlus to be loaded onto the arduino
If the tone command is set to TONE_TONE, then the specified
tone will be played.
Else, if the tone command is TONE_NO_TONE, then any currently
playing tone will be disabled.
:param pin: Pin number
:param tone_command: Either TONE_TONE, or TONE_NO_TONE
:param frequency: Frequency of tone
:param duration: Duration of tone in milliseconds
:returns: No return value
"""
task = asyncio.ensure_future(self.core.play_tone(pin, tone_command, frequency, duration))
self.loop.run_until_complete(task)
|
def _new_name(method, old_name):
"""Return a method with a deprecation warning."""
# Looks suspiciously like a decorator, but isn't!
@wraps(method)
def _method(*args, **kwargs):
warnings.warn(
"method '{}' has been deprecated, please rename to '{}'".format(
old_name, method.__name__
),
DeprecationWarning,
)
return method(*args, **kwargs)
deprecated_msg = """
Note:
.. deprecated:: 2.2.0
Please use `~{}`
""".format(
method.__name__
)
if getattr(_method, "__doc__"):
_method.__doc__ += deprecated_msg
return _method
|
def function[_new_name, parameter[method, old_name]]:
constant[Return a method with a deprecation warning.]
def function[_method, parameter[]]:
call[name[warnings].warn, parameter[call[constant[method '{}' has been deprecated, please rename to '{}'].format, parameter[name[old_name], name[method].__name__]], name[DeprecationWarning]]]
return[call[name[method], parameter[<ast.Starred object at 0x7da1b16b1de0>]]]
variable[deprecated_msg] assign[=] call[constant[
Note:
.. deprecated:: 2.2.0
Please use `~{}`
].format, parameter[name[method].__name__]]
if call[name[getattr], parameter[name[_method], constant[__doc__]]] begin[:]
<ast.AugAssign object at 0x7da1b16b0670>
return[name[_method]]
|
keyword[def] identifier[_new_name] ( identifier[method] , identifier[old_name] ):
literal[string]
@ identifier[wraps] ( identifier[method] )
keyword[def] identifier[_method] (* identifier[args] ,** identifier[kwargs] ):
identifier[warnings] . identifier[warn] (
literal[string] . identifier[format] (
identifier[old_name] , identifier[method] . identifier[__name__]
),
identifier[DeprecationWarning] ,
)
keyword[return] identifier[method] (* identifier[args] ,** identifier[kwargs] )
identifier[deprecated_msg] = literal[string] . identifier[format] (
identifier[method] . identifier[__name__]
)
keyword[if] identifier[getattr] ( identifier[_method] , literal[string] ):
identifier[_method] . identifier[__doc__] += identifier[deprecated_msg]
keyword[return] identifier[_method]
|
def _new_name(method, old_name):
"""Return a method with a deprecation warning."""
# Looks suspiciously like a decorator, but isn't!
@wraps(method)
def _method(*args, **kwargs):
warnings.warn("method '{}' has been deprecated, please rename to '{}'".format(old_name, method.__name__), DeprecationWarning)
return method(*args, **kwargs)
deprecated_msg = '\n Note:\n .. deprecated:: 2.2.0\n Please use `~{}`\n'.format(method.__name__)
if getattr(_method, '__doc__'):
_method.__doc__ += deprecated_msg # depends on [control=['if'], data=[]]
return _method
|
def is_continuous(self):
"""Boolean denoting whether the data collection is continuous."""
if self._validated_a_period is True and \
len(self.values) == len(self.header.analysis_period.months_int):
return True
else:
return False
|
def function[is_continuous, parameter[self]]:
constant[Boolean denoting whether the data collection is continuous.]
if <ast.BoolOp object at 0x7da1b12ba890> begin[:]
return[constant[True]]
|
keyword[def] identifier[is_continuous] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_validated_a_period] keyword[is] keyword[True] keyword[and] identifier[len] ( identifier[self] . identifier[values] )== identifier[len] ( identifier[self] . identifier[header] . identifier[analysis_period] . identifier[months_int] ):
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False]
|
def is_continuous(self):
"""Boolean denoting whether the data collection is continuous."""
if self._validated_a_period is True and len(self.values) == len(self.header.analysis_period.months_int):
return True # depends on [control=['if'], data=[]]
else:
return False
|
def get_block_from_time(self, timestring, error_margin=10):
""" Estimate block number from given time
:param str timestring: String representing time
:param int error_margin: Estimate block number within this interval (in seconds)
"""
known_block = self.get_current_block()['block_num']
known_block_timestamp = self.block_timestamp(known_block)
timestring_timestamp = parse_time(timestring).timestamp()
delta = known_block_timestamp - timestring_timestamp
block_delta = delta / 3
guess_block = known_block - block_delta
guess_block_timestamp = self.block_timestamp(guess_block)
error = timestring_timestamp - guess_block_timestamp
while abs(error) > error_margin:
guess_block += error / 3
guess_block_timestamp = self.block_timestamp(guess_block)
error = timestring_timestamp - guess_block_timestamp
return int(guess_block)
|
def function[get_block_from_time, parameter[self, timestring, error_margin]]:
constant[ Estimate block number from given time
:param str timestring: String representing time
:param int error_margin: Estimate block number within this interval (in seconds)
]
variable[known_block] assign[=] call[call[name[self].get_current_block, parameter[]]][constant[block_num]]
variable[known_block_timestamp] assign[=] call[name[self].block_timestamp, parameter[name[known_block]]]
variable[timestring_timestamp] assign[=] call[call[name[parse_time], parameter[name[timestring]]].timestamp, parameter[]]
variable[delta] assign[=] binary_operation[name[known_block_timestamp] - name[timestring_timestamp]]
variable[block_delta] assign[=] binary_operation[name[delta] / constant[3]]
variable[guess_block] assign[=] binary_operation[name[known_block] - name[block_delta]]
variable[guess_block_timestamp] assign[=] call[name[self].block_timestamp, parameter[name[guess_block]]]
variable[error] assign[=] binary_operation[name[timestring_timestamp] - name[guess_block_timestamp]]
while compare[call[name[abs], parameter[name[error]]] greater[>] name[error_margin]] begin[:]
<ast.AugAssign object at 0x7da1b0879120>
variable[guess_block_timestamp] assign[=] call[name[self].block_timestamp, parameter[name[guess_block]]]
variable[error] assign[=] binary_operation[name[timestring_timestamp] - name[guess_block_timestamp]]
return[call[name[int], parameter[name[guess_block]]]]
|
keyword[def] identifier[get_block_from_time] ( identifier[self] , identifier[timestring] , identifier[error_margin] = literal[int] ):
literal[string]
identifier[known_block] = identifier[self] . identifier[get_current_block] ()[ literal[string] ]
identifier[known_block_timestamp] = identifier[self] . identifier[block_timestamp] ( identifier[known_block] )
identifier[timestring_timestamp] = identifier[parse_time] ( identifier[timestring] ). identifier[timestamp] ()
identifier[delta] = identifier[known_block_timestamp] - identifier[timestring_timestamp]
identifier[block_delta] = identifier[delta] / literal[int]
identifier[guess_block] = identifier[known_block] - identifier[block_delta]
identifier[guess_block_timestamp] = identifier[self] . identifier[block_timestamp] ( identifier[guess_block] )
identifier[error] = identifier[timestring_timestamp] - identifier[guess_block_timestamp]
keyword[while] identifier[abs] ( identifier[error] )> identifier[error_margin] :
identifier[guess_block] += identifier[error] / literal[int]
identifier[guess_block_timestamp] = identifier[self] . identifier[block_timestamp] ( identifier[guess_block] )
identifier[error] = identifier[timestring_timestamp] - identifier[guess_block_timestamp]
keyword[return] identifier[int] ( identifier[guess_block] )
|
def get_block_from_time(self, timestring, error_margin=10):
""" Estimate block number from given time
:param str timestring: String representing time
:param int error_margin: Estimate block number within this interval (in seconds)
"""
known_block = self.get_current_block()['block_num']
known_block_timestamp = self.block_timestamp(known_block)
timestring_timestamp = parse_time(timestring).timestamp()
delta = known_block_timestamp - timestring_timestamp
block_delta = delta / 3
guess_block = known_block - block_delta
guess_block_timestamp = self.block_timestamp(guess_block)
error = timestring_timestamp - guess_block_timestamp
while abs(error) > error_margin:
guess_block += error / 3
guess_block_timestamp = self.block_timestamp(guess_block)
error = timestring_timestamp - guess_block_timestamp # depends on [control=['while'], data=[]]
return int(guess_block)
|
def expr_match(line, expr):
'''
Checks whether or not the passed value matches the specified expression.
Tries to match expr first as a glob using fnmatch.fnmatch(), and then tries
to match expr as a regular expression. Originally designed to match minion
IDs for whitelists/blacklists.
Note that this also does exact matches, as fnmatch.fnmatch() will return
``True`` when no glob characters are used and the string is an exact match:
.. code-block:: python
>>> fnmatch.fnmatch('foo', 'foo')
True
'''
try:
if fnmatch.fnmatch(line, expr):
return True
try:
if re.match(r'\A{0}\Z'.format(expr), line):
return True
except re.error:
pass
except TypeError:
log.exception('Value %r or expression %r is not a string', line, expr)
return False
|
def function[expr_match, parameter[line, expr]]:
constant[
Checks whether or not the passed value matches the specified expression.
Tries to match expr first as a glob using fnmatch.fnmatch(), and then tries
to match expr as a regular expression. Originally designed to match minion
IDs for whitelists/blacklists.
Note that this also does exact matches, as fnmatch.fnmatch() will return
``True`` when no glob characters are used and the string is an exact match:
.. code-block:: python
>>> fnmatch.fnmatch('foo', 'foo')
True
]
<ast.Try object at 0x7da1b26adde0>
return[constant[False]]
|
keyword[def] identifier[expr_match] ( identifier[line] , identifier[expr] ):
literal[string]
keyword[try] :
keyword[if] identifier[fnmatch] . identifier[fnmatch] ( identifier[line] , identifier[expr] ):
keyword[return] keyword[True]
keyword[try] :
keyword[if] identifier[re] . identifier[match] ( literal[string] . identifier[format] ( identifier[expr] ), identifier[line] ):
keyword[return] keyword[True]
keyword[except] identifier[re] . identifier[error] :
keyword[pass]
keyword[except] identifier[TypeError] :
identifier[log] . identifier[exception] ( literal[string] , identifier[line] , identifier[expr] )
keyword[return] keyword[False]
|
def expr_match(line, expr):
"""
Checks whether or not the passed value matches the specified expression.
Tries to match expr first as a glob using fnmatch.fnmatch(), and then tries
to match expr as a regular expression. Originally designed to match minion
IDs for whitelists/blacklists.
Note that this also does exact matches, as fnmatch.fnmatch() will return
``True`` when no glob characters are used and the string is an exact match:
.. code-block:: python
>>> fnmatch.fnmatch('foo', 'foo')
True
"""
try:
if fnmatch.fnmatch(line, expr):
return True # depends on [control=['if'], data=[]]
try:
if re.match('\\A{0}\\Z'.format(expr), line):
return True # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except re.error:
pass # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
except TypeError:
log.exception('Value %r or expression %r is not a string', line, expr) # depends on [control=['except'], data=[]]
return False
|
def secured_clipboard(item):
"""This clipboard only allows 1 paste
"""
expire_clock = time.time()
def set_text(clipboard, selectiondata, info, data):
# expire after 15 secs
if 15.0 >= time.time() - expire_clock:
selectiondata.set_text(item.get_secret())
clipboard.clear()
def clear(clipboard, data):
"""Clearing of the buffer is deferred this only gets called if the
paste is actually triggered
"""
pass
targets = [("STRING", 0, 0)
,("TEXT", 0, 1)
,("COMPOUND_TEXT", 0, 2)
,("UTF8_STRING", 0, 3)]
cp = gtk.clipboard_get()
cp.set_with_data(targets, set_text, clear)
|
def function[secured_clipboard, parameter[item]]:
constant[This clipboard only allows 1 paste
]
variable[expire_clock] assign[=] call[name[time].time, parameter[]]
def function[set_text, parameter[clipboard, selectiondata, info, data]]:
if compare[constant[15.0] greater_or_equal[>=] binary_operation[call[name[time].time, parameter[]] - name[expire_clock]]] begin[:]
call[name[selectiondata].set_text, parameter[call[name[item].get_secret, parameter[]]]]
call[name[clipboard].clear, parameter[]]
def function[clear, parameter[clipboard, data]]:
constant[Clearing of the buffer is deferred this only gets called if the
paste is actually triggered
]
pass
variable[targets] assign[=] list[[<ast.Tuple object at 0x7da18c4cf310>, <ast.Tuple object at 0x7da18c4cd090>, <ast.Tuple object at 0x7da18c4ceaa0>, <ast.Tuple object at 0x7da18c4ce320>]]
variable[cp] assign[=] call[name[gtk].clipboard_get, parameter[]]
call[name[cp].set_with_data, parameter[name[targets], name[set_text], name[clear]]]
|
keyword[def] identifier[secured_clipboard] ( identifier[item] ):
literal[string]
identifier[expire_clock] = identifier[time] . identifier[time] ()
keyword[def] identifier[set_text] ( identifier[clipboard] , identifier[selectiondata] , identifier[info] , identifier[data] ):
keyword[if] literal[int] >= identifier[time] . identifier[time] ()- identifier[expire_clock] :
identifier[selectiondata] . identifier[set_text] ( identifier[item] . identifier[get_secret] ())
identifier[clipboard] . identifier[clear] ()
keyword[def] identifier[clear] ( identifier[clipboard] , identifier[data] ):
literal[string]
keyword[pass]
identifier[targets] =[( literal[string] , literal[int] , literal[int] )
,( literal[string] , literal[int] , literal[int] )
,( literal[string] , literal[int] , literal[int] )
,( literal[string] , literal[int] , literal[int] )]
identifier[cp] = identifier[gtk] . identifier[clipboard_get] ()
identifier[cp] . identifier[set_with_data] ( identifier[targets] , identifier[set_text] , identifier[clear] )
|
def secured_clipboard(item):
"""This clipboard only allows 1 paste
"""
expire_clock = time.time()
def set_text(clipboard, selectiondata, info, data):
# expire after 15 secs
if 15.0 >= time.time() - expire_clock:
selectiondata.set_text(item.get_secret())
clipboard.clear() # depends on [control=['if'], data=[]]
def clear(clipboard, data):
"""Clearing of the buffer is deferred this only gets called if the
paste is actually triggered
"""
pass
targets = [('STRING', 0, 0), ('TEXT', 0, 1), ('COMPOUND_TEXT', 0, 2), ('UTF8_STRING', 0, 3)]
cp = gtk.clipboard_get()
cp.set_with_data(targets, set_text, clear)
|
def file_objects(self):
"""Returns the :class:`basc_py4chan.File` objects of all files attached to posts in the thread."""
if self.topic.has_file:
yield self.topic.file
for reply in self.replies:
if reply.has_file:
yield reply.file
|
def function[file_objects, parameter[self]]:
constant[Returns the :class:`basc_py4chan.File` objects of all files attached to posts in the thread.]
if name[self].topic.has_file begin[:]
<ast.Yield object at 0x7da1b267a8c0>
for taget[name[reply]] in starred[name[self].replies] begin[:]
if name[reply].has_file begin[:]
<ast.Yield object at 0x7da1b2650f70>
|
keyword[def] identifier[file_objects] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[topic] . identifier[has_file] :
keyword[yield] identifier[self] . identifier[topic] . identifier[file]
keyword[for] identifier[reply] keyword[in] identifier[self] . identifier[replies] :
keyword[if] identifier[reply] . identifier[has_file] :
keyword[yield] identifier[reply] . identifier[file]
|
def file_objects(self):
"""Returns the :class:`basc_py4chan.File` objects of all files attached to posts in the thread."""
if self.topic.has_file:
yield self.topic.file # depends on [control=['if'], data=[]]
for reply in self.replies:
if reply.has_file:
yield reply.file # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['reply']]
|
def local_minima(img, min_distance = 4):
r"""
Returns all local minima from an image.
Parameters
----------
img : array_like
The image.
min_distance : integer
The minimal distance between the minimas in voxels. If it is less, only the lower minima is returned.
Returns
-------
indices : sequence
List of all minima indices.
values : sequence
List of all minima values.
"""
# @TODO: Write a unittest for this.
fits = numpy.asarray(img)
minfits = minimum_filter(fits, size=min_distance) # default mode is reflect
minima_mask = fits == minfits
good_indices = numpy.transpose(minima_mask.nonzero())
good_fits = fits[minima_mask]
order = good_fits.argsort()
return good_indices[order], good_fits[order]
|
def function[local_minima, parameter[img, min_distance]]:
constant[
Returns all local minima from an image.
Parameters
----------
img : array_like
The image.
min_distance : integer
The minimal distance between the minimas in voxels. If it is less, only the lower minima is returned.
Returns
-------
indices : sequence
List of all minima indices.
values : sequence
List of all minima values.
]
variable[fits] assign[=] call[name[numpy].asarray, parameter[name[img]]]
variable[minfits] assign[=] call[name[minimum_filter], parameter[name[fits]]]
variable[minima_mask] assign[=] compare[name[fits] equal[==] name[minfits]]
variable[good_indices] assign[=] call[name[numpy].transpose, parameter[call[name[minima_mask].nonzero, parameter[]]]]
variable[good_fits] assign[=] call[name[fits]][name[minima_mask]]
variable[order] assign[=] call[name[good_fits].argsort, parameter[]]
return[tuple[[<ast.Subscript object at 0x7da20e956ad0>, <ast.Subscript object at 0x7da20e954850>]]]
|
keyword[def] identifier[local_minima] ( identifier[img] , identifier[min_distance] = literal[int] ):
literal[string]
identifier[fits] = identifier[numpy] . identifier[asarray] ( identifier[img] )
identifier[minfits] = identifier[minimum_filter] ( identifier[fits] , identifier[size] = identifier[min_distance] )
identifier[minima_mask] = identifier[fits] == identifier[minfits]
identifier[good_indices] = identifier[numpy] . identifier[transpose] ( identifier[minima_mask] . identifier[nonzero] ())
identifier[good_fits] = identifier[fits] [ identifier[minima_mask] ]
identifier[order] = identifier[good_fits] . identifier[argsort] ()
keyword[return] identifier[good_indices] [ identifier[order] ], identifier[good_fits] [ identifier[order] ]
|
def local_minima(img, min_distance=4):
"""
Returns all local minima from an image.
Parameters
----------
img : array_like
The image.
min_distance : integer
The minimal distance between the minimas in voxels. If it is less, only the lower minima is returned.
Returns
-------
indices : sequence
List of all minima indices.
values : sequence
List of all minima values.
"""
# @TODO: Write a unittest for this.
fits = numpy.asarray(img)
minfits = minimum_filter(fits, size=min_distance) # default mode is reflect
minima_mask = fits == minfits
good_indices = numpy.transpose(minima_mask.nonzero())
good_fits = fits[minima_mask]
order = good_fits.argsort()
return (good_indices[order], good_fits[order])
|
def resource_name(self, resource):
"""
Return the name of the file within the reference package for a
particular named resource.
"""
if not(resource in self.contents['files']):
raise ValueError("No such resource %r in refpkg" % (resource,))
return self.contents['files'][resource]
|
def function[resource_name, parameter[self, resource]]:
constant[
Return the name of the file within the reference package for a
particular named resource.
]
if <ast.UnaryOp object at 0x7da1b1b9dd20> begin[:]
<ast.Raise object at 0x7da1b1b9c520>
return[call[call[name[self].contents][constant[files]]][name[resource]]]
|
keyword[def] identifier[resource_name] ( identifier[self] , identifier[resource] ):
literal[string]
keyword[if] keyword[not] ( identifier[resource] keyword[in] identifier[self] . identifier[contents] [ literal[string] ]):
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[resource] ,))
keyword[return] identifier[self] . identifier[contents] [ literal[string] ][ identifier[resource] ]
|
def resource_name(self, resource):
"""
Return the name of the file within the reference package for a
particular named resource.
"""
if not resource in self.contents['files']:
raise ValueError('No such resource %r in refpkg' % (resource,)) # depends on [control=['if'], data=[]]
return self.contents['files'][resource]
|
def get_instance(self, payload):
"""
Build an instance of LocalInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.incoming_phone_number.local.LocalInstance
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.local.LocalInstance
"""
return LocalInstance(self._version, payload, account_sid=self._solution['account_sid'], )
|
def function[get_instance, parameter[self, payload]]:
constant[
Build an instance of LocalInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.incoming_phone_number.local.LocalInstance
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.local.LocalInstance
]
return[call[name[LocalInstance], parameter[name[self]._version, name[payload]]]]
|
keyword[def] identifier[get_instance] ( identifier[self] , identifier[payload] ):
literal[string]
keyword[return] identifier[LocalInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],)
|
def get_instance(self, payload):
"""
Build an instance of LocalInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.incoming_phone_number.local.LocalInstance
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.local.LocalInstance
"""
return LocalInstance(self._version, payload, account_sid=self._solution['account_sid'])
|
def get_eligible_features(examples, num_mutants):
"""Returns a list of JSON objects for each feature in the examples.
This list is used to drive partial dependence plots in the plugin.
Args:
examples: Examples to examine to determine the eligible features.
num_mutants: The number of mutations to make over each feature.
Returns:
A list with a JSON object for each feature.
Numeric features are represented as {name: observedMin: observedMax:}.
Categorical features are repesented as {name: samples:[]}.
"""
features_dict = (
get_numeric_features_to_observed_range(
examples))
features_dict.update(
get_categorical_features_to_sampling(
examples, num_mutants))
# Massage the features_dict into a sorted list before returning because
# Polymer dom-repeat needs a list.
features_list = []
for k, v in sorted(features_dict.items()):
v['name'] = k
features_list.append(v)
return features_list
|
def function[get_eligible_features, parameter[examples, num_mutants]]:
constant[Returns a list of JSON objects for each feature in the examples.
This list is used to drive partial dependence plots in the plugin.
Args:
examples: Examples to examine to determine the eligible features.
num_mutants: The number of mutations to make over each feature.
Returns:
A list with a JSON object for each feature.
Numeric features are represented as {name: observedMin: observedMax:}.
Categorical features are repesented as {name: samples:[]}.
]
variable[features_dict] assign[=] call[name[get_numeric_features_to_observed_range], parameter[name[examples]]]
call[name[features_dict].update, parameter[call[name[get_categorical_features_to_sampling], parameter[name[examples], name[num_mutants]]]]]
variable[features_list] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b21a5ed0>, <ast.Name object at 0x7da1b21a4c10>]]] in starred[call[name[sorted], parameter[call[name[features_dict].items, parameter[]]]]] begin[:]
call[name[v]][constant[name]] assign[=] name[k]
call[name[features_list].append, parameter[name[v]]]
return[name[features_list]]
|
keyword[def] identifier[get_eligible_features] ( identifier[examples] , identifier[num_mutants] ):
literal[string]
identifier[features_dict] =(
identifier[get_numeric_features_to_observed_range] (
identifier[examples] ))
identifier[features_dict] . identifier[update] (
identifier[get_categorical_features_to_sampling] (
identifier[examples] , identifier[num_mutants] ))
identifier[features_list] =[]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[sorted] ( identifier[features_dict] . identifier[items] ()):
identifier[v] [ literal[string] ]= identifier[k]
identifier[features_list] . identifier[append] ( identifier[v] )
keyword[return] identifier[features_list]
|
def get_eligible_features(examples, num_mutants):
"""Returns a list of JSON objects for each feature in the examples.
This list is used to drive partial dependence plots in the plugin.
Args:
examples: Examples to examine to determine the eligible features.
num_mutants: The number of mutations to make over each feature.
Returns:
A list with a JSON object for each feature.
Numeric features are represented as {name: observedMin: observedMax:}.
Categorical features are repesented as {name: samples:[]}.
"""
features_dict = get_numeric_features_to_observed_range(examples)
features_dict.update(get_categorical_features_to_sampling(examples, num_mutants))
# Massage the features_dict into a sorted list before returning because
# Polymer dom-repeat needs a list.
features_list = []
for (k, v) in sorted(features_dict.items()):
v['name'] = k
features_list.append(v) # depends on [control=['for'], data=[]]
return features_list
|
def pots(self, refresh=False):
"""
Returns a list of pots owned by the currently authorised user.
Official docs:
https://monzo.com/docs/#pots
:param refresh: decides if the pots information should be refreshed.
:type refresh: bool
:returns: list of Monzo pots
:rtype: list of MonzoPot
"""
if not refresh and self._cached_pots:
return self._cached_pots
endpoint = '/pots/listV1'
response = self._get_response(
method='get', endpoint=endpoint,
)
pots_json = response.json()['pots']
pots = [MonzoPot(data=pot) for pot in pots_json]
self._cached_pots = pots
return pots
|
def function[pots, parameter[self, refresh]]:
constant[
Returns a list of pots owned by the currently authorised user.
Official docs:
https://monzo.com/docs/#pots
:param refresh: decides if the pots information should be refreshed.
:type refresh: bool
:returns: list of Monzo pots
:rtype: list of MonzoPot
]
if <ast.BoolOp object at 0x7da1b0f3a3b0> begin[:]
return[name[self]._cached_pots]
variable[endpoint] assign[=] constant[/pots/listV1]
variable[response] assign[=] call[name[self]._get_response, parameter[]]
variable[pots_json] assign[=] call[call[name[response].json, parameter[]]][constant[pots]]
variable[pots] assign[=] <ast.ListComp object at 0x7da1b11126b0>
name[self]._cached_pots assign[=] name[pots]
return[name[pots]]
|
keyword[def] identifier[pots] ( identifier[self] , identifier[refresh] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[refresh] keyword[and] identifier[self] . identifier[_cached_pots] :
keyword[return] identifier[self] . identifier[_cached_pots]
identifier[endpoint] = literal[string]
identifier[response] = identifier[self] . identifier[_get_response] (
identifier[method] = literal[string] , identifier[endpoint] = identifier[endpoint] ,
)
identifier[pots_json] = identifier[response] . identifier[json] ()[ literal[string] ]
identifier[pots] =[ identifier[MonzoPot] ( identifier[data] = identifier[pot] ) keyword[for] identifier[pot] keyword[in] identifier[pots_json] ]
identifier[self] . identifier[_cached_pots] = identifier[pots]
keyword[return] identifier[pots]
|
def pots(self, refresh=False):
"""
Returns a list of pots owned by the currently authorised user.
Official docs:
https://monzo.com/docs/#pots
:param refresh: decides if the pots information should be refreshed.
:type refresh: bool
:returns: list of Monzo pots
:rtype: list of MonzoPot
"""
if not refresh and self._cached_pots:
return self._cached_pots # depends on [control=['if'], data=[]]
endpoint = '/pots/listV1'
response = self._get_response(method='get', endpoint=endpoint)
pots_json = response.json()['pots']
pots = [MonzoPot(data=pot) for pot in pots_json]
self._cached_pots = pots
return pots
|
def mac_access_list_standard_hide_mac_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
mac = ET.SubElement(config, "mac", xmlns="urn:brocade.com:mgmt:brocade-mac-access-list")
access_list = ET.SubElement(mac, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_mac_acl_std = ET.SubElement(standard, "hide-mac-acl-std")
seq = ET.SubElement(hide_mac_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
def function[mac_access_list_standard_hide_mac_acl_std_seq_action, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[mac] assign[=] call[name[ET].SubElement, parameter[name[config], constant[mac]]]
variable[access_list] assign[=] call[name[ET].SubElement, parameter[name[mac], constant[access-list]]]
variable[standard] assign[=] call[name[ET].SubElement, parameter[name[access_list], constant[standard]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[standard], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[hide_mac_acl_std] assign[=] call[name[ET].SubElement, parameter[name[standard], constant[hide-mac-acl-std]]]
variable[seq] assign[=] call[name[ET].SubElement, parameter[name[hide_mac_acl_std], constant[seq]]]
variable[seq_id_key] assign[=] call[name[ET].SubElement, parameter[name[seq], constant[seq-id]]]
name[seq_id_key].text assign[=] call[name[kwargs].pop, parameter[constant[seq_id]]]
variable[action] assign[=] call[name[ET].SubElement, parameter[name[seq], constant[action]]]
name[action].text assign[=] call[name[kwargs].pop, parameter[constant[action]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]]
|
keyword[def] identifier[mac_access_list_standard_hide_mac_acl_std_seq_action] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[mac] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[access_list] = identifier[ET] . identifier[SubElement] ( identifier[mac] , literal[string] )
identifier[standard] = identifier[ET] . identifier[SubElement] ( identifier[access_list] , literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[standard] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[hide_mac_acl_std] = identifier[ET] . identifier[SubElement] ( identifier[standard] , literal[string] )
identifier[seq] = identifier[ET] . identifier[SubElement] ( identifier[hide_mac_acl_std] , literal[string] )
identifier[seq_id_key] = identifier[ET] . identifier[SubElement] ( identifier[seq] , literal[string] )
identifier[seq_id_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[action] = identifier[ET] . identifier[SubElement] ( identifier[seq] , literal[string] )
identifier[action] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] )
|
def mac_access_list_standard_hide_mac_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
mac = ET.SubElement(config, 'mac', xmlns='urn:brocade.com:mgmt:brocade-mac-access-list')
access_list = ET.SubElement(mac, 'access-list')
standard = ET.SubElement(access_list, 'standard')
name_key = ET.SubElement(standard, 'name')
name_key.text = kwargs.pop('name')
hide_mac_acl_std = ET.SubElement(standard, 'hide-mac-acl-std')
seq = ET.SubElement(hide_mac_acl_std, 'seq')
seq_id_key = ET.SubElement(seq, 'seq-id')
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, 'action')
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
def _transform_local_field_to_expression(expression, node, context):
"""Transform a LocalField compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, LocalField compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
column_name = expression.field_name
column = sql_context_helpers.get_column(column_name, node, context)
return column
|
def function[_transform_local_field_to_expression, parameter[expression, node, context]]:
constant[Transform a LocalField compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, LocalField compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
]
variable[column_name] assign[=] name[expression].field_name
variable[column] assign[=] call[name[sql_context_helpers].get_column, parameter[name[column_name], name[node], name[context]]]
return[name[column]]
|
keyword[def] identifier[_transform_local_field_to_expression] ( identifier[expression] , identifier[node] , identifier[context] ):
literal[string]
identifier[column_name] = identifier[expression] . identifier[field_name]
identifier[column] = identifier[sql_context_helpers] . identifier[get_column] ( identifier[column_name] , identifier[node] , identifier[context] )
keyword[return] identifier[column]
|
def _transform_local_field_to_expression(expression, node, context):
"""Transform a LocalField compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, LocalField compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
column_name = expression.field_name
column = sql_context_helpers.get_column(column_name, node, context)
return column
|
def grantxml2json(self, grant_xml):
"""Convert OpenAIRE grant XML into JSON."""
tree = etree.fromstring(grant_xml)
# XML harvested from OAI-PMH has a different format/structure
if tree.prefix == 'oai':
ptree = self.get_subtree(
tree, '/oai:record/oai:metadata/oaf:entity/oaf:project')[0]
header = self.get_subtree(tree, '/oai:record/oai:header')[0]
oai_id = self.get_text_node(header, 'oai:identifier')
modified = self.get_text_node(header, 'oai:datestamp')
else:
ptree = self.get_subtree(
tree, '/record/result/metadata/oaf:entity/oaf:project')[0]
header = self.get_subtree(tree, '/record/result/header')[0]
oai_id = self.get_text_node(header, 'dri:objIdentifier')
modified = self.get_text_node(header, 'dri:dateOfTransformation')
url = self.get_text_node(ptree, 'websiteurl')
code = self.get_text_node(ptree, 'code')
title = self.get_text_node(ptree, 'title')
acronym = self.get_text_node(ptree, 'acronym')
startdate = self.get_text_node(ptree, 'startdate')
enddate = self.get_text_node(ptree, 'enddate')
funder = self.fundertree2json(ptree, oai_id)
internal_id = "{0}::{1}".format(funder['doi'], code)
eurepo_id = \
"info:eu-repo/grantAgreement/{funder}/{program}/{code}/".format(
funder=quote_plus(funder['name'].encode('utf8')),
program=quote_plus(funder['program'].encode('utf8')),
code=quote_plus(code.encode('utf8')), )
ret_json = {
'$schema': self.schema_formatter.schema_url,
'internal_id': internal_id,
'identifiers': {
'oaf': oai_id,
'eurepo': eurepo_id,
'purl': url if url.startswith("http://purl.org/") else None,
},
'code': code,
'title': title,
'acronym': acronym,
'startdate': startdate,
'enddate': enddate,
'funder': {'$ref': funder['url']},
'program': funder['program'],
'url': url,
'remote_modified': modified,
}
return ret_json
|
def function[grantxml2json, parameter[self, grant_xml]]:
constant[Convert OpenAIRE grant XML into JSON.]
variable[tree] assign[=] call[name[etree].fromstring, parameter[name[grant_xml]]]
if compare[name[tree].prefix equal[==] constant[oai]] begin[:]
variable[ptree] assign[=] call[call[name[self].get_subtree, parameter[name[tree], constant[/oai:record/oai:metadata/oaf:entity/oaf:project]]]][constant[0]]
variable[header] assign[=] call[call[name[self].get_subtree, parameter[name[tree], constant[/oai:record/oai:header]]]][constant[0]]
variable[oai_id] assign[=] call[name[self].get_text_node, parameter[name[header], constant[oai:identifier]]]
variable[modified] assign[=] call[name[self].get_text_node, parameter[name[header], constant[oai:datestamp]]]
variable[url] assign[=] call[name[self].get_text_node, parameter[name[ptree], constant[websiteurl]]]
variable[code] assign[=] call[name[self].get_text_node, parameter[name[ptree], constant[code]]]
variable[title] assign[=] call[name[self].get_text_node, parameter[name[ptree], constant[title]]]
variable[acronym] assign[=] call[name[self].get_text_node, parameter[name[ptree], constant[acronym]]]
variable[startdate] assign[=] call[name[self].get_text_node, parameter[name[ptree], constant[startdate]]]
variable[enddate] assign[=] call[name[self].get_text_node, parameter[name[ptree], constant[enddate]]]
variable[funder] assign[=] call[name[self].fundertree2json, parameter[name[ptree], name[oai_id]]]
variable[internal_id] assign[=] call[constant[{0}::{1}].format, parameter[call[name[funder]][constant[doi]], name[code]]]
variable[eurepo_id] assign[=] call[constant[info:eu-repo/grantAgreement/{funder}/{program}/{code}/].format, parameter[]]
variable[ret_json] assign[=] dictionary[[<ast.Constant object at 0x7da1b0bd60b0>, <ast.Constant object at 0x7da1b0bd4df0>, <ast.Constant object at 0x7da1b0bd5930>, <ast.Constant object at 0x7da1b0bd6d40>, <ast.Constant object at 0x7da1b0bd5210>, <ast.Constant object at 0x7da1b0a4f8b0>, <ast.Constant object at 0x7da1b0a4c040>, <ast.Constant object at 0x7da1b0a4d090>, <ast.Constant object at 0x7da1b0a4c2b0>, <ast.Constant object at 0x7da1b0a4d870>, <ast.Constant object at 0x7da1b0a4ec50>, <ast.Constant object at 0x7da1b0a4d360>], [<ast.Attribute object at 0x7da1b0a4f160>, <ast.Name object at 0x7da1b0bdb490>, <ast.Dict object at 0x7da1b0bd9ea0>, <ast.Name object at 0x7da1b0bd9cf0>, <ast.Name object at 0x7da1b0bd9510>, <ast.Name object at 0x7da1b0bdbb20>, <ast.Name object at 0x7da1b0bdaf50>, <ast.Name object at 0x7da1b0bd9ab0>, <ast.Dict object at 0x7da1b0bd82b0>, <ast.Subscript object at 0x7da1b0bd8580>, <ast.Name object at 0x7da1b0bdb280>, <ast.Name object at 0x7da1b0bd9f60>]]
return[name[ret_json]]
|
keyword[def] identifier[grantxml2json] ( identifier[self] , identifier[grant_xml] ):
literal[string]
identifier[tree] = identifier[etree] . identifier[fromstring] ( identifier[grant_xml] )
keyword[if] identifier[tree] . identifier[prefix] == literal[string] :
identifier[ptree] = identifier[self] . identifier[get_subtree] (
identifier[tree] , literal[string] )[ literal[int] ]
identifier[header] = identifier[self] . identifier[get_subtree] ( identifier[tree] , literal[string] )[ literal[int] ]
identifier[oai_id] = identifier[self] . identifier[get_text_node] ( identifier[header] , literal[string] )
identifier[modified] = identifier[self] . identifier[get_text_node] ( identifier[header] , literal[string] )
keyword[else] :
identifier[ptree] = identifier[self] . identifier[get_subtree] (
identifier[tree] , literal[string] )[ literal[int] ]
identifier[header] = identifier[self] . identifier[get_subtree] ( identifier[tree] , literal[string] )[ literal[int] ]
identifier[oai_id] = identifier[self] . identifier[get_text_node] ( identifier[header] , literal[string] )
identifier[modified] = identifier[self] . identifier[get_text_node] ( identifier[header] , literal[string] )
identifier[url] = identifier[self] . identifier[get_text_node] ( identifier[ptree] , literal[string] )
identifier[code] = identifier[self] . identifier[get_text_node] ( identifier[ptree] , literal[string] )
identifier[title] = identifier[self] . identifier[get_text_node] ( identifier[ptree] , literal[string] )
identifier[acronym] = identifier[self] . identifier[get_text_node] ( identifier[ptree] , literal[string] )
identifier[startdate] = identifier[self] . identifier[get_text_node] ( identifier[ptree] , literal[string] )
identifier[enddate] = identifier[self] . identifier[get_text_node] ( identifier[ptree] , literal[string] )
identifier[funder] = identifier[self] . identifier[fundertree2json] ( identifier[ptree] , identifier[oai_id] )
identifier[internal_id] = literal[string] . identifier[format] ( identifier[funder] [ literal[string] ], identifier[code] )
identifier[eurepo_id] = literal[string] . identifier[format] (
identifier[funder] = identifier[quote_plus] ( identifier[funder] [ literal[string] ]. identifier[encode] ( literal[string] )),
identifier[program] = identifier[quote_plus] ( identifier[funder] [ literal[string] ]. identifier[encode] ( literal[string] )),
identifier[code] = identifier[quote_plus] ( identifier[code] . identifier[encode] ( literal[string] )),)
identifier[ret_json] ={
literal[string] : identifier[self] . identifier[schema_formatter] . identifier[schema_url] ,
literal[string] : identifier[internal_id] ,
literal[string] :{
literal[string] : identifier[oai_id] ,
literal[string] : identifier[eurepo_id] ,
literal[string] : identifier[url] keyword[if] identifier[url] . identifier[startswith] ( literal[string] ) keyword[else] keyword[None] ,
},
literal[string] : identifier[code] ,
literal[string] : identifier[title] ,
literal[string] : identifier[acronym] ,
literal[string] : identifier[startdate] ,
literal[string] : identifier[enddate] ,
literal[string] :{ literal[string] : identifier[funder] [ literal[string] ]},
literal[string] : identifier[funder] [ literal[string] ],
literal[string] : identifier[url] ,
literal[string] : identifier[modified] ,
}
keyword[return] identifier[ret_json]
|
def grantxml2json(self, grant_xml):
"""Convert OpenAIRE grant XML into JSON."""
tree = etree.fromstring(grant_xml)
# XML harvested from OAI-PMH has a different format/structure
if tree.prefix == 'oai':
ptree = self.get_subtree(tree, '/oai:record/oai:metadata/oaf:entity/oaf:project')[0]
header = self.get_subtree(tree, '/oai:record/oai:header')[0]
oai_id = self.get_text_node(header, 'oai:identifier')
modified = self.get_text_node(header, 'oai:datestamp') # depends on [control=['if'], data=[]]
else:
ptree = self.get_subtree(tree, '/record/result/metadata/oaf:entity/oaf:project')[0]
header = self.get_subtree(tree, '/record/result/header')[0]
oai_id = self.get_text_node(header, 'dri:objIdentifier')
modified = self.get_text_node(header, 'dri:dateOfTransformation')
url = self.get_text_node(ptree, 'websiteurl')
code = self.get_text_node(ptree, 'code')
title = self.get_text_node(ptree, 'title')
acronym = self.get_text_node(ptree, 'acronym')
startdate = self.get_text_node(ptree, 'startdate')
enddate = self.get_text_node(ptree, 'enddate')
funder = self.fundertree2json(ptree, oai_id)
internal_id = '{0}::{1}'.format(funder['doi'], code)
eurepo_id = 'info:eu-repo/grantAgreement/{funder}/{program}/{code}/'.format(funder=quote_plus(funder['name'].encode('utf8')), program=quote_plus(funder['program'].encode('utf8')), code=quote_plus(code.encode('utf8')))
ret_json = {'$schema': self.schema_formatter.schema_url, 'internal_id': internal_id, 'identifiers': {'oaf': oai_id, 'eurepo': eurepo_id, 'purl': url if url.startswith('http://purl.org/') else None}, 'code': code, 'title': title, 'acronym': acronym, 'startdate': startdate, 'enddate': enddate, 'funder': {'$ref': funder['url']}, 'program': funder['program'], 'url': url, 'remote_modified': modified}
return ret_json
|
def representer(dumper, data):
"""
http://stackoverflow.com/a/14001707/4075339
http://stackoverflow.com/a/21912744/4075339
"""
tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
return dumper.represent_mapping(tag,list(data.todict().items()),flow_style=True)
|
def function[representer, parameter[dumper, data]]:
constant[
http://stackoverflow.com/a/14001707/4075339
http://stackoverflow.com/a/21912744/4075339
]
variable[tag] assign[=] name[yaml].resolver.BaseResolver.DEFAULT_MAPPING_TAG
return[call[name[dumper].represent_mapping, parameter[name[tag], call[name[list], parameter[call[call[name[data].todict, parameter[]].items, parameter[]]]]]]]
|
keyword[def] identifier[representer] ( identifier[dumper] , identifier[data] ):
literal[string]
identifier[tag] = identifier[yaml] . identifier[resolver] . identifier[BaseResolver] . identifier[DEFAULT_MAPPING_TAG]
keyword[return] identifier[dumper] . identifier[represent_mapping] ( identifier[tag] , identifier[list] ( identifier[data] . identifier[todict] (). identifier[items] ()), identifier[flow_style] = keyword[True] )
|
def representer(dumper, data):
"""
http://stackoverflow.com/a/14001707/4075339
http://stackoverflow.com/a/21912744/4075339
"""
tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
return dumper.represent_mapping(tag, list(data.todict().items()), flow_style=True)
|
def inline_callbacks(original, debug=False):
"""
Decorate a function like ``inlineCallbacks`` would but in a more
Eliot-friendly way. Use it just like ``inlineCallbacks`` but where you
want Eliot action contexts to Do The Right Thing inside the decorated
function.
"""
f = eliot_friendly_generator_function(original)
if debug:
f.debug = True
return inlineCallbacks(f)
|
def function[inline_callbacks, parameter[original, debug]]:
constant[
Decorate a function like ``inlineCallbacks`` would but in a more
Eliot-friendly way. Use it just like ``inlineCallbacks`` but where you
want Eliot action contexts to Do The Right Thing inside the decorated
function.
]
variable[f] assign[=] call[name[eliot_friendly_generator_function], parameter[name[original]]]
if name[debug] begin[:]
name[f].debug assign[=] constant[True]
return[call[name[inlineCallbacks], parameter[name[f]]]]
|
keyword[def] identifier[inline_callbacks] ( identifier[original] , identifier[debug] = keyword[False] ):
literal[string]
identifier[f] = identifier[eliot_friendly_generator_function] ( identifier[original] )
keyword[if] identifier[debug] :
identifier[f] . identifier[debug] = keyword[True]
keyword[return] identifier[inlineCallbacks] ( identifier[f] )
|
def inline_callbacks(original, debug=False):
"""
Decorate a function like ``inlineCallbacks`` would but in a more
Eliot-friendly way. Use it just like ``inlineCallbacks`` but where you
want Eliot action contexts to Do The Right Thing inside the decorated
function.
"""
f = eliot_friendly_generator_function(original)
if debug:
f.debug = True # depends on [control=['if'], data=[]]
return inlineCallbacks(f)
|
def _fsic_queuing_calc(fsic1, fsic2):
"""
We set the lower counter between two same instance ids.
If an instance_id exists in one fsic but not the other we want to give that counter a value of 0.
:param fsic1: dictionary containing (instance_id, counter) pairs
:param fsic2: dictionary containing (instance_id, counter) pairs
:return ``dict`` of fsics to be used in queueing the correct records to the buffer
"""
return {instance: fsic2.get(instance, 0) for instance, counter in six.iteritems(fsic1) if fsic2.get(instance, 0) < counter}
|
def function[_fsic_queuing_calc, parameter[fsic1, fsic2]]:
constant[
We set the lower counter between two same instance ids.
If an instance_id exists in one fsic but not the other we want to give that counter a value of 0.
:param fsic1: dictionary containing (instance_id, counter) pairs
:param fsic2: dictionary containing (instance_id, counter) pairs
:return ``dict`` of fsics to be used in queueing the correct records to the buffer
]
return[<ast.DictComp object at 0x7da18ede4cd0>]
|
keyword[def] identifier[_fsic_queuing_calc] ( identifier[fsic1] , identifier[fsic2] ):
literal[string]
keyword[return] { identifier[instance] : identifier[fsic2] . identifier[get] ( identifier[instance] , literal[int] ) keyword[for] identifier[instance] , identifier[counter] keyword[in] identifier[six] . identifier[iteritems] ( identifier[fsic1] ) keyword[if] identifier[fsic2] . identifier[get] ( identifier[instance] , literal[int] )< identifier[counter] }
|
def _fsic_queuing_calc(fsic1, fsic2):
"""
We set the lower counter between two same instance ids.
If an instance_id exists in one fsic but not the other we want to give that counter a value of 0.
:param fsic1: dictionary containing (instance_id, counter) pairs
:param fsic2: dictionary containing (instance_id, counter) pairs
:return ``dict`` of fsics to be used in queueing the correct records to the buffer
"""
return {instance: fsic2.get(instance, 0) for (instance, counter) in six.iteritems(fsic1) if fsic2.get(instance, 0) < counter}
|
def open(self, interface_name, namespaced=False, connection=None):
"""Open a new connection and get a client interface handle with the varlink methods installed.
:param interface_name: an interface name, which the service this client object is
connected to, provides.
:param namespaced: If arguments and return values are instances of SimpleNamespace
rather than dictionaries.
:param connection: If set, get the interface handle for an already opened connection.
:exception InterfaceNotFound: if the interface is not found
"""
if not connection:
connection = self.open_connection()
if interface_name not in self._interfaces:
self.get_interface(interface_name, socket_connection=connection)
if interface_name not in self._interfaces:
raise InterfaceNotFound(interface_name)
return self.handler(self._interfaces[interface_name], connection, namespaced=namespaced)
|
def function[open, parameter[self, interface_name, namespaced, connection]]:
constant[Open a new connection and get a client interface handle with the varlink methods installed.
:param interface_name: an interface name, which the service this client object is
connected to, provides.
:param namespaced: If arguments and return values are instances of SimpleNamespace
rather than dictionaries.
:param connection: If set, get the interface handle for an already opened connection.
:exception InterfaceNotFound: if the interface is not found
]
if <ast.UnaryOp object at 0x7da1b0e59ba0> begin[:]
variable[connection] assign[=] call[name[self].open_connection, parameter[]]
if compare[name[interface_name] <ast.NotIn object at 0x7da2590d7190> name[self]._interfaces] begin[:]
call[name[self].get_interface, parameter[name[interface_name]]]
if compare[name[interface_name] <ast.NotIn object at 0x7da2590d7190> name[self]._interfaces] begin[:]
<ast.Raise object at 0x7da1b0e59960>
return[call[name[self].handler, parameter[call[name[self]._interfaces][name[interface_name]], name[connection]]]]
|
keyword[def] identifier[open] ( identifier[self] , identifier[interface_name] , identifier[namespaced] = keyword[False] , identifier[connection] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[connection] :
identifier[connection] = identifier[self] . identifier[open_connection] ()
keyword[if] identifier[interface_name] keyword[not] keyword[in] identifier[self] . identifier[_interfaces] :
identifier[self] . identifier[get_interface] ( identifier[interface_name] , identifier[socket_connection] = identifier[connection] )
keyword[if] identifier[interface_name] keyword[not] keyword[in] identifier[self] . identifier[_interfaces] :
keyword[raise] identifier[InterfaceNotFound] ( identifier[interface_name] )
keyword[return] identifier[self] . identifier[handler] ( identifier[self] . identifier[_interfaces] [ identifier[interface_name] ], identifier[connection] , identifier[namespaced] = identifier[namespaced] )
|
def open(self, interface_name, namespaced=False, connection=None):
"""Open a new connection and get a client interface handle with the varlink methods installed.
:param interface_name: an interface name, which the service this client object is
connected to, provides.
:param namespaced: If arguments and return values are instances of SimpleNamespace
rather than dictionaries.
:param connection: If set, get the interface handle for an already opened connection.
:exception InterfaceNotFound: if the interface is not found
"""
if not connection:
connection = self.open_connection() # depends on [control=['if'], data=[]]
if interface_name not in self._interfaces:
self.get_interface(interface_name, socket_connection=connection) # depends on [control=['if'], data=['interface_name']]
if interface_name not in self._interfaces:
raise InterfaceNotFound(interface_name) # depends on [control=['if'], data=['interface_name']]
return self.handler(self._interfaces[interface_name], connection, namespaced=namespaced)
|
def save(self):
"""
Creates this index in the collection if it hasn't been already created
"""
api = Client.instance().api
index_details = {
'type': self.index_type_obj.type_name
}
extra_index_attributes = self.index_type_obj.get_extra_attributes()
for extra_attribute_key in extra_index_attributes:
extra_attribute_value = extra_index_attributes[extra_attribute_key]
index_details[extra_attribute_key] = extra_attribute_value
query_parameters = {
'collection': self.collection.name,
}
result = api.index.post(data=index_details, **query_parameters)
self.index_type_obj.is_new = result['isNewlyCreated']
self.index_type_obj.id = result['id']
|
def function[save, parameter[self]]:
constant[
Creates this index in the collection if it hasn't been already created
]
variable[api] assign[=] call[name[Client].instance, parameter[]].api
variable[index_details] assign[=] dictionary[[<ast.Constant object at 0x7da18ede6a70>], [<ast.Attribute object at 0x7da18ede5750>]]
variable[extra_index_attributes] assign[=] call[name[self].index_type_obj.get_extra_attributes, parameter[]]
for taget[name[extra_attribute_key]] in starred[name[extra_index_attributes]] begin[:]
variable[extra_attribute_value] assign[=] call[name[extra_index_attributes]][name[extra_attribute_key]]
call[name[index_details]][name[extra_attribute_key]] assign[=] name[extra_attribute_value]
variable[query_parameters] assign[=] dictionary[[<ast.Constant object at 0x7da18ede6260>], [<ast.Attribute object at 0x7da18ede50f0>]]
variable[result] assign[=] call[name[api].index.post, parameter[]]
name[self].index_type_obj.is_new assign[=] call[name[result]][constant[isNewlyCreated]]
name[self].index_type_obj.id assign[=] call[name[result]][constant[id]]
|
keyword[def] identifier[save] ( identifier[self] ):
literal[string]
identifier[api] = identifier[Client] . identifier[instance] (). identifier[api]
identifier[index_details] ={
literal[string] : identifier[self] . identifier[index_type_obj] . identifier[type_name]
}
identifier[extra_index_attributes] = identifier[self] . identifier[index_type_obj] . identifier[get_extra_attributes] ()
keyword[for] identifier[extra_attribute_key] keyword[in] identifier[extra_index_attributes] :
identifier[extra_attribute_value] = identifier[extra_index_attributes] [ identifier[extra_attribute_key] ]
identifier[index_details] [ identifier[extra_attribute_key] ]= identifier[extra_attribute_value]
identifier[query_parameters] ={
literal[string] : identifier[self] . identifier[collection] . identifier[name] ,
}
identifier[result] = identifier[api] . identifier[index] . identifier[post] ( identifier[data] = identifier[index_details] ,** identifier[query_parameters] )
identifier[self] . identifier[index_type_obj] . identifier[is_new] = identifier[result] [ literal[string] ]
identifier[self] . identifier[index_type_obj] . identifier[id] = identifier[result] [ literal[string] ]
|
def save(self):
"""
Creates this index in the collection if it hasn't been already created
"""
api = Client.instance().api
index_details = {'type': self.index_type_obj.type_name}
extra_index_attributes = self.index_type_obj.get_extra_attributes()
for extra_attribute_key in extra_index_attributes:
extra_attribute_value = extra_index_attributes[extra_attribute_key]
index_details[extra_attribute_key] = extra_attribute_value # depends on [control=['for'], data=['extra_attribute_key']]
query_parameters = {'collection': self.collection.name}
result = api.index.post(data=index_details, **query_parameters)
self.index_type_obj.is_new = result['isNewlyCreated']
self.index_type_obj.id = result['id']
|
def get_main_pattern(self, directory):
"""
Get the :func:`~glob.glob()` pattern to find the main configuration file.
:param directory: The pathname of a base directory (a string).
:returns: A filename pattern (a string).
This method generates a pattern that matches a filename based on
:attr:`program_name` with the suffix :attr:`filename_extension` in the
given base `directory`. Here's an example:
>>> from update_dotdee import ConfigLoader
>>> loader = ConfigLoader(program_name='update-dotdee')
>>> [loader.get_main_pattern(d) for d in loader.base_directories]
['/etc/update-dotdee.ini',
'~/.update-dotdee.ini',
'~/.config/update-dotdee.ini']
"""
return os.path.join(directory, format(
'{prefix}{program_name}.{extension}',
extension=self.filename_extension.lstrip('.'),
program_name=self.program_name,
prefix=self.get_prefix(directory),
))
|
def function[get_main_pattern, parameter[self, directory]]:
constant[
Get the :func:`~glob.glob()` pattern to find the main configuration file.
:param directory: The pathname of a base directory (a string).
:returns: A filename pattern (a string).
This method generates a pattern that matches a filename based on
:attr:`program_name` with the suffix :attr:`filename_extension` in the
given base `directory`. Here's an example:
>>> from update_dotdee import ConfigLoader
>>> loader = ConfigLoader(program_name='update-dotdee')
>>> [loader.get_main_pattern(d) for d in loader.base_directories]
['/etc/update-dotdee.ini',
'~/.update-dotdee.ini',
'~/.config/update-dotdee.ini']
]
return[call[name[os].path.join, parameter[name[directory], call[name[format], parameter[constant[{prefix}{program_name}.{extension}]]]]]]
|
keyword[def] identifier[get_main_pattern] ( identifier[self] , identifier[directory] ):
literal[string]
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[directory] , identifier[format] (
literal[string] ,
identifier[extension] = identifier[self] . identifier[filename_extension] . identifier[lstrip] ( literal[string] ),
identifier[program_name] = identifier[self] . identifier[program_name] ,
identifier[prefix] = identifier[self] . identifier[get_prefix] ( identifier[directory] ),
))
|
def get_main_pattern(self, directory):
"""
Get the :func:`~glob.glob()` pattern to find the main configuration file.
:param directory: The pathname of a base directory (a string).
:returns: A filename pattern (a string).
This method generates a pattern that matches a filename based on
:attr:`program_name` with the suffix :attr:`filename_extension` in the
given base `directory`. Here's an example:
>>> from update_dotdee import ConfigLoader
>>> loader = ConfigLoader(program_name='update-dotdee')
>>> [loader.get_main_pattern(d) for d in loader.base_directories]
['/etc/update-dotdee.ini',
'~/.update-dotdee.ini',
'~/.config/update-dotdee.ini']
"""
return os.path.join(directory, format('{prefix}{program_name}.{extension}', extension=self.filename_extension.lstrip('.'), program_name=self.program_name, prefix=self.get_prefix(directory)))
|
def get_results(cmd):
"""
def get_results(cmd: list) -> str:
return lines
Get the ping results using fping.
:param cmd: List - the fping command and its options
:return: String - raw string output containing csv fping results
including the newline characters
"""
try:
return subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
return e.output
|
def function[get_results, parameter[cmd]]:
constant[
def get_results(cmd: list) -> str:
return lines
Get the ping results using fping.
:param cmd: List - the fping command and its options
:return: String - raw string output containing csv fping results
including the newline characters
]
<ast.Try object at 0x7da18eb553f0>
|
keyword[def] identifier[get_results] ( identifier[cmd] ):
literal[string]
keyword[try] :
keyword[return] identifier[subprocess] . identifier[check_output] ( identifier[cmd] )
keyword[except] identifier[subprocess] . identifier[CalledProcessError] keyword[as] identifier[e] :
keyword[return] identifier[e] . identifier[output]
|
def get_results(cmd):
"""
def get_results(cmd: list) -> str:
return lines
Get the ping results using fping.
:param cmd: List - the fping command and its options
:return: String - raw string output containing csv fping results
including the newline characters
"""
try:
return subprocess.check_output(cmd) # depends on [control=['try'], data=[]]
except subprocess.CalledProcessError as e:
return e.output # depends on [control=['except'], data=['e']]
|
def return_future(fn):
"""Decorator that turns a synchronous function into one returning a future.
This should only be applied to non-blocking functions. Will do set_result()
with the return value, or set_exc_info() if an exception is raised.
"""
@wraps(fn)
def decorated(*args, **kwargs):
return gen.maybe_future(fn(*args, **kwargs))
return decorated
|
def function[return_future, parameter[fn]]:
constant[Decorator that turns a synchronous function into one returning a future.
This should only be applied to non-blocking functions. Will do set_result()
with the return value, or set_exc_info() if an exception is raised.
]
def function[decorated, parameter[]]:
return[call[name[gen].maybe_future, parameter[call[name[fn], parameter[<ast.Starred object at 0x7da1b05d8d90>]]]]]
return[name[decorated]]
|
keyword[def] identifier[return_future] ( identifier[fn] ):
literal[string]
@ identifier[wraps] ( identifier[fn] )
keyword[def] identifier[decorated] (* identifier[args] ,** identifier[kwargs] ):
keyword[return] identifier[gen] . identifier[maybe_future] ( identifier[fn] (* identifier[args] ,** identifier[kwargs] ))
keyword[return] identifier[decorated]
|
def return_future(fn):
"""Decorator that turns a synchronous function into one returning a future.
This should only be applied to non-blocking functions. Will do set_result()
with the return value, or set_exc_info() if an exception is raised.
"""
@wraps(fn)
def decorated(*args, **kwargs):
return gen.maybe_future(fn(*args, **kwargs))
return decorated
|
def post(self, content, endpoint=''):
'''
Issue a POST request with `content` as the body to `endpoint` and
return the result.
'''
url = self.url(endpoint)
post_content = json.dumps(content).encode('utf-8')
headers = {'Content-Type': 'application/json'}
request = Request(url, post_content, headers)
response = urlopen(request)
return json.loads(response.read().decode('utf-8'))
|
def function[post, parameter[self, content, endpoint]]:
constant[
Issue a POST request with `content` as the body to `endpoint` and
return the result.
]
variable[url] assign[=] call[name[self].url, parameter[name[endpoint]]]
variable[post_content] assign[=] call[call[name[json].dumps, parameter[name[content]]].encode, parameter[constant[utf-8]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da20cabeb00>], [<ast.Constant object at 0x7da20cabf700>]]
variable[request] assign[=] call[name[Request], parameter[name[url], name[post_content], name[headers]]]
variable[response] assign[=] call[name[urlopen], parameter[name[request]]]
return[call[name[json].loads, parameter[call[call[name[response].read, parameter[]].decode, parameter[constant[utf-8]]]]]]
|
keyword[def] identifier[post] ( identifier[self] , identifier[content] , identifier[endpoint] = literal[string] ):
literal[string]
identifier[url] = identifier[self] . identifier[url] ( identifier[endpoint] )
identifier[post_content] = identifier[json] . identifier[dumps] ( identifier[content] ). identifier[encode] ( literal[string] )
identifier[headers] ={ literal[string] : literal[string] }
identifier[request] = identifier[Request] ( identifier[url] , identifier[post_content] , identifier[headers] )
identifier[response] = identifier[urlopen] ( identifier[request] )
keyword[return] identifier[json] . identifier[loads] ( identifier[response] . identifier[read] (). identifier[decode] ( literal[string] ))
|
def post(self, content, endpoint=''):
"""
Issue a POST request with `content` as the body to `endpoint` and
return the result.
"""
url = self.url(endpoint)
post_content = json.dumps(content).encode('utf-8')
headers = {'Content-Type': 'application/json'}
request = Request(url, post_content, headers)
response = urlopen(request)
return json.loads(response.read().decode('utf-8'))
|
def clear(self, scope = 'screen'):
"""see doc in Term class
According to http://support.microsoft.com/kb/99261 the best way
to clear the console is to write out empty spaces
"""
#TODO: clear attributes too
if scope == 'screen':
bos = (0, self._get_console_info()['window']['top'])
cols, lines = self.get_size()
length = cols * lines
self._clear_console(length, bos)
self.move('beginning of screen')
elif scope == ' beginning of line':
pass
elif scope == 'end of line':
curx, cury = self._get_position()
cols, lines = self.get_size()
coord = (curx, cury)
length = cols - curx
self._clear_console(length, coord)
elif scope == 'end of screen':
curx, cury = self._get_position()
coord = (curx, cury)
cols, lines = self.get_size()
length = (lines - cury) * cols - curx
self._clear_console(length, coord)
elif scope == 'line':
curx, cury = self._get_position()
coord = (0, cury)
cols, lines = self.get_size()
self._clear_console(cols, coord)
self._set_position((curx, cury))
elif scope == 'left':
self.move('left')
self.write(' ')
elif scope == 'right':
self.write(' ')
self.move('left')
else:
raise ValueError("invalid scope to clear")
|
def function[clear, parameter[self, scope]]:
constant[see doc in Term class
According to http://support.microsoft.com/kb/99261 the best way
to clear the console is to write out empty spaces
]
if compare[name[scope] equal[==] constant[screen]] begin[:]
variable[bos] assign[=] tuple[[<ast.Constant object at 0x7da18f00fe50>, <ast.Subscript object at 0x7da18f00d270>]]
<ast.Tuple object at 0x7da18f00da20> assign[=] call[name[self].get_size, parameter[]]
variable[length] assign[=] binary_operation[name[cols] * name[lines]]
call[name[self]._clear_console, parameter[name[length], name[bos]]]
call[name[self].move, parameter[constant[beginning of screen]]]
|
keyword[def] identifier[clear] ( identifier[self] , identifier[scope] = literal[string] ):
literal[string]
keyword[if] identifier[scope] == literal[string] :
identifier[bos] =( literal[int] , identifier[self] . identifier[_get_console_info] ()[ literal[string] ][ literal[string] ])
identifier[cols] , identifier[lines] = identifier[self] . identifier[get_size] ()
identifier[length] = identifier[cols] * identifier[lines]
identifier[self] . identifier[_clear_console] ( identifier[length] , identifier[bos] )
identifier[self] . identifier[move] ( literal[string] )
keyword[elif] identifier[scope] == literal[string] :
keyword[pass]
keyword[elif] identifier[scope] == literal[string] :
identifier[curx] , identifier[cury] = identifier[self] . identifier[_get_position] ()
identifier[cols] , identifier[lines] = identifier[self] . identifier[get_size] ()
identifier[coord] =( identifier[curx] , identifier[cury] )
identifier[length] = identifier[cols] - identifier[curx]
identifier[self] . identifier[_clear_console] ( identifier[length] , identifier[coord] )
keyword[elif] identifier[scope] == literal[string] :
identifier[curx] , identifier[cury] = identifier[self] . identifier[_get_position] ()
identifier[coord] =( identifier[curx] , identifier[cury] )
identifier[cols] , identifier[lines] = identifier[self] . identifier[get_size] ()
identifier[length] =( identifier[lines] - identifier[cury] )* identifier[cols] - identifier[curx]
identifier[self] . identifier[_clear_console] ( identifier[length] , identifier[coord] )
keyword[elif] identifier[scope] == literal[string] :
identifier[curx] , identifier[cury] = identifier[self] . identifier[_get_position] ()
identifier[coord] =( literal[int] , identifier[cury] )
identifier[cols] , identifier[lines] = identifier[self] . identifier[get_size] ()
identifier[self] . identifier[_clear_console] ( identifier[cols] , identifier[coord] )
identifier[self] . identifier[_set_position] (( identifier[curx] , identifier[cury] ))
keyword[elif] identifier[scope] == literal[string] :
identifier[self] . identifier[move] ( literal[string] )
identifier[self] . identifier[write] ( literal[string] )
keyword[elif] identifier[scope] == literal[string] :
identifier[self] . identifier[write] ( literal[string] )
identifier[self] . identifier[move] ( literal[string] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
|
def clear(self, scope='screen'):
"""see doc in Term class
According to http://support.microsoft.com/kb/99261 the best way
to clear the console is to write out empty spaces
"""
#TODO: clear attributes too
if scope == 'screen':
bos = (0, self._get_console_info()['window']['top'])
(cols, lines) = self.get_size()
length = cols * lines
self._clear_console(length, bos)
self.move('beginning of screen') # depends on [control=['if'], data=[]]
elif scope == ' beginning of line':
pass # depends on [control=['if'], data=[]]
elif scope == 'end of line':
(curx, cury) = self._get_position()
(cols, lines) = self.get_size()
coord = (curx, cury)
length = cols - curx
self._clear_console(length, coord) # depends on [control=['if'], data=[]]
elif scope == 'end of screen':
(curx, cury) = self._get_position()
coord = (curx, cury)
(cols, lines) = self.get_size()
length = (lines - cury) * cols - curx
self._clear_console(length, coord) # depends on [control=['if'], data=[]]
elif scope == 'line':
(curx, cury) = self._get_position()
coord = (0, cury)
(cols, lines) = self.get_size()
self._clear_console(cols, coord)
self._set_position((curx, cury)) # depends on [control=['if'], data=[]]
elif scope == 'left':
self.move('left')
self.write(' ') # depends on [control=['if'], data=[]]
elif scope == 'right':
self.write(' ')
self.move('left') # depends on [control=['if'], data=[]]
else:
raise ValueError('invalid scope to clear')
|
def _handle_execute_reply(self, msg):
"""Save the reply to an execute_request into our results.
execute messages are never actually used. apply is used instead.
"""
parent = msg['parent_header']
msg_id = parent['msg_id']
if msg_id not in self.outstanding:
if msg_id in self.history:
print ("got stale result: %s"%msg_id)
else:
print ("got unknown result: %s"%msg_id)
else:
self.outstanding.remove(msg_id)
content = msg['content']
header = msg['header']
# construct metadata:
md = self.metadata[msg_id]
md.update(self._extract_metadata(header, parent, content))
# is this redundant?
self.metadata[msg_id] = md
e_outstanding = self._outstanding_dict[md['engine_uuid']]
if msg_id in e_outstanding:
e_outstanding.remove(msg_id)
# construct result:
if content['status'] == 'ok':
self.results[msg_id] = ExecuteReply(msg_id, content, md)
elif content['status'] == 'aborted':
self.results[msg_id] = error.TaskAborted(msg_id)
elif content['status'] == 'resubmitted':
# TODO: handle resubmission
pass
else:
self.results[msg_id] = self._unwrap_exception(content)
|
def function[_handle_execute_reply, parameter[self, msg]]:
constant[Save the reply to an execute_request into our results.
execute messages are never actually used. apply is used instead.
]
variable[parent] assign[=] call[name[msg]][constant[parent_header]]
variable[msg_id] assign[=] call[name[parent]][constant[msg_id]]
if compare[name[msg_id] <ast.NotIn object at 0x7da2590d7190> name[self].outstanding] begin[:]
if compare[name[msg_id] in name[self].history] begin[:]
call[name[print], parameter[binary_operation[constant[got stale result: %s] <ast.Mod object at 0x7da2590d6920> name[msg_id]]]]
variable[content] assign[=] call[name[msg]][constant[content]]
variable[header] assign[=] call[name[msg]][constant[header]]
variable[md] assign[=] call[name[self].metadata][name[msg_id]]
call[name[md].update, parameter[call[name[self]._extract_metadata, parameter[name[header], name[parent], name[content]]]]]
call[name[self].metadata][name[msg_id]] assign[=] name[md]
variable[e_outstanding] assign[=] call[name[self]._outstanding_dict][call[name[md]][constant[engine_uuid]]]
if compare[name[msg_id] in name[e_outstanding]] begin[:]
call[name[e_outstanding].remove, parameter[name[msg_id]]]
if compare[call[name[content]][constant[status]] equal[==] constant[ok]] begin[:]
call[name[self].results][name[msg_id]] assign[=] call[name[ExecuteReply], parameter[name[msg_id], name[content], name[md]]]
|
keyword[def] identifier[_handle_execute_reply] ( identifier[self] , identifier[msg] ):
literal[string]
identifier[parent] = identifier[msg] [ literal[string] ]
identifier[msg_id] = identifier[parent] [ literal[string] ]
keyword[if] identifier[msg_id] keyword[not] keyword[in] identifier[self] . identifier[outstanding] :
keyword[if] identifier[msg_id] keyword[in] identifier[self] . identifier[history] :
identifier[print] ( literal[string] % identifier[msg_id] )
keyword[else] :
identifier[print] ( literal[string] % identifier[msg_id] )
keyword[else] :
identifier[self] . identifier[outstanding] . identifier[remove] ( identifier[msg_id] )
identifier[content] = identifier[msg] [ literal[string] ]
identifier[header] = identifier[msg] [ literal[string] ]
identifier[md] = identifier[self] . identifier[metadata] [ identifier[msg_id] ]
identifier[md] . identifier[update] ( identifier[self] . identifier[_extract_metadata] ( identifier[header] , identifier[parent] , identifier[content] ))
identifier[self] . identifier[metadata] [ identifier[msg_id] ]= identifier[md]
identifier[e_outstanding] = identifier[self] . identifier[_outstanding_dict] [ identifier[md] [ literal[string] ]]
keyword[if] identifier[msg_id] keyword[in] identifier[e_outstanding] :
identifier[e_outstanding] . identifier[remove] ( identifier[msg_id] )
keyword[if] identifier[content] [ literal[string] ]== literal[string] :
identifier[self] . identifier[results] [ identifier[msg_id] ]= identifier[ExecuteReply] ( identifier[msg_id] , identifier[content] , identifier[md] )
keyword[elif] identifier[content] [ literal[string] ]== literal[string] :
identifier[self] . identifier[results] [ identifier[msg_id] ]= identifier[error] . identifier[TaskAborted] ( identifier[msg_id] )
keyword[elif] identifier[content] [ literal[string] ]== literal[string] :
keyword[pass]
keyword[else] :
identifier[self] . identifier[results] [ identifier[msg_id] ]= identifier[self] . identifier[_unwrap_exception] ( identifier[content] )
|
def _handle_execute_reply(self, msg):
"""Save the reply to an execute_request into our results.
execute messages are never actually used. apply is used instead.
"""
parent = msg['parent_header']
msg_id = parent['msg_id']
if msg_id not in self.outstanding:
if msg_id in self.history:
print('got stale result: %s' % msg_id) # depends on [control=['if'], data=['msg_id']]
else:
print('got unknown result: %s' % msg_id) # depends on [control=['if'], data=['msg_id']]
else:
self.outstanding.remove(msg_id)
content = msg['content']
header = msg['header']
# construct metadata:
md = self.metadata[msg_id]
md.update(self._extract_metadata(header, parent, content))
# is this redundant?
self.metadata[msg_id] = md
e_outstanding = self._outstanding_dict[md['engine_uuid']]
if msg_id in e_outstanding:
e_outstanding.remove(msg_id) # depends on [control=['if'], data=['msg_id', 'e_outstanding']]
# construct result:
if content['status'] == 'ok':
self.results[msg_id] = ExecuteReply(msg_id, content, md) # depends on [control=['if'], data=[]]
elif content['status'] == 'aborted':
self.results[msg_id] = error.TaskAborted(msg_id) # depends on [control=['if'], data=[]]
elif content['status'] == 'resubmitted':
# TODO: handle resubmission
pass # depends on [control=['if'], data=[]]
else:
self.results[msg_id] = self._unwrap_exception(content)
|
def _conan_user_home(self, conan, in_workdir=False):
"""Create the CONAN_USER_HOME for this task fingerprint and initialize the Conan remotes.
See https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install
for docs on configuring remotes.
"""
# This argument is exposed so tests don't leak out of the workdir.
if in_workdir:
base_cache_dir = self.workdir
else:
base_cache_dir = get_pants_cachedir()
user_home_base = os.path.join(base_cache_dir, 'conan-support', 'conan-user-home')
# Locate the subdirectory of the pants shared cachedir specific to this task's option values.
user_home = os.path.join(user_home_base, self.fingerprint)
conan_install_base = os.path.join(user_home, '.conan')
# Conan doesn't copy remotes.txt into the .conan subdir after the "config install" command, it
# simply edits registry.json. However, it is valid to have this file there, and Conan won't
# touch it, so we use its presence to detect whether we have appropriately initialized the
# Conan installation.
remotes_txt_sentinel = os.path.join(conan_install_base, 'remotes.txt')
if not os.path.isfile(remotes_txt_sentinel):
safe_mkdir(conan_install_base)
# Conan doesn't consume the remotes.txt file just by being in the conan directory -- we need
# to create another directory containing any selection of files detailed in
# https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install
# and "install" from there to our desired conan directory.
with temporary_dir() as remotes_install_dir:
# Create an artificial conan configuration dir containing just remotes.txt.
remotes_txt_for_install = os.path.join(remotes_install_dir, 'remotes.txt')
safe_file_dump(remotes_txt_for_install, self._remotes_txt_content)
# Configure the desired user home from this artificial config dir.
argv = ['config', 'install', remotes_install_dir]
workunit_factory = functools.partial(
self.context.new_workunit,
name='initial-conan-config',
labels=[WorkUnitLabel.TOOL])
env = {
'CONAN_USER_HOME': user_home,
}
cmdline, exit_code = conan.run(workunit_factory, argv, env=env)
if exit_code != 0:
raise self.ConanConfigError(
'Error configuring conan with argv {} and environment {}: exited non-zero ({}).'
.format(cmdline, env, exit_code),
exit_code=exit_code)
# Generate the sentinel file so that we know the remotes have been successfully configured for
# this particular task fingerprint in successive pants runs.
safe_file_dump(remotes_txt_sentinel, self._remotes_txt_content)
return user_home
|
def function[_conan_user_home, parameter[self, conan, in_workdir]]:
constant[Create the CONAN_USER_HOME for this task fingerprint and initialize the Conan remotes.
See https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install
for docs on configuring remotes.
]
if name[in_workdir] begin[:]
variable[base_cache_dir] assign[=] name[self].workdir
variable[user_home_base] assign[=] call[name[os].path.join, parameter[name[base_cache_dir], constant[conan-support], constant[conan-user-home]]]
variable[user_home] assign[=] call[name[os].path.join, parameter[name[user_home_base], name[self].fingerprint]]
variable[conan_install_base] assign[=] call[name[os].path.join, parameter[name[user_home], constant[.conan]]]
variable[remotes_txt_sentinel] assign[=] call[name[os].path.join, parameter[name[conan_install_base], constant[remotes.txt]]]
if <ast.UnaryOp object at 0x7da1b22b8a30> begin[:]
call[name[safe_mkdir], parameter[name[conan_install_base]]]
with call[name[temporary_dir], parameter[]] begin[:]
variable[remotes_txt_for_install] assign[=] call[name[os].path.join, parameter[name[remotes_install_dir], constant[remotes.txt]]]
call[name[safe_file_dump], parameter[name[remotes_txt_for_install], name[self]._remotes_txt_content]]
variable[argv] assign[=] list[[<ast.Constant object at 0x7da1b22bbd90>, <ast.Constant object at 0x7da1b22ba890>, <ast.Name object at 0x7da1b22b90f0>]]
variable[workunit_factory] assign[=] call[name[functools].partial, parameter[name[self].context.new_workunit]]
variable[env] assign[=] dictionary[[<ast.Constant object at 0x7da1b22b8a90>], [<ast.Name object at 0x7da1b22bb280>]]
<ast.Tuple object at 0x7da1b2249c90> assign[=] call[name[conan].run, parameter[name[workunit_factory], name[argv]]]
if compare[name[exit_code] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b224a2f0>
call[name[safe_file_dump], parameter[name[remotes_txt_sentinel], name[self]._remotes_txt_content]]
return[name[user_home]]
|
keyword[def] identifier[_conan_user_home] ( identifier[self] , identifier[conan] , identifier[in_workdir] = keyword[False] ):
literal[string]
keyword[if] identifier[in_workdir] :
identifier[base_cache_dir] = identifier[self] . identifier[workdir]
keyword[else] :
identifier[base_cache_dir] = identifier[get_pants_cachedir] ()
identifier[user_home_base] = identifier[os] . identifier[path] . identifier[join] ( identifier[base_cache_dir] , literal[string] , literal[string] )
identifier[user_home] = identifier[os] . identifier[path] . identifier[join] ( identifier[user_home_base] , identifier[self] . identifier[fingerprint] )
identifier[conan_install_base] = identifier[os] . identifier[path] . identifier[join] ( identifier[user_home] , literal[string] )
identifier[remotes_txt_sentinel] = identifier[os] . identifier[path] . identifier[join] ( identifier[conan_install_base] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[remotes_txt_sentinel] ):
identifier[safe_mkdir] ( identifier[conan_install_base] )
keyword[with] identifier[temporary_dir] () keyword[as] identifier[remotes_install_dir] :
identifier[remotes_txt_for_install] = identifier[os] . identifier[path] . identifier[join] ( identifier[remotes_install_dir] , literal[string] )
identifier[safe_file_dump] ( identifier[remotes_txt_for_install] , identifier[self] . identifier[_remotes_txt_content] )
identifier[argv] =[ literal[string] , literal[string] , identifier[remotes_install_dir] ]
identifier[workunit_factory] = identifier[functools] . identifier[partial] (
identifier[self] . identifier[context] . identifier[new_workunit] ,
identifier[name] = literal[string] ,
identifier[labels] =[ identifier[WorkUnitLabel] . identifier[TOOL] ])
identifier[env] ={
literal[string] : identifier[user_home] ,
}
identifier[cmdline] , identifier[exit_code] = identifier[conan] . identifier[run] ( identifier[workunit_factory] , identifier[argv] , identifier[env] = identifier[env] )
keyword[if] identifier[exit_code] != literal[int] :
keyword[raise] identifier[self] . identifier[ConanConfigError] (
literal[string]
. identifier[format] ( identifier[cmdline] , identifier[env] , identifier[exit_code] ),
identifier[exit_code] = identifier[exit_code] )
identifier[safe_file_dump] ( identifier[remotes_txt_sentinel] , identifier[self] . identifier[_remotes_txt_content] )
keyword[return] identifier[user_home]
|
def _conan_user_home(self, conan, in_workdir=False):
"""Create the CONAN_USER_HOME for this task fingerprint and initialize the Conan remotes.
See https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install
for docs on configuring remotes.
"""
# This argument is exposed so tests don't leak out of the workdir.
if in_workdir:
base_cache_dir = self.workdir # depends on [control=['if'], data=[]]
else:
base_cache_dir = get_pants_cachedir()
user_home_base = os.path.join(base_cache_dir, 'conan-support', 'conan-user-home')
# Locate the subdirectory of the pants shared cachedir specific to this task's option values.
user_home = os.path.join(user_home_base, self.fingerprint)
conan_install_base = os.path.join(user_home, '.conan')
# Conan doesn't copy remotes.txt into the .conan subdir after the "config install" command, it
# simply edits registry.json. However, it is valid to have this file there, and Conan won't
# touch it, so we use its presence to detect whether we have appropriately initialized the
# Conan installation.
remotes_txt_sentinel = os.path.join(conan_install_base, 'remotes.txt')
if not os.path.isfile(remotes_txt_sentinel):
safe_mkdir(conan_install_base)
# Conan doesn't consume the remotes.txt file just by being in the conan directory -- we need
# to create another directory containing any selection of files detailed in
# https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install
# and "install" from there to our desired conan directory.
with temporary_dir() as remotes_install_dir:
# Create an artificial conan configuration dir containing just remotes.txt.
remotes_txt_for_install = os.path.join(remotes_install_dir, 'remotes.txt')
safe_file_dump(remotes_txt_for_install, self._remotes_txt_content)
# Configure the desired user home from this artificial config dir.
argv = ['config', 'install', remotes_install_dir]
workunit_factory = functools.partial(self.context.new_workunit, name='initial-conan-config', labels=[WorkUnitLabel.TOOL])
env = {'CONAN_USER_HOME': user_home}
(cmdline, exit_code) = conan.run(workunit_factory, argv, env=env)
if exit_code != 0:
raise self.ConanConfigError('Error configuring conan with argv {} and environment {}: exited non-zero ({}).'.format(cmdline, env, exit_code), exit_code=exit_code) # depends on [control=['if'], data=['exit_code']] # depends on [control=['with'], data=['remotes_install_dir']]
# Generate the sentinel file so that we know the remotes have been successfully configured for
# this particular task fingerprint in successive pants runs.
safe_file_dump(remotes_txt_sentinel, self._remotes_txt_content) # depends on [control=['if'], data=[]]
return user_home
|
def read_entry(self, file_name):
"""
Args:
file_name (str):
Returns:
pd.DataFrame:
"""
file_path = os.path.join(self.EXTRACTION_CACHE_PATH, file_name)
logger.info(f'Reading cache entry: {file_path}')
return joblib.load(file_path)
|
def function[read_entry, parameter[self, file_name]]:
constant[
Args:
file_name (str):
Returns:
pd.DataFrame:
]
variable[file_path] assign[=] call[name[os].path.join, parameter[name[self].EXTRACTION_CACHE_PATH, name[file_name]]]
call[name[logger].info, parameter[<ast.JoinedStr object at 0x7da1b032cb20>]]
return[call[name[joblib].load, parameter[name[file_path]]]]
|
keyword[def] identifier[read_entry] ( identifier[self] , identifier[file_name] ):
literal[string]
identifier[file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[EXTRACTION_CACHE_PATH] , identifier[file_name] )
identifier[logger] . identifier[info] ( literal[string] )
keyword[return] identifier[joblib] . identifier[load] ( identifier[file_path] )
|
def read_entry(self, file_name):
"""
Args:
file_name (str):
Returns:
pd.DataFrame:
"""
file_path = os.path.join(self.EXTRACTION_CACHE_PATH, file_name)
logger.info(f'Reading cache entry: {file_path}')
return joblib.load(file_path)
|
def flatten(d, parent_key='', separator='__'):
"""
Flatten a nested dictionary.
Parameters
----------
d: dict_like
Dictionary to flatten.
parent_key: string, optional
Concatenated names of the parent keys.
separator: string, optional
Separator between the names of the each key.
The default separator is '_'.
Examples
--------
>>> d = {'alpha': 1, 'beta': {'a': 10, 'b': 42}}
>>> flatten(d) == {'alpha': 1, 'beta_a': 10, 'beta_b': 42}
True
>>> flatten(d, separator='.') == {'alpha': 1, 'beta.a': 10, 'beta.b': 42}
True
"""
items = []
for k, v in d.items():
new_key = parent_key + separator + k if parent_key else k
if isinstance(v, (dict, OrderedDict)):
items.extend(flatten(v, new_key, separator).items())
else:
items.append((new_key, v))
return OrderedDict(items)
|
def function[flatten, parameter[d, parent_key, separator]]:
constant[
Flatten a nested dictionary.
Parameters
----------
d: dict_like
Dictionary to flatten.
parent_key: string, optional
Concatenated names of the parent keys.
separator: string, optional
Separator between the names of the each key.
The default separator is '_'.
Examples
--------
>>> d = {'alpha': 1, 'beta': {'a': 10, 'b': 42}}
>>> flatten(d) == {'alpha': 1, 'beta_a': 10, 'beta_b': 42}
True
>>> flatten(d, separator='.') == {'alpha': 1, 'beta.a': 10, 'beta.b': 42}
True
]
variable[items] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0dc15a0>, <ast.Name object at 0x7da1b0dc3850>]]] in starred[call[name[d].items, parameter[]]] begin[:]
variable[new_key] assign[=] <ast.IfExp object at 0x7da1b0dc1ae0>
if call[name[isinstance], parameter[name[v], tuple[[<ast.Name object at 0x7da1b0dc2500>, <ast.Name object at 0x7da1b0dc18a0>]]]] begin[:]
call[name[items].extend, parameter[call[call[name[flatten], parameter[name[v], name[new_key], name[separator]]].items, parameter[]]]]
return[call[name[OrderedDict], parameter[name[items]]]]
|
keyword[def] identifier[flatten] ( identifier[d] , identifier[parent_key] = literal[string] , identifier[separator] = literal[string] ):
literal[string]
identifier[items] =[]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] . identifier[items] ():
identifier[new_key] = identifier[parent_key] + identifier[separator] + identifier[k] keyword[if] identifier[parent_key] keyword[else] identifier[k]
keyword[if] identifier[isinstance] ( identifier[v] ,( identifier[dict] , identifier[OrderedDict] )):
identifier[items] . identifier[extend] ( identifier[flatten] ( identifier[v] , identifier[new_key] , identifier[separator] ). identifier[items] ())
keyword[else] :
identifier[items] . identifier[append] (( identifier[new_key] , identifier[v] ))
keyword[return] identifier[OrderedDict] ( identifier[items] )
|
def flatten(d, parent_key='', separator='__'):
"""
Flatten a nested dictionary.
Parameters
----------
d: dict_like
Dictionary to flatten.
parent_key: string, optional
Concatenated names of the parent keys.
separator: string, optional
Separator between the names of the each key.
The default separator is '_'.
Examples
--------
>>> d = {'alpha': 1, 'beta': {'a': 10, 'b': 42}}
>>> flatten(d) == {'alpha': 1, 'beta_a': 10, 'beta_b': 42}
True
>>> flatten(d, separator='.') == {'alpha': 1, 'beta.a': 10, 'beta.b': 42}
True
"""
items = []
for (k, v) in d.items():
new_key = parent_key + separator + k if parent_key else k
if isinstance(v, (dict, OrderedDict)):
items.extend(flatten(v, new_key, separator).items()) # depends on [control=['if'], data=[]]
else:
items.append((new_key, v)) # depends on [control=['for'], data=[]]
return OrderedDict(items)
|
def fromJSON(value):
"""loads the GP object from a JSON string """
j = json.loads(value)
v = GPRecordSet()
if "defaultValue" in j:
v.value = j['defaultValue']
else:
v.value = j['value']
if 'paramName' in j:
v.paramName = j['paramName']
elif 'name' in j:
v.paramName = j['name']
return v
|
def function[fromJSON, parameter[value]]:
constant[loads the GP object from a JSON string ]
variable[j] assign[=] call[name[json].loads, parameter[name[value]]]
variable[v] assign[=] call[name[GPRecordSet], parameter[]]
if compare[constant[defaultValue] in name[j]] begin[:]
name[v].value assign[=] call[name[j]][constant[defaultValue]]
if compare[constant[paramName] in name[j]] begin[:]
name[v].paramName assign[=] call[name[j]][constant[paramName]]
return[name[v]]
|
keyword[def] identifier[fromJSON] ( identifier[value] ):
literal[string]
identifier[j] = identifier[json] . identifier[loads] ( identifier[value] )
identifier[v] = identifier[GPRecordSet] ()
keyword[if] literal[string] keyword[in] identifier[j] :
identifier[v] . identifier[value] = identifier[j] [ literal[string] ]
keyword[else] :
identifier[v] . identifier[value] = identifier[j] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[j] :
identifier[v] . identifier[paramName] = identifier[j] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[j] :
identifier[v] . identifier[paramName] = identifier[j] [ literal[string] ]
keyword[return] identifier[v]
|
def fromJSON(value):
"""loads the GP object from a JSON string """
j = json.loads(value)
v = GPRecordSet()
if 'defaultValue' in j:
v.value = j['defaultValue'] # depends on [control=['if'], data=['j']]
else:
v.value = j['value']
if 'paramName' in j:
v.paramName = j['paramName'] # depends on [control=['if'], data=['j']]
elif 'name' in j:
v.paramName = j['name'] # depends on [control=['if'], data=['j']]
return v
|
def get_exchange_rate(base, target, *args, **kwargs):
"""
Return the ::base:: to ::target:: exchange rate.
Wraps around ::Cryptonator.get_exchange_rate::.
"""
return Cryptonator().get_exchange_rate(base, target, *args, **kwargs)
|
def function[get_exchange_rate, parameter[base, target]]:
constant[
Return the ::base:: to ::target:: exchange rate.
Wraps around ::Cryptonator.get_exchange_rate::.
]
return[call[call[name[Cryptonator], parameter[]].get_exchange_rate, parameter[name[base], name[target], <ast.Starred object at 0x7da1b0a4ef20>]]]
|
keyword[def] identifier[get_exchange_rate] ( identifier[base] , identifier[target] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[Cryptonator] (). identifier[get_exchange_rate] ( identifier[base] , identifier[target] ,* identifier[args] ,** identifier[kwargs] )
|
def get_exchange_rate(base, target, *args, **kwargs):
"""
Return the ::base:: to ::target:: exchange rate.
Wraps around ::Cryptonator.get_exchange_rate::.
"""
return Cryptonator().get_exchange_rate(base, target, *args, **kwargs)
|
def _do_log(self, client, _entry_class, payload=None, **kw):
"""Helper for :meth:`log_empty`, :meth:`log_text`, etc.
"""
client = self._require_client(client)
# Apply defaults
kw["log_name"] = kw.pop("log_name", self.full_name)
kw["labels"] = kw.pop("labels", self.labels)
kw["resource"] = kw.pop("resource", _GLOBAL_RESOURCE)
if payload is not None:
entry = _entry_class(payload=payload, **kw)
else:
entry = _entry_class(**kw)
api_repr = entry.to_api_repr()
client.logging_api.write_entries([api_repr])
|
def function[_do_log, parameter[self, client, _entry_class, payload]]:
constant[Helper for :meth:`log_empty`, :meth:`log_text`, etc.
]
variable[client] assign[=] call[name[self]._require_client, parameter[name[client]]]
call[name[kw]][constant[log_name]] assign[=] call[name[kw].pop, parameter[constant[log_name], name[self].full_name]]
call[name[kw]][constant[labels]] assign[=] call[name[kw].pop, parameter[constant[labels], name[self].labels]]
call[name[kw]][constant[resource]] assign[=] call[name[kw].pop, parameter[constant[resource], name[_GLOBAL_RESOURCE]]]
if compare[name[payload] is_not constant[None]] begin[:]
variable[entry] assign[=] call[name[_entry_class], parameter[]]
variable[api_repr] assign[=] call[name[entry].to_api_repr, parameter[]]
call[name[client].logging_api.write_entries, parameter[list[[<ast.Name object at 0x7da207f03010>]]]]
|
keyword[def] identifier[_do_log] ( identifier[self] , identifier[client] , identifier[_entry_class] , identifier[payload] = keyword[None] ,** identifier[kw] ):
literal[string]
identifier[client] = identifier[self] . identifier[_require_client] ( identifier[client] )
identifier[kw] [ literal[string] ]= identifier[kw] . identifier[pop] ( literal[string] , identifier[self] . identifier[full_name] )
identifier[kw] [ literal[string] ]= identifier[kw] . identifier[pop] ( literal[string] , identifier[self] . identifier[labels] )
identifier[kw] [ literal[string] ]= identifier[kw] . identifier[pop] ( literal[string] , identifier[_GLOBAL_RESOURCE] )
keyword[if] identifier[payload] keyword[is] keyword[not] keyword[None] :
identifier[entry] = identifier[_entry_class] ( identifier[payload] = identifier[payload] ,** identifier[kw] )
keyword[else] :
identifier[entry] = identifier[_entry_class] (** identifier[kw] )
identifier[api_repr] = identifier[entry] . identifier[to_api_repr] ()
identifier[client] . identifier[logging_api] . identifier[write_entries] ([ identifier[api_repr] ])
|
def _do_log(self, client, _entry_class, payload=None, **kw):
"""Helper for :meth:`log_empty`, :meth:`log_text`, etc.
"""
client = self._require_client(client)
# Apply defaults
kw['log_name'] = kw.pop('log_name', self.full_name)
kw['labels'] = kw.pop('labels', self.labels)
kw['resource'] = kw.pop('resource', _GLOBAL_RESOURCE)
if payload is not None:
entry = _entry_class(payload=payload, **kw) # depends on [control=['if'], data=['payload']]
else:
entry = _entry_class(**kw)
api_repr = entry.to_api_repr()
client.logging_api.write_entries([api_repr])
|
def square_root_mod_prime( a, p ):
"""Modular square root of a, mod p, p prime."""
# Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39.
# This module has been tested for all values in [0,p-1] for
# every prime p from 3 to 1229.
assert 0 <= a < p
assert 1 < p
if a == 0: return 0
if p == 2: return a
jac = jacobi( a, p )
if jac == -1: raise SquareRootError( "%d has no square root modulo %d" \
% ( a, p ) )
if p % 4 == 3: return modular_exp( a, (p+1)//4, p )
if p % 8 == 5:
d = modular_exp( a, (p-1)//4, p )
if d == 1: return modular_exp( a, (p+3)//8, p )
if d == p-1: return ( 2 * a * modular_exp( 4*a, (p-5)//8, p ) ) % p
raise RuntimeError("Shouldn't get here.")
for b in range( 2, p ):
if jacobi( b*b-4*a, p ) == -1:
f = ( a, -b, 1 )
ff = polynomial_exp_mod( ( 0, 1 ), (p+1)//2, f, p )
assert ff[1] == 0
return ff[0]
raise RuntimeError("No b found.")
|
def function[square_root_mod_prime, parameter[a, p]]:
constant[Modular square root of a, mod p, p prime.]
assert[compare[constant[0] less_or_equal[<=] name[a]]]
assert[compare[constant[1] less[<] name[p]]]
if compare[name[a] equal[==] constant[0]] begin[:]
return[constant[0]]
if compare[name[p] equal[==] constant[2]] begin[:]
return[name[a]]
variable[jac] assign[=] call[name[jacobi], parameter[name[a], name[p]]]
if compare[name[jac] equal[==] <ast.UnaryOp object at 0x7da18ede42e0>] begin[:]
<ast.Raise object at 0x7da18ede6ce0>
if compare[binary_operation[name[p] <ast.Mod object at 0x7da2590d6920> constant[4]] equal[==] constant[3]] begin[:]
return[call[name[modular_exp], parameter[name[a], binary_operation[binary_operation[name[p] + constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4]], name[p]]]]
if compare[binary_operation[name[p] <ast.Mod object at 0x7da2590d6920> constant[8]] equal[==] constant[5]] begin[:]
variable[d] assign[=] call[name[modular_exp], parameter[name[a], binary_operation[binary_operation[name[p] - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4]], name[p]]]
if compare[name[d] equal[==] constant[1]] begin[:]
return[call[name[modular_exp], parameter[name[a], binary_operation[binary_operation[name[p] + constant[3]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]], name[p]]]]
if compare[name[d] equal[==] binary_operation[name[p] - constant[1]]] begin[:]
return[binary_operation[binary_operation[binary_operation[constant[2] * name[a]] * call[name[modular_exp], parameter[binary_operation[constant[4] * name[a]], binary_operation[binary_operation[name[p] - constant[5]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]], name[p]]]] <ast.Mod object at 0x7da2590d6920> name[p]]]
<ast.Raise object at 0x7da18ede6350>
for taget[name[b]] in starred[call[name[range], parameter[constant[2], name[p]]]] begin[:]
if compare[call[name[jacobi], parameter[binary_operation[binary_operation[name[b] * name[b]] - binary_operation[constant[4] * name[a]]], name[p]]] equal[==] <ast.UnaryOp object at 0x7da18ede5ed0>] begin[:]
variable[f] assign[=] tuple[[<ast.Name object at 0x7da18ede4730>, <ast.UnaryOp object at 0x7da18ede6f50>, <ast.Constant object at 0x7da18ede5300>]]
variable[ff] assign[=] call[name[polynomial_exp_mod], parameter[tuple[[<ast.Constant object at 0x7da18ede4790>, <ast.Constant object at 0x7da18ede46a0>]], binary_operation[binary_operation[name[p] + constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]], name[f], name[p]]]
assert[compare[call[name[ff]][constant[1]] equal[==] constant[0]]]
return[call[name[ff]][constant[0]]]
<ast.Raise object at 0x7da18ede7220>
|
keyword[def] identifier[square_root_mod_prime] ( identifier[a] , identifier[p] ):
literal[string]
keyword[assert] literal[int] <= identifier[a] < identifier[p]
keyword[assert] literal[int] < identifier[p]
keyword[if] identifier[a] == literal[int] : keyword[return] literal[int]
keyword[if] identifier[p] == literal[int] : keyword[return] identifier[a]
identifier[jac] = identifier[jacobi] ( identifier[a] , identifier[p] )
keyword[if] identifier[jac] ==- literal[int] : keyword[raise] identifier[SquareRootError] ( literal[string] %( identifier[a] , identifier[p] ))
keyword[if] identifier[p] % literal[int] == literal[int] : keyword[return] identifier[modular_exp] ( identifier[a] ,( identifier[p] + literal[int] )// literal[int] , identifier[p] )
keyword[if] identifier[p] % literal[int] == literal[int] :
identifier[d] = identifier[modular_exp] ( identifier[a] ,( identifier[p] - literal[int] )// literal[int] , identifier[p] )
keyword[if] identifier[d] == literal[int] : keyword[return] identifier[modular_exp] ( identifier[a] ,( identifier[p] + literal[int] )// literal[int] , identifier[p] )
keyword[if] identifier[d] == identifier[p] - literal[int] : keyword[return] ( literal[int] * identifier[a] * identifier[modular_exp] ( literal[int] * identifier[a] ,( identifier[p] - literal[int] )// literal[int] , identifier[p] ))% identifier[p]
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[for] identifier[b] keyword[in] identifier[range] ( literal[int] , identifier[p] ):
keyword[if] identifier[jacobi] ( identifier[b] * identifier[b] - literal[int] * identifier[a] , identifier[p] )==- literal[int] :
identifier[f] =( identifier[a] ,- identifier[b] , literal[int] )
identifier[ff] = identifier[polynomial_exp_mod] (( literal[int] , literal[int] ),( identifier[p] + literal[int] )// literal[int] , identifier[f] , identifier[p] )
keyword[assert] identifier[ff] [ literal[int] ]== literal[int]
keyword[return] identifier[ff] [ literal[int] ]
keyword[raise] identifier[RuntimeError] ( literal[string] )
|
def square_root_mod_prime(a, p):
"""Modular square root of a, mod p, p prime."""
# Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39.
# This module has been tested for all values in [0,p-1] for
# every prime p from 3 to 1229.
assert 0 <= a < p
assert 1 < p
if a == 0:
return 0 # depends on [control=['if'], data=[]]
if p == 2:
return a # depends on [control=['if'], data=[]]
jac = jacobi(a, p)
if jac == -1:
raise SquareRootError('%d has no square root modulo %d' % (a, p)) # depends on [control=['if'], data=[]]
if p % 4 == 3:
return modular_exp(a, (p + 1) // 4, p) # depends on [control=['if'], data=[]]
if p % 8 == 5:
d = modular_exp(a, (p - 1) // 4, p)
if d == 1:
return modular_exp(a, (p + 3) // 8, p) # depends on [control=['if'], data=[]]
if d == p - 1:
return 2 * a * modular_exp(4 * a, (p - 5) // 8, p) % p # depends on [control=['if'], data=[]]
raise RuntimeError("Shouldn't get here.") # depends on [control=['if'], data=[]]
for b in range(2, p):
if jacobi(b * b - 4 * a, p) == -1:
f = (a, -b, 1)
ff = polynomial_exp_mod((0, 1), (p + 1) // 2, f, p)
assert ff[1] == 0
return ff[0] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['b']]
raise RuntimeError('No b found.')
|
def turn_on(self):
"""Turn bulb on (full brightness)."""
command = "C {},,,,100,\r\n".format(self._zid)
response = self._hub.send_command(command)
_LOGGER.debug("Turn on %s: %s", repr(command), response)
return response
|
def function[turn_on, parameter[self]]:
constant[Turn bulb on (full brightness).]
variable[command] assign[=] call[constant[C {},,,,100,
].format, parameter[name[self]._zid]]
variable[response] assign[=] call[name[self]._hub.send_command, parameter[name[command]]]
call[name[_LOGGER].debug, parameter[constant[Turn on %s: %s], call[name[repr], parameter[name[command]]], name[response]]]
return[name[response]]
|
keyword[def] identifier[turn_on] ( identifier[self] ):
literal[string]
identifier[command] = literal[string] . identifier[format] ( identifier[self] . identifier[_zid] )
identifier[response] = identifier[self] . identifier[_hub] . identifier[send_command] ( identifier[command] )
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[repr] ( identifier[command] ), identifier[response] )
keyword[return] identifier[response]
|
def turn_on(self):
"""Turn bulb on (full brightness)."""
command = 'C {},,,,100,\r\n'.format(self._zid)
response = self._hub.send_command(command)
_LOGGER.debug('Turn on %s: %s', repr(command), response)
return response
|
def value(self):
""" str: The value of the form element. """
if self.tag_name == "textarea":
return inner_content(self.native)
elif self.tag_name == "select":
if self["multiple"] == "multiple":
selected_options = self._find_xpath(".//option[@selected='selected']")
return [_get_option_value(option) for option in selected_options]
else:
options = (
self._find_xpath(".//option[@selected='selected']") +
self._find_xpath(".//option"))
return _get_option_value(options[0]) if options else None
elif self.tag_name == "input" and self["type"] in ["checkbox", "radio"]:
return self["value"] or "on"
else:
return self["value"]
|
def function[value, parameter[self]]:
constant[ str: The value of the form element. ]
if compare[name[self].tag_name equal[==] constant[textarea]] begin[:]
return[call[name[inner_content], parameter[name[self].native]]]
|
keyword[def] identifier[value] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[tag_name] == literal[string] :
keyword[return] identifier[inner_content] ( identifier[self] . identifier[native] )
keyword[elif] identifier[self] . identifier[tag_name] == literal[string] :
keyword[if] identifier[self] [ literal[string] ]== literal[string] :
identifier[selected_options] = identifier[self] . identifier[_find_xpath] ( literal[string] )
keyword[return] [ identifier[_get_option_value] ( identifier[option] ) keyword[for] identifier[option] keyword[in] identifier[selected_options] ]
keyword[else] :
identifier[options] =(
identifier[self] . identifier[_find_xpath] ( literal[string] )+
identifier[self] . identifier[_find_xpath] ( literal[string] ))
keyword[return] identifier[_get_option_value] ( identifier[options] [ literal[int] ]) keyword[if] identifier[options] keyword[else] keyword[None]
keyword[elif] identifier[self] . identifier[tag_name] == literal[string] keyword[and] identifier[self] [ literal[string] ] keyword[in] [ literal[string] , literal[string] ]:
keyword[return] identifier[self] [ literal[string] ] keyword[or] literal[string]
keyword[else] :
keyword[return] identifier[self] [ literal[string] ]
|
def value(self):
""" str: The value of the form element. """
if self.tag_name == 'textarea':
return inner_content(self.native) # depends on [control=['if'], data=[]]
elif self.tag_name == 'select':
if self['multiple'] == 'multiple':
selected_options = self._find_xpath(".//option[@selected='selected']")
return [_get_option_value(option) for option in selected_options] # depends on [control=['if'], data=[]]
else:
options = self._find_xpath(".//option[@selected='selected']") + self._find_xpath('.//option')
return _get_option_value(options[0]) if options else None # depends on [control=['if'], data=[]]
elif self.tag_name == 'input' and self['type'] in ['checkbox', 'radio']:
return self['value'] or 'on' # depends on [control=['if'], data=[]]
else:
return self['value']
|
def flush(self, exclude=None, include=None, dryrun=False):
'''Flush :attr:`registered_models`.
:param exclude: optional list of model names to exclude.
:param include: optional list of model names to include.
:param dryrun: Doesn't remove anything, simply collect managers
to flush.
:return:
'''
exclude = exclude or []
results = []
for manager in self._registered_models.values():
m = manager._meta
if include is not None and not (m.modelkey in include or
m.app_label in include):
continue
if not (m.modelkey in exclude or m.app_label in exclude):
if dryrun:
results.append(manager)
else:
results.append(manager.flush())
return results
|
def function[flush, parameter[self, exclude, include, dryrun]]:
constant[Flush :attr:`registered_models`.
:param exclude: optional list of model names to exclude.
:param include: optional list of model names to include.
:param dryrun: Doesn't remove anything, simply collect managers
to flush.
:return:
]
variable[exclude] assign[=] <ast.BoolOp object at 0x7da1b0e97760>
variable[results] assign[=] list[[]]
for taget[name[manager]] in starred[call[name[self]._registered_models.values, parameter[]]] begin[:]
variable[m] assign[=] name[manager]._meta
if <ast.BoolOp object at 0x7da1b0e96cb0> begin[:]
continue
if <ast.UnaryOp object at 0x7da1b0fe4880> begin[:]
if name[dryrun] begin[:]
call[name[results].append, parameter[name[manager]]]
return[name[results]]
|
keyword[def] identifier[flush] ( identifier[self] , identifier[exclude] = keyword[None] , identifier[include] = keyword[None] , identifier[dryrun] = keyword[False] ):
literal[string]
identifier[exclude] = identifier[exclude] keyword[or] []
identifier[results] =[]
keyword[for] identifier[manager] keyword[in] identifier[self] . identifier[_registered_models] . identifier[values] ():
identifier[m] = identifier[manager] . identifier[_meta]
keyword[if] identifier[include] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] ( identifier[m] . identifier[modelkey] keyword[in] identifier[include] keyword[or]
identifier[m] . identifier[app_label] keyword[in] identifier[include] ):
keyword[continue]
keyword[if] keyword[not] ( identifier[m] . identifier[modelkey] keyword[in] identifier[exclude] keyword[or] identifier[m] . identifier[app_label] keyword[in] identifier[exclude] ):
keyword[if] identifier[dryrun] :
identifier[results] . identifier[append] ( identifier[manager] )
keyword[else] :
identifier[results] . identifier[append] ( identifier[manager] . identifier[flush] ())
keyword[return] identifier[results]
|
def flush(self, exclude=None, include=None, dryrun=False):
"""Flush :attr:`registered_models`.
:param exclude: optional list of model names to exclude.
:param include: optional list of model names to include.
:param dryrun: Doesn't remove anything, simply collect managers
to flush.
:return:
"""
exclude = exclude or []
results = []
for manager in self._registered_models.values():
m = manager._meta
if include is not None and (not (m.modelkey in include or m.app_label in include)):
continue # depends on [control=['if'], data=[]]
if not (m.modelkey in exclude or m.app_label in exclude):
if dryrun:
results.append(manager) # depends on [control=['if'], data=[]]
else:
results.append(manager.flush()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['manager']]
return results
|
def sendATS_same(self, CorpNum, TemplateCode, Sender, Content, AltContent, AltSendType, SndDT, KakaoMessages,
UserID=None, RequestNum=None, ButtonList=None):
"""
알림톡 대량 전송
:param CorpNum: 팝빌회원 사업자번호
:param TemplateCode: 템플릿코드
:param Sender: 발신번호
:param Content: [동보] 알림톡 내용
:param AltContent: [동보] 대체문자 내용
:param AltSendType: 대체문자 유형 [공백-미전송, C-알림톡내용, A-대체문자내용]
:param SndDT: 예약일시 [작성형식 : yyyyMMddHHmmss]
:param KakaoMessages: 알림톡 내용 (배열)
:param UserID: 팝빌회원 아이디
:param RequestNum : 요청번호
:return: receiptNum (접수번호)
"""
if TemplateCode is None or TemplateCode == '':
raise PopbillException(-99999999, "알림톡 템플릿코드가 입력되지 않았습니다.")
if Sender is None or Sender == '':
raise PopbillException(-99999999, "발신번호가 입력되지 않았습니다.")
req = {}
if TemplateCode is not None or TemplateCode != '':
req['templateCode'] = TemplateCode
if Sender is not None or Sender != '':
req['snd'] = Sender
if Content is not None or Content != '':
req['content'] = Content
if AltContent is not None or AltContent != '':
req['altContent'] = AltContent
if AltSendType is not None or AltSendType != '':
req['altSendType'] = AltSendType
if SndDT is not None or SndDT != '':
req['sndDT'] = SndDT
if KakaoMessages is not None or KakaoMessages != '':
req['msgs'] = KakaoMessages
if ButtonList is not None:
req['btns'] = ButtonList
if RequestNum is not None or RequestNum != '':
req['requestnum'] = RequestNum
postData = self._stringtify(req)
result = self._httppost('/ATS', postData, CorpNum, UserID)
return result.receiptNum
|
def function[sendATS_same, parameter[self, CorpNum, TemplateCode, Sender, Content, AltContent, AltSendType, SndDT, KakaoMessages, UserID, RequestNum, ButtonList]]:
constant[
알림톡 대량 전송
:param CorpNum: 팝빌회원 사업자번호
:param TemplateCode: 템플릿코드
:param Sender: 발신번호
:param Content: [동보] 알림톡 내용
:param AltContent: [동보] 대체문자 내용
:param AltSendType: 대체문자 유형 [공백-미전송, C-알림톡내용, A-대체문자내용]
:param SndDT: 예약일시 [작성형식 : yyyyMMddHHmmss]
:param KakaoMessages: 알림톡 내용 (배열)
:param UserID: 팝빌회원 아이디
:param RequestNum : 요청번호
:return: receiptNum (접수번호)
]
if <ast.BoolOp object at 0x7da1b10efbb0> begin[:]
<ast.Raise object at 0x7da1b10efa60>
if <ast.BoolOp object at 0x7da1b10ef910> begin[:]
<ast.Raise object at 0x7da1b10ef7c0>
variable[req] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da1b10ef5e0> begin[:]
call[name[req]][constant[templateCode]] assign[=] name[TemplateCode]
if <ast.BoolOp object at 0x7da1b10ef370> begin[:]
call[name[req]][constant[snd]] assign[=] name[Sender]
if <ast.BoolOp object at 0x7da1b10ee2c0> begin[:]
call[name[req]][constant[content]] assign[=] name[Content]
if <ast.BoolOp object at 0x7da1b10ee050> begin[:]
call[name[req]][constant[altContent]] assign[=] name[AltContent]
if <ast.BoolOp object at 0x7da1b10358d0> begin[:]
call[name[req]][constant[altSendType]] assign[=] name[AltSendType]
if <ast.BoolOp object at 0x7da1b1035720> begin[:]
call[name[req]][constant[sndDT]] assign[=] name[SndDT]
if <ast.BoolOp object at 0x7da1b10edd20> begin[:]
call[name[req]][constant[msgs]] assign[=] name[KakaoMessages]
if compare[name[ButtonList] is_not constant[None]] begin[:]
call[name[req]][constant[btns]] assign[=] name[ButtonList]
if <ast.BoolOp object at 0x7da1b10ed900> begin[:]
call[name[req]][constant[requestnum]] assign[=] name[RequestNum]
variable[postData] assign[=] call[name[self]._stringtify, parameter[name[req]]]
variable[result] assign[=] call[name[self]._httppost, parameter[constant[/ATS], name[postData], name[CorpNum], name[UserID]]]
return[name[result].receiptNum]
|
keyword[def] identifier[sendATS_same] ( identifier[self] , identifier[CorpNum] , identifier[TemplateCode] , identifier[Sender] , identifier[Content] , identifier[AltContent] , identifier[AltSendType] , identifier[SndDT] , identifier[KakaoMessages] ,
identifier[UserID] = keyword[None] , identifier[RequestNum] = keyword[None] , identifier[ButtonList] = keyword[None] ):
literal[string]
keyword[if] identifier[TemplateCode] keyword[is] keyword[None] keyword[or] identifier[TemplateCode] == literal[string] :
keyword[raise] identifier[PopbillException] (- literal[int] , literal[string] )
keyword[if] identifier[Sender] keyword[is] keyword[None] keyword[or] identifier[Sender] == literal[string] :
keyword[raise] identifier[PopbillException] (- literal[int] , literal[string] )
identifier[req] ={}
keyword[if] identifier[TemplateCode] keyword[is] keyword[not] keyword[None] keyword[or] identifier[TemplateCode] != literal[string] :
identifier[req] [ literal[string] ]= identifier[TemplateCode]
keyword[if] identifier[Sender] keyword[is] keyword[not] keyword[None] keyword[or] identifier[Sender] != literal[string] :
identifier[req] [ literal[string] ]= identifier[Sender]
keyword[if] identifier[Content] keyword[is] keyword[not] keyword[None] keyword[or] identifier[Content] != literal[string] :
identifier[req] [ literal[string] ]= identifier[Content]
keyword[if] identifier[AltContent] keyword[is] keyword[not] keyword[None] keyword[or] identifier[AltContent] != literal[string] :
identifier[req] [ literal[string] ]= identifier[AltContent]
keyword[if] identifier[AltSendType] keyword[is] keyword[not] keyword[None] keyword[or] identifier[AltSendType] != literal[string] :
identifier[req] [ literal[string] ]= identifier[AltSendType]
keyword[if] identifier[SndDT] keyword[is] keyword[not] keyword[None] keyword[or] identifier[SndDT] != literal[string] :
identifier[req] [ literal[string] ]= identifier[SndDT]
keyword[if] identifier[KakaoMessages] keyword[is] keyword[not] keyword[None] keyword[or] identifier[KakaoMessages] != literal[string] :
identifier[req] [ literal[string] ]= identifier[KakaoMessages]
keyword[if] identifier[ButtonList] keyword[is] keyword[not] keyword[None] :
identifier[req] [ literal[string] ]= identifier[ButtonList]
keyword[if] identifier[RequestNum] keyword[is] keyword[not] keyword[None] keyword[or] identifier[RequestNum] != literal[string] :
identifier[req] [ literal[string] ]= identifier[RequestNum]
identifier[postData] = identifier[self] . identifier[_stringtify] ( identifier[req] )
identifier[result] = identifier[self] . identifier[_httppost] ( literal[string] , identifier[postData] , identifier[CorpNum] , identifier[UserID] )
keyword[return] identifier[result] . identifier[receiptNum]
|
def sendATS_same(self, CorpNum, TemplateCode, Sender, Content, AltContent, AltSendType, SndDT, KakaoMessages, UserID=None, RequestNum=None, ButtonList=None):
"""
알림톡 대량 전송
:param CorpNum: 팝빌회원 사업자번호
:param TemplateCode: 템플릿코드
:param Sender: 발신번호
:param Content: [동보] 알림톡 내용
:param AltContent: [동보] 대체문자 내용
:param AltSendType: 대체문자 유형 [공백-미전송, C-알림톡내용, A-대체문자내용]
:param SndDT: 예약일시 [작성형식 : yyyyMMddHHmmss]
:param KakaoMessages: 알림톡 내용 (배열)
:param UserID: 팝빌회원 아이디
:param RequestNum : 요청번호
:return: receiptNum (접수번호)
"""
if TemplateCode is None or TemplateCode == '':
raise PopbillException(-99999999, '알림톡 템플릿코드가 입력되지 않았습니다.') # depends on [control=['if'], data=[]]
if Sender is None or Sender == '':
raise PopbillException(-99999999, '발신번호가 입력되지 않았습니다.') # depends on [control=['if'], data=[]]
req = {}
if TemplateCode is not None or TemplateCode != '':
req['templateCode'] = TemplateCode # depends on [control=['if'], data=[]]
if Sender is not None or Sender != '':
req['snd'] = Sender # depends on [control=['if'], data=[]]
if Content is not None or Content != '':
req['content'] = Content # depends on [control=['if'], data=[]]
if AltContent is not None or AltContent != '':
req['altContent'] = AltContent # depends on [control=['if'], data=[]]
if AltSendType is not None or AltSendType != '':
req['altSendType'] = AltSendType # depends on [control=['if'], data=[]]
if SndDT is not None or SndDT != '':
req['sndDT'] = SndDT # depends on [control=['if'], data=[]]
if KakaoMessages is not None or KakaoMessages != '':
req['msgs'] = KakaoMessages # depends on [control=['if'], data=[]]
if ButtonList is not None:
req['btns'] = ButtonList # depends on [control=['if'], data=['ButtonList']]
if RequestNum is not None or RequestNum != '':
req['requestnum'] = RequestNum # depends on [control=['if'], data=[]]
postData = self._stringtify(req)
result = self._httppost('/ATS', postData, CorpNum, UserID)
return result.receiptNum
|
def getEthernetLinkStatus(self, wanInterfaceId=1, timeout=1):
"""Execute GetEthernetLinkStatus action to get the status of the ethernet link.
:param int wanInterfaceId: the id of the WAN device
:param float timeout: the timeout to wait for the action to be executed
:return: status of the ethernet link
:rtype: str
"""
namespace = Wan.getServiceType("getEthernetLinkStatus") + str(wanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetEthernetLinkStatus", timeout=timeout)
return results["NewEthernetLinkStatus"]
|
def function[getEthernetLinkStatus, parameter[self, wanInterfaceId, timeout]]:
constant[Execute GetEthernetLinkStatus action to get the status of the ethernet link.
:param int wanInterfaceId: the id of the WAN device
:param float timeout: the timeout to wait for the action to be executed
:return: status of the ethernet link
:rtype: str
]
variable[namespace] assign[=] binary_operation[call[name[Wan].getServiceType, parameter[constant[getEthernetLinkStatus]]] + call[name[str], parameter[name[wanInterfaceId]]]]
variable[uri] assign[=] call[name[self].getControlURL, parameter[name[namespace]]]
variable[results] assign[=] call[name[self].execute, parameter[name[uri], name[namespace], constant[GetEthernetLinkStatus]]]
return[call[name[results]][constant[NewEthernetLinkStatus]]]
|
keyword[def] identifier[getEthernetLinkStatus] ( identifier[self] , identifier[wanInterfaceId] = literal[int] , identifier[timeout] = literal[int] ):
literal[string]
identifier[namespace] = identifier[Wan] . identifier[getServiceType] ( literal[string] )+ identifier[str] ( identifier[wanInterfaceId] )
identifier[uri] = identifier[self] . identifier[getControlURL] ( identifier[namespace] )
identifier[results] = identifier[self] . identifier[execute] ( identifier[uri] , identifier[namespace] , literal[string] , identifier[timeout] = identifier[timeout] )
keyword[return] identifier[results] [ literal[string] ]
|
def getEthernetLinkStatus(self, wanInterfaceId=1, timeout=1):
"""Execute GetEthernetLinkStatus action to get the status of the ethernet link.
:param int wanInterfaceId: the id of the WAN device
:param float timeout: the timeout to wait for the action to be executed
:return: status of the ethernet link
:rtype: str
"""
namespace = Wan.getServiceType('getEthernetLinkStatus') + str(wanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, 'GetEthernetLinkStatus', timeout=timeout)
return results['NewEthernetLinkStatus']
|
def to_rio(self):
"""Converts the colormap to a rasterio colormap.
"""
self.colors = (((self.colors * 1.0 - self.colors.min()) /
(self.colors.max() - self.colors.min())) * 255)
return dict(zip(self.values, tuple(map(tuple, self.colors))))
|
def function[to_rio, parameter[self]]:
constant[Converts the colormap to a rasterio colormap.
]
name[self].colors assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[self].colors * constant[1.0]] - call[name[self].colors.min, parameter[]]] / binary_operation[call[name[self].colors.max, parameter[]] - call[name[self].colors.min, parameter[]]]] * constant[255]]
return[call[name[dict], parameter[call[name[zip], parameter[name[self].values, call[name[tuple], parameter[call[name[map], parameter[name[tuple], name[self].colors]]]]]]]]]
|
keyword[def] identifier[to_rio] ( identifier[self] ):
literal[string]
identifier[self] . identifier[colors] =((( identifier[self] . identifier[colors] * literal[int] - identifier[self] . identifier[colors] . identifier[min] ())/
( identifier[self] . identifier[colors] . identifier[max] ()- identifier[self] . identifier[colors] . identifier[min] ()))* literal[int] )
keyword[return] identifier[dict] ( identifier[zip] ( identifier[self] . identifier[values] , identifier[tuple] ( identifier[map] ( identifier[tuple] , identifier[self] . identifier[colors] ))))
|
def to_rio(self):
"""Converts the colormap to a rasterio colormap.
"""
self.colors = (self.colors * 1.0 - self.colors.min()) / (self.colors.max() - self.colors.min()) * 255
return dict(zip(self.values, tuple(map(tuple, self.colors))))
|
def run_checks(self, **kwargs):
"""
Check to see whether the system is expected to be computable.
This is called by default for each set_value but will only raise a
logger warning if fails. This is also called immediately when calling
:meth:`run_compute`.
kwargs are passed to override currently set values as if they were
sent to :meth:`run_compute`.
:return: True if passed, False if failed and a message
"""
# make sure all constraints have been run
changed_params = self.run_delayed_constraints()
hier = self.hierarchy
if hier is None:
return True, ''
for component in hier.get_stars():
kind = hier.get_kind_of(component)
comp_ps = self.get_component(component)
if not len(comp_ps):
return False, "component '{}' in the hierarchy is not in the bundle".format(component)
parent = hier.get_parent_of(component)
parent_ps = self.get_component(parent)
if kind in ['star']:
# ignore the single star case
if parent:
# contact systems MUST by synchronous
if hier.is_contact_binary(component):
if self.get_value(qualifier='syncpar', component=component, context='component', **kwargs) != 1.0:
return False,\
'contact binaries must by synchronous, but syncpar@{}!=1'.format(component)
if self.get_value(qualifier='ecc', component=parent, context='component', **kwargs) != 0.0:
return False,\
'contact binaries must by circular, but ecc@{}!=0'.format(component)
if self.get_value(qualifier='pitch', component=component, context='component', **kwargs) != 0.0:
return False,\
'contact binaries must be aligned, but pitch@{}!=0'.format(component)
if self.get_value(qualifier='yaw', component=component, context='component', **kwargs) != 0.0:
return False,\
'contact binaries must be aligned, but yaw@{}!=0'.format(component)
# MUST NOT be overflowing at PERIASTRON (d=1-ecc, etheta=0)
requiv = comp_ps.get_value('requiv', unit=u.solRad, **kwargs)
requiv_max = comp_ps.get_value('requiv_max', unit=u.solRad, **kwargs)
if hier.is_contact_binary(component):
if np.isnan(requiv) or requiv > requiv_max:
return False,\
'{} is overflowing at L2/L3 (requiv={}, requiv_max={})'.format(component, requiv, requiv_max)
requiv_min = comp_ps.get_value('requiv_min')
if np.isnan(requiv) or requiv <= requiv_min:
return False,\
'{} is underflowing at L1 and not a contact system (requiv={}, requiv_min={})'.format(component, requiv, requiv_min)
elif requiv <= requiv_min * 1.001:
return False,\
'requiv@{} is too close to requiv_min (within 0.1% of critical). Use detached/semidetached model instead.'.format(component)
else:
if requiv > requiv_max:
return False,\
'{} is overflowing at periastron (requiv={}, requiv_max={})'.format(component, requiv, requiv_max)
else:
raise NotImplementedError("checks not implemented for type '{}'".format(kind))
# we also need to make sure that stars don't overlap each other
# so we'll check for each pair of stars (see issue #70 on github)
# TODO: rewrite overlap checks
for orbitref in []: #hier.get_orbits():
if len(hier.get_children_of(orbitref)) == 2:
q = self.get_value(qualifier='q', component=orbitref, context='component', **kwargs)
ecc = self.get_value(qualifier='ecc', component=orbitref, context='component', **kwargs)
starrefs = hier.get_children_of(orbitref)
if hier.get_kind_of(starrefs[0]) != 'star' or hier.get_kind_of(starrefs[1]) != 'star':
# print "***", hier.get_kind_of(starrefs[0]), hier.get_kind_of(starrefs[1])
continue
if self.get_value(qualifier='pitch', component=starrefs[0])!=0.0 or \
self.get_value(qualifier='pitch', component=starrefs[1])!=0.0 or \
self.get_value(qualifier='yaw', component=starrefs[0])!=0.0 or \
self.get_value(qualifier='yaw', component=starrefs[1])!=0.0:
# we cannot run this test for misaligned cases
continue
comp0 = hier.get_primary_or_secondary(starrefs[0], return_ind=True)
comp1 = hier.get_primary_or_secondary(starrefs[1], return_ind=True)
q0 = roche.q_for_component(q, comp0)
q1 = roche.q_for_component(q, comp1)
F0 = self.get_value(qualifier='syncpar', component=starrefs[0], context='component', **kwargs)
F1 = self.get_value(qualifier='syncpar', component=starrefs[1], context='component', **kwargs)
pot0 = self.get_value(qualifier='pot', component=starrefs[0], context='component', **kwargs)
pot0 = roche.pot_for_component(pot0, q0, comp0)
pot1 = self.get_value(qualifier='pot', component=starrefs[1], context='component', **kwargs)
pot1 = roche.pot_for_component(pot1, q1, comp1)
xrange0 = libphoebe.roche_xrange(q0, F0, 1.0-ecc, pot0+1e-6, choice=0)
xrange1 = libphoebe.roche_xrange(q1, F1, 1.0-ecc, pot1+1e-6, choice=0)
if xrange0[1]+xrange1[1] > 1.0-ecc:
return False,\
'components in {} are overlapping at periastron (change ecc@{}, syncpar@{}, or syncpar@{}).'.format(orbitref, orbitref, starrefs[0], starrefs[1])
# check to make sure passband supports the selected atm
for pbparam in self.filter(qualifier='passband').to_list():
pb = pbparam.get_value()
pbatms = _pbtable[pb]['atms']
# NOTE: atms are not attached to datasets, but per-compute and per-component
for atmparam in self.filter(qualifier='atm', kind='phoebe').to_list():
atm = atmparam.get_value()
if atm not in pbatms:
return False, "'{}' passband ({}) does not support atm='{}' ({}).".format(pb, pbparam.twig, atm, atmparam.twig)
# check length of ld_coeffs vs ld_func and ld_func vs atm
def ld_coeffs_len(ld_func, ld_coeffs):
# current choices for ld_func are:
# ['interp', 'uniform', 'linear', 'logarithmic', 'quadratic', 'square_root', 'power', 'claret', 'hillen', 'prsa']
if ld_func == 'interp':
return True,
elif ld_func in ['linear'] and len(ld_coeffs)==1:
return True,
elif ld_func in ['logarithmic', 'square_root', 'quadratic'] and len(ld_coeffs)==2:
return True,
elif ld_func in ['power'] and len(ld_coeffs)==4:
return True,
else:
return False, "ld_coeffs={} wrong length for ld_func='{}'.".format(ld_coeffs, ld_func)
for component in self.hierarchy.get_stars():
# first check ld_coeffs_bol vs ld_func_bol
ld_func = str(self.get_value(qualifier='ld_func_bol', component=component, context='component', check_visible=False, **kwargs))
ld_coeffs = np.asarray(self.get_value(qualifier='ld_coeffs_bol', component=component, context='component', check_visible=False, **kwargs))
check = ld_coeffs_len(ld_func, ld_coeffs)
if not check[0]:
return check
if ld_func != 'interp':
check = libphoebe.ld_check(ld_func, ld_coeffs)
if not check:
return False, 'ld_coeffs_bol={} not compatible for ld_func_bol=\'{}\'.'.format(ld_coeffs, ld_func)
for dataset in self.datasets:
if dataset=='_default' or self.get_dataset(dataset=dataset, kind='*dep').kind not in ['lc_dep', 'rv_dep']:
continue
ld_func = str(self.get_value(qualifier='ld_func', dataset=dataset, component=component, context='dataset', **kwargs))
ld_coeffs = np.asarray(self.get_value(qualifier='ld_coeffs', dataset=dataset, component=component, context='dataset', check_visible=False, **kwargs))
if ld_coeffs is not None:
check = ld_coeffs_len(ld_func, ld_coeffs)
if not check[0]:
return check
if ld_func != 'interp':
check = libphoebe.ld_check(ld_func, ld_coeffs)
if not check:
return False, 'ld_coeffs={} not compatible for ld_func=\'{}\'.'.format(ld_coeffs, ld_func)
if ld_func=='interp':
for compute in kwargs.get('computes', self.computes):
atm = self.get_value(qualifier='atm', component=component, compute=compute, context='compute', **kwargs)
if atm != 'ck2004':
return False, "ld_func='interp' only supported by atm='ck2004'. Either change atm@{} or ld_func@{}@{}".format(component, component, dataset)
# mesh-consistency checks
for compute in self.computes:
mesh_methods = [p.get_value() for p in self.filter(qualifier='mesh_method', compute=compute, force_ps=True).to_list()]
if 'wd' in mesh_methods:
if len(set(mesh_methods)) > 1:
return False, "all (or none) components must use mesh_method='wd'."
#### WARNINGS ONLY ####
# let's check teff vs gravb_bol and irrad_frac_refl_bol
for component in self.hierarchy.get_stars():
teff = self.get_value(qualifier='teff', component=component, context='component', unit=u.K, **kwargs)
gravb_bol = self.get_value(qualifier='gravb_bol', component=component, context='component', **kwargs)
if teff >= 8000. and gravb_bol < 0.9:
return None, "'{}' probably has a radiative atm (teff={:.0f}K>8000K), for which gravb_bol=1.00 might be a better approx than gravb_bol={:.2f}.".format(component, teff, gravb_bol)
elif teff <= 6600. and gravb_bol >= 0.9:
return None, "'{}' probably has a convective atm (teff={:.0f}K<6600K), for which gravb_bol=0.32 might be a better approx than gravb_bol={:.2f}.".format(component, teff, gravb_bol)
elif gravb_bol < 0.32 or gravb_bol > 1.00:
return None, "'{}' has intermittent temperature (6600K<teff={:.0f}K<8000K), gravb_bol might be better between 0.32-1.00 than gravb_bol={:.2f}.".format(component, teff, gravb_bol)
for component in self.hierarchy.get_stars():
teff = self.get_value(qualifier='teff', component=component, context='component', unit=u.K, **kwargs)
irrad_frac_refl_bol = self.get_value(qualifier='irrad_frac_refl_bol', component=component, context='component', **kwargs)
if teff >= 8000. and irrad_frac_refl_bol < 0.8:
return None, "'{}' probably has a radiative atm (teff={:.0f}K>8000K), for which irrad_frac_refl_bol=1.00 might be a better approx than irrad_frac_refl_bol={:.2f}.".format(component, teff, irrad_frac_refl_bol)
elif teff <= 6600. and irrad_frac_refl_bol >= 0.75:
return None, "'{}' probably has a convective atm (teff={:.0f}K<6600K), for which irrad_frac_refl_bol=0.6 might be a better approx than irrad_frac_refl_bol={:.2f}.".format(component, teff, irrad_frac_refl_bol)
elif irrad_frac_refl_bol < 0.6:
return None, "'{}' has intermittent temperature (6600K<teff={:.0f}K<8000K), irrad_frac_refl_bol might be better between 0.6-1.00 than irrad_frac_refl_bol={:.2f}.".format(component, teff, irrad_frac_refl_bol)
# TODO: add other checks
# - make sure all ETV components are legal
# - check for conflict between dynamics_method and mesh_method (?)
# we've survived all tests
return True, ''
|
def function[run_checks, parameter[self]]:
constant[
Check to see whether the system is expected to be computable.
This is called by default for each set_value but will only raise a
logger warning if fails. This is also called immediately when calling
:meth:`run_compute`.
kwargs are passed to override currently set values as if they were
sent to :meth:`run_compute`.
:return: True if passed, False if failed and a message
]
variable[changed_params] assign[=] call[name[self].run_delayed_constraints, parameter[]]
variable[hier] assign[=] name[self].hierarchy
if compare[name[hier] is constant[None]] begin[:]
return[tuple[[<ast.Constant object at 0x7da20c992d10>, <ast.Constant object at 0x7da20c9937f0>]]]
for taget[name[component]] in starred[call[name[hier].get_stars, parameter[]]] begin[:]
variable[kind] assign[=] call[name[hier].get_kind_of, parameter[name[component]]]
variable[comp_ps] assign[=] call[name[self].get_component, parameter[name[component]]]
if <ast.UnaryOp object at 0x7da20c9922f0> begin[:]
return[tuple[[<ast.Constant object at 0x7da20c991690>, <ast.Call object at 0x7da20c991ea0>]]]
variable[parent] assign[=] call[name[hier].get_parent_of, parameter[name[component]]]
variable[parent_ps] assign[=] call[name[self].get_component, parameter[name[parent]]]
if compare[name[kind] in list[[<ast.Constant object at 0x7da20c991f00>]]] begin[:]
if name[parent] begin[:]
if call[name[hier].is_contact_binary, parameter[name[component]]] begin[:]
if compare[call[name[self].get_value, parameter[]] not_equal[!=] constant[1.0]] begin[:]
return[tuple[[<ast.Constant object at 0x7da20c992e30>, <ast.Call object at 0x7da20c992590>]]]
if compare[call[name[self].get_value, parameter[]] not_equal[!=] constant[0.0]] begin[:]
return[tuple[[<ast.Constant object at 0x7da20c990910>, <ast.Call object at 0x7da20c9917b0>]]]
if compare[call[name[self].get_value, parameter[]] not_equal[!=] constant[0.0]] begin[:]
return[tuple[[<ast.Constant object at 0x7da20c993e20>, <ast.Call object at 0x7da20c991750>]]]
if compare[call[name[self].get_value, parameter[]] not_equal[!=] constant[0.0]] begin[:]
return[tuple[[<ast.Constant object at 0x7da20c990040>, <ast.Call object at 0x7da20c993c70>]]]
variable[requiv] assign[=] call[name[comp_ps].get_value, parameter[constant[requiv]]]
variable[requiv_max] assign[=] call[name[comp_ps].get_value, parameter[constant[requiv_max]]]
if call[name[hier].is_contact_binary, parameter[name[component]]] begin[:]
if <ast.BoolOp object at 0x7da20c9923e0> begin[:]
return[tuple[[<ast.Constant object at 0x7da20c990e20>, <ast.Call object at 0x7da20c993f10>]]]
variable[requiv_min] assign[=] call[name[comp_ps].get_value, parameter[constant[requiv_min]]]
if <ast.BoolOp object at 0x7da20c993d30> begin[:]
return[tuple[[<ast.Constant object at 0x7da20c990790>, <ast.Call object at 0x7da20c9904c0>]]]
for taget[name[orbitref]] in starred[list[[]]] begin[:]
if compare[call[name[len], parameter[call[name[hier].get_children_of, parameter[name[orbitref]]]]] equal[==] constant[2]] begin[:]
variable[q] assign[=] call[name[self].get_value, parameter[]]
variable[ecc] assign[=] call[name[self].get_value, parameter[]]
variable[starrefs] assign[=] call[name[hier].get_children_of, parameter[name[orbitref]]]
if <ast.BoolOp object at 0x7da20c992050> begin[:]
continue
if <ast.BoolOp object at 0x7da20c990610> begin[:]
continue
variable[comp0] assign[=] call[name[hier].get_primary_or_secondary, parameter[call[name[starrefs]][constant[0]]]]
variable[comp1] assign[=] call[name[hier].get_primary_or_secondary, parameter[call[name[starrefs]][constant[1]]]]
variable[q0] assign[=] call[name[roche].q_for_component, parameter[name[q], name[comp0]]]
variable[q1] assign[=] call[name[roche].q_for_component, parameter[name[q], name[comp1]]]
variable[F0] assign[=] call[name[self].get_value, parameter[]]
variable[F1] assign[=] call[name[self].get_value, parameter[]]
variable[pot0] assign[=] call[name[self].get_value, parameter[]]
variable[pot0] assign[=] call[name[roche].pot_for_component, parameter[name[pot0], name[q0], name[comp0]]]
variable[pot1] assign[=] call[name[self].get_value, parameter[]]
variable[pot1] assign[=] call[name[roche].pot_for_component, parameter[name[pot1], name[q1], name[comp1]]]
variable[xrange0] assign[=] call[name[libphoebe].roche_xrange, parameter[name[q0], name[F0], binary_operation[constant[1.0] - name[ecc]], binary_operation[name[pot0] + constant[1e-06]]]]
variable[xrange1] assign[=] call[name[libphoebe].roche_xrange, parameter[name[q1], name[F1], binary_operation[constant[1.0] - name[ecc]], binary_operation[name[pot1] + constant[1e-06]]]]
if compare[binary_operation[call[name[xrange0]][constant[1]] + call[name[xrange1]][constant[1]]] greater[>] binary_operation[constant[1.0] - name[ecc]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da20c6a8be0>, <ast.Call object at 0x7da20c6aac20>]]]
for taget[name[pbparam]] in starred[call[call[name[self].filter, parameter[]].to_list, parameter[]]] begin[:]
variable[pb] assign[=] call[name[pbparam].get_value, parameter[]]
variable[pbatms] assign[=] call[call[name[_pbtable]][name[pb]]][constant[atms]]
for taget[name[atmparam]] in starred[call[call[name[self].filter, parameter[]].to_list, parameter[]]] begin[:]
variable[atm] assign[=] call[name[atmparam].get_value, parameter[]]
if compare[name[atm] <ast.NotIn object at 0x7da2590d7190> name[pbatms]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b2344910>, <ast.Call object at 0x7da1b2345510>]]]
def function[ld_coeffs_len, parameter[ld_func, ld_coeffs]]:
if compare[name[ld_func] equal[==] constant[interp]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b2347ca0>]]]
for taget[name[component]] in starred[call[name[self].hierarchy.get_stars, parameter[]]] begin[:]
variable[ld_func] assign[=] call[name[str], parameter[call[name[self].get_value, parameter[]]]]
variable[ld_coeffs] assign[=] call[name[np].asarray, parameter[call[name[self].get_value, parameter[]]]]
variable[check] assign[=] call[name[ld_coeffs_len], parameter[name[ld_func], name[ld_coeffs]]]
if <ast.UnaryOp object at 0x7da1b2344130> begin[:]
return[name[check]]
if compare[name[ld_func] not_equal[!=] constant[interp]] begin[:]
variable[check] assign[=] call[name[libphoebe].ld_check, parameter[name[ld_func], name[ld_coeffs]]]
if <ast.UnaryOp object at 0x7da1b2346800> begin[:]
return[tuple[[<ast.Constant object at 0x7da1b2347910>, <ast.Call object at 0x7da1b2347d30>]]]
for taget[name[dataset]] in starred[name[self].datasets] begin[:]
if <ast.BoolOp object at 0x7da1b2345e10> begin[:]
continue
variable[ld_func] assign[=] call[name[str], parameter[call[name[self].get_value, parameter[]]]]
variable[ld_coeffs] assign[=] call[name[np].asarray, parameter[call[name[self].get_value, parameter[]]]]
if compare[name[ld_coeffs] is_not constant[None]] begin[:]
variable[check] assign[=] call[name[ld_coeffs_len], parameter[name[ld_func], name[ld_coeffs]]]
if <ast.UnaryOp object at 0x7da1b2346500> begin[:]
return[name[check]]
if compare[name[ld_func] not_equal[!=] constant[interp]] begin[:]
variable[check] assign[=] call[name[libphoebe].ld_check, parameter[name[ld_func], name[ld_coeffs]]]
if <ast.UnaryOp object at 0x7da18f811c30> begin[:]
return[tuple[[<ast.Constant object at 0x7da18f812bc0>, <ast.Call object at 0x7da18f811510>]]]
if compare[name[ld_func] equal[==] constant[interp]] begin[:]
for taget[name[compute]] in starred[call[name[kwargs].get, parameter[constant[computes], name[self].computes]]] begin[:]
variable[atm] assign[=] call[name[self].get_value, parameter[]]
if compare[name[atm] not_equal[!=] constant[ck2004]] begin[:]
return[tuple[[<ast.Constant object at 0x7da18f8114e0>, <ast.Call object at 0x7da18f813ca0>]]]
for taget[name[compute]] in starred[name[self].computes] begin[:]
variable[mesh_methods] assign[=] <ast.ListComp object at 0x7da18f811f30>
if compare[constant[wd] in name[mesh_methods]] begin[:]
if compare[call[name[len], parameter[call[name[set], parameter[name[mesh_methods]]]]] greater[>] constant[1]] begin[:]
return[tuple[[<ast.Constant object at 0x7da18f811fc0>, <ast.Constant object at 0x7da18f8114b0>]]]
for taget[name[component]] in starred[call[name[self].hierarchy.get_stars, parameter[]]] begin[:]
variable[teff] assign[=] call[name[self].get_value, parameter[]]
variable[gravb_bol] assign[=] call[name[self].get_value, parameter[]]
if <ast.BoolOp object at 0x7da18f8119c0> begin[:]
return[tuple[[<ast.Constant object at 0x7da18f813d90>, <ast.Call object at 0x7da18f810730>]]]
for taget[name[component]] in starred[call[name[self].hierarchy.get_stars, parameter[]]] begin[:]
variable[teff] assign[=] call[name[self].get_value, parameter[]]
variable[irrad_frac_refl_bol] assign[=] call[name[self].get_value, parameter[]]
if <ast.BoolOp object at 0x7da18f811ab0> begin[:]
return[tuple[[<ast.Constant object at 0x7da18f812170>, <ast.Call object at 0x7da18f8126b0>]]]
return[tuple[[<ast.Constant object at 0x7da18f810250>, <ast.Constant object at 0x7da18f812a70>]]]
|
keyword[def] identifier[run_checks] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[changed_params] = identifier[self] . identifier[run_delayed_constraints] ()
identifier[hier] = identifier[self] . identifier[hierarchy]
keyword[if] identifier[hier] keyword[is] keyword[None] :
keyword[return] keyword[True] , literal[string]
keyword[for] identifier[component] keyword[in] identifier[hier] . identifier[get_stars] ():
identifier[kind] = identifier[hier] . identifier[get_kind_of] ( identifier[component] )
identifier[comp_ps] = identifier[self] . identifier[get_component] ( identifier[component] )
keyword[if] keyword[not] identifier[len] ( identifier[comp_ps] ):
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] )
identifier[parent] = identifier[hier] . identifier[get_parent_of] ( identifier[component] )
identifier[parent_ps] = identifier[self] . identifier[get_component] ( identifier[parent] )
keyword[if] identifier[kind] keyword[in] [ literal[string] ]:
keyword[if] identifier[parent] :
keyword[if] identifier[hier] . identifier[is_contact_binary] ( identifier[component] ):
keyword[if] identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[context] = literal[string] ,** identifier[kwargs] )!= literal[int] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] )
keyword[if] identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[parent] , identifier[context] = literal[string] ,** identifier[kwargs] )!= literal[int] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] )
keyword[if] identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[context] = literal[string] ,** identifier[kwargs] )!= literal[int] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] )
keyword[if] identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[context] = literal[string] ,** identifier[kwargs] )!= literal[int] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] )
identifier[requiv] = identifier[comp_ps] . identifier[get_value] ( literal[string] , identifier[unit] = identifier[u] . identifier[solRad] ,** identifier[kwargs] )
identifier[requiv_max] = identifier[comp_ps] . identifier[get_value] ( literal[string] , identifier[unit] = identifier[u] . identifier[solRad] ,** identifier[kwargs] )
keyword[if] identifier[hier] . identifier[is_contact_binary] ( identifier[component] ):
keyword[if] identifier[np] . identifier[isnan] ( identifier[requiv] ) keyword[or] identifier[requiv] > identifier[requiv_max] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] , identifier[requiv] , identifier[requiv_max] )
identifier[requiv_min] = identifier[comp_ps] . identifier[get_value] ( literal[string] )
keyword[if] identifier[np] . identifier[isnan] ( identifier[requiv] ) keyword[or] identifier[requiv] <= identifier[requiv_min] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] , identifier[requiv] , identifier[requiv_min] )
keyword[elif] identifier[requiv] <= identifier[requiv_min] * literal[int] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] )
keyword[else] :
keyword[if] identifier[requiv] > identifier[requiv_max] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] , identifier[requiv] , identifier[requiv_max] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] . identifier[format] ( identifier[kind] ))
keyword[for] identifier[orbitref] keyword[in] []:
keyword[if] identifier[len] ( identifier[hier] . identifier[get_children_of] ( identifier[orbitref] ))== literal[int] :
identifier[q] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[orbitref] , identifier[context] = literal[string] ,** identifier[kwargs] )
identifier[ecc] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[orbitref] , identifier[context] = literal[string] ,** identifier[kwargs] )
identifier[starrefs] = identifier[hier] . identifier[get_children_of] ( identifier[orbitref] )
keyword[if] identifier[hier] . identifier[get_kind_of] ( identifier[starrefs] [ literal[int] ])!= literal[string] keyword[or] identifier[hier] . identifier[get_kind_of] ( identifier[starrefs] [ literal[int] ])!= literal[string] :
keyword[continue]
keyword[if] identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[starrefs] [ literal[int] ])!= literal[int] keyword[or] identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[starrefs] [ literal[int] ])!= literal[int] keyword[or] identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[starrefs] [ literal[int] ])!= literal[int] keyword[or] identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[starrefs] [ literal[int] ])!= literal[int] :
keyword[continue]
identifier[comp0] = identifier[hier] . identifier[get_primary_or_secondary] ( identifier[starrefs] [ literal[int] ], identifier[return_ind] = keyword[True] )
identifier[comp1] = identifier[hier] . identifier[get_primary_or_secondary] ( identifier[starrefs] [ literal[int] ], identifier[return_ind] = keyword[True] )
identifier[q0] = identifier[roche] . identifier[q_for_component] ( identifier[q] , identifier[comp0] )
identifier[q1] = identifier[roche] . identifier[q_for_component] ( identifier[q] , identifier[comp1] )
identifier[F0] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[starrefs] [ literal[int] ], identifier[context] = literal[string] ,** identifier[kwargs] )
identifier[F1] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[starrefs] [ literal[int] ], identifier[context] = literal[string] ,** identifier[kwargs] )
identifier[pot0] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[starrefs] [ literal[int] ], identifier[context] = literal[string] ,** identifier[kwargs] )
identifier[pot0] = identifier[roche] . identifier[pot_for_component] ( identifier[pot0] , identifier[q0] , identifier[comp0] )
identifier[pot1] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[starrefs] [ literal[int] ], identifier[context] = literal[string] ,** identifier[kwargs] )
identifier[pot1] = identifier[roche] . identifier[pot_for_component] ( identifier[pot1] , identifier[q1] , identifier[comp1] )
identifier[xrange0] = identifier[libphoebe] . identifier[roche_xrange] ( identifier[q0] , identifier[F0] , literal[int] - identifier[ecc] , identifier[pot0] + literal[int] , identifier[choice] = literal[int] )
identifier[xrange1] = identifier[libphoebe] . identifier[roche_xrange] ( identifier[q1] , identifier[F1] , literal[int] - identifier[ecc] , identifier[pot1] + literal[int] , identifier[choice] = literal[int] )
keyword[if] identifier[xrange0] [ literal[int] ]+ identifier[xrange1] [ literal[int] ]> literal[int] - identifier[ecc] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[orbitref] , identifier[orbitref] , identifier[starrefs] [ literal[int] ], identifier[starrefs] [ literal[int] ])
keyword[for] identifier[pbparam] keyword[in] identifier[self] . identifier[filter] ( identifier[qualifier] = literal[string] ). identifier[to_list] ():
identifier[pb] = identifier[pbparam] . identifier[get_value] ()
identifier[pbatms] = identifier[_pbtable] [ identifier[pb] ][ literal[string] ]
keyword[for] identifier[atmparam] keyword[in] identifier[self] . identifier[filter] ( identifier[qualifier] = literal[string] , identifier[kind] = literal[string] ). identifier[to_list] ():
identifier[atm] = identifier[atmparam] . identifier[get_value] ()
keyword[if] identifier[atm] keyword[not] keyword[in] identifier[pbatms] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[pb] , identifier[pbparam] . identifier[twig] , identifier[atm] , identifier[atmparam] . identifier[twig] )
keyword[def] identifier[ld_coeffs_len] ( identifier[ld_func] , identifier[ld_coeffs] ):
keyword[if] identifier[ld_func] == literal[string] :
keyword[return] keyword[True] ,
keyword[elif] identifier[ld_func] keyword[in] [ literal[string] ] keyword[and] identifier[len] ( identifier[ld_coeffs] )== literal[int] :
keyword[return] keyword[True] ,
keyword[elif] identifier[ld_func] keyword[in] [ literal[string] , literal[string] , literal[string] ] keyword[and] identifier[len] ( identifier[ld_coeffs] )== literal[int] :
keyword[return] keyword[True] ,
keyword[elif] identifier[ld_func] keyword[in] [ literal[string] ] keyword[and] identifier[len] ( identifier[ld_coeffs] )== literal[int] :
keyword[return] keyword[True] ,
keyword[else] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[ld_coeffs] , identifier[ld_func] )
keyword[for] identifier[component] keyword[in] identifier[self] . identifier[hierarchy] . identifier[get_stars] ():
identifier[ld_func] = identifier[str] ( identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[context] = literal[string] , identifier[check_visible] = keyword[False] ,** identifier[kwargs] ))
identifier[ld_coeffs] = identifier[np] . identifier[asarray] ( identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[context] = literal[string] , identifier[check_visible] = keyword[False] ,** identifier[kwargs] ))
identifier[check] = identifier[ld_coeffs_len] ( identifier[ld_func] , identifier[ld_coeffs] )
keyword[if] keyword[not] identifier[check] [ literal[int] ]:
keyword[return] identifier[check]
keyword[if] identifier[ld_func] != literal[string] :
identifier[check] = identifier[libphoebe] . identifier[ld_check] ( identifier[ld_func] , identifier[ld_coeffs] )
keyword[if] keyword[not] identifier[check] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[ld_coeffs] , identifier[ld_func] )
keyword[for] identifier[dataset] keyword[in] identifier[self] . identifier[datasets] :
keyword[if] identifier[dataset] == literal[string] keyword[or] identifier[self] . identifier[get_dataset] ( identifier[dataset] = identifier[dataset] , identifier[kind] = literal[string] ). identifier[kind] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
keyword[continue]
identifier[ld_func] = identifier[str] ( identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[dataset] = identifier[dataset] , identifier[component] = identifier[component] , identifier[context] = literal[string] ,** identifier[kwargs] ))
identifier[ld_coeffs] = identifier[np] . identifier[asarray] ( identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[dataset] = identifier[dataset] , identifier[component] = identifier[component] , identifier[context] = literal[string] , identifier[check_visible] = keyword[False] ,** identifier[kwargs] ))
keyword[if] identifier[ld_coeffs] keyword[is] keyword[not] keyword[None] :
identifier[check] = identifier[ld_coeffs_len] ( identifier[ld_func] , identifier[ld_coeffs] )
keyword[if] keyword[not] identifier[check] [ literal[int] ]:
keyword[return] identifier[check]
keyword[if] identifier[ld_func] != literal[string] :
identifier[check] = identifier[libphoebe] . identifier[ld_check] ( identifier[ld_func] , identifier[ld_coeffs] )
keyword[if] keyword[not] identifier[check] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[ld_coeffs] , identifier[ld_func] )
keyword[if] identifier[ld_func] == literal[string] :
keyword[for] identifier[compute] keyword[in] identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[computes] ):
identifier[atm] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[compute] = identifier[compute] , identifier[context] = literal[string] ,** identifier[kwargs] )
keyword[if] identifier[atm] != literal[string] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[component] , identifier[component] , identifier[dataset] )
keyword[for] identifier[compute] keyword[in] identifier[self] . identifier[computes] :
identifier[mesh_methods] =[ identifier[p] . identifier[get_value] () keyword[for] identifier[p] keyword[in] identifier[self] . identifier[filter] ( identifier[qualifier] = literal[string] , identifier[compute] = identifier[compute] , identifier[force_ps] = keyword[True] ). identifier[to_list] ()]
keyword[if] literal[string] keyword[in] identifier[mesh_methods] :
keyword[if] identifier[len] ( identifier[set] ( identifier[mesh_methods] ))> literal[int] :
keyword[return] keyword[False] , literal[string]
keyword[for] identifier[component] keyword[in] identifier[self] . identifier[hierarchy] . identifier[get_stars] ():
identifier[teff] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[context] = literal[string] , identifier[unit] = identifier[u] . identifier[K] ,** identifier[kwargs] )
identifier[gravb_bol] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[context] = literal[string] ,** identifier[kwargs] )
keyword[if] identifier[teff] >= literal[int] keyword[and] identifier[gravb_bol] < literal[int] :
keyword[return] keyword[None] , literal[string] . identifier[format] ( identifier[component] , identifier[teff] , identifier[gravb_bol] )
keyword[elif] identifier[teff] <= literal[int] keyword[and] identifier[gravb_bol] >= literal[int] :
keyword[return] keyword[None] , literal[string] . identifier[format] ( identifier[component] , identifier[teff] , identifier[gravb_bol] )
keyword[elif] identifier[gravb_bol] < literal[int] keyword[or] identifier[gravb_bol] > literal[int] :
keyword[return] keyword[None] , literal[string] . identifier[format] ( identifier[component] , identifier[teff] , identifier[gravb_bol] )
keyword[for] identifier[component] keyword[in] identifier[self] . identifier[hierarchy] . identifier[get_stars] ():
identifier[teff] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[context] = literal[string] , identifier[unit] = identifier[u] . identifier[K] ,** identifier[kwargs] )
identifier[irrad_frac_refl_bol] = identifier[self] . identifier[get_value] ( identifier[qualifier] = literal[string] , identifier[component] = identifier[component] , identifier[context] = literal[string] ,** identifier[kwargs] )
keyword[if] identifier[teff] >= literal[int] keyword[and] identifier[irrad_frac_refl_bol] < literal[int] :
keyword[return] keyword[None] , literal[string] . identifier[format] ( identifier[component] , identifier[teff] , identifier[irrad_frac_refl_bol] )
keyword[elif] identifier[teff] <= literal[int] keyword[and] identifier[irrad_frac_refl_bol] >= literal[int] :
keyword[return] keyword[None] , literal[string] . identifier[format] ( identifier[component] , identifier[teff] , identifier[irrad_frac_refl_bol] )
keyword[elif] identifier[irrad_frac_refl_bol] < literal[int] :
keyword[return] keyword[None] , literal[string] . identifier[format] ( identifier[component] , identifier[teff] , identifier[irrad_frac_refl_bol] )
keyword[return] keyword[True] , literal[string]
|
def run_checks(self, **kwargs):
"""
Check to see whether the system is expected to be computable.
This is called by default for each set_value but will only raise a
logger warning if fails. This is also called immediately when calling
:meth:`run_compute`.
kwargs are passed to override currently set values as if they were
sent to :meth:`run_compute`.
:return: True if passed, False if failed and a message
"""
# make sure all constraints have been run
changed_params = self.run_delayed_constraints()
hier = self.hierarchy
if hier is None:
return (True, '') # depends on [control=['if'], data=[]]
for component in hier.get_stars():
kind = hier.get_kind_of(component)
comp_ps = self.get_component(component)
if not len(comp_ps):
return (False, "component '{}' in the hierarchy is not in the bundle".format(component)) # depends on [control=['if'], data=[]]
parent = hier.get_parent_of(component)
parent_ps = self.get_component(parent)
if kind in ['star']:
# ignore the single star case
if parent:
# contact systems MUST by synchronous
if hier.is_contact_binary(component):
if self.get_value(qualifier='syncpar', component=component, context='component', **kwargs) != 1.0:
return (False, 'contact binaries must by synchronous, but syncpar@{}!=1'.format(component)) # depends on [control=['if'], data=[]]
if self.get_value(qualifier='ecc', component=parent, context='component', **kwargs) != 0.0:
return (False, 'contact binaries must by circular, but ecc@{}!=0'.format(component)) # depends on [control=['if'], data=[]]
if self.get_value(qualifier='pitch', component=component, context='component', **kwargs) != 0.0:
return (False, 'contact binaries must be aligned, but pitch@{}!=0'.format(component)) # depends on [control=['if'], data=[]]
if self.get_value(qualifier='yaw', component=component, context='component', **kwargs) != 0.0:
return (False, 'contact binaries must be aligned, but yaw@{}!=0'.format(component)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# MUST NOT be overflowing at PERIASTRON (d=1-ecc, etheta=0)
requiv = comp_ps.get_value('requiv', unit=u.solRad, **kwargs)
requiv_max = comp_ps.get_value('requiv_max', unit=u.solRad, **kwargs)
if hier.is_contact_binary(component):
if np.isnan(requiv) or requiv > requiv_max:
return (False, '{} is overflowing at L2/L3 (requiv={}, requiv_max={})'.format(component, requiv, requiv_max)) # depends on [control=['if'], data=[]]
requiv_min = comp_ps.get_value('requiv_min')
if np.isnan(requiv) or requiv <= requiv_min:
return (False, '{} is underflowing at L1 and not a contact system (requiv={}, requiv_min={})'.format(component, requiv, requiv_min)) # depends on [control=['if'], data=[]]
elif requiv <= requiv_min * 1.001:
return (False, 'requiv@{} is too close to requiv_min (within 0.1% of critical). Use detached/semidetached model instead.'.format(component)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif requiv > requiv_max:
return (False, '{} is overflowing at periastron (requiv={}, requiv_max={})'.format(component, requiv, requiv_max)) # depends on [control=['if'], data=['requiv', 'requiv_max']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
raise NotImplementedError("checks not implemented for type '{}'".format(kind)) # depends on [control=['for'], data=['component']]
# we also need to make sure that stars don't overlap each other
# so we'll check for each pair of stars (see issue #70 on github)
# TODO: rewrite overlap checks
for orbitref in []: #hier.get_orbits():
if len(hier.get_children_of(orbitref)) == 2:
q = self.get_value(qualifier='q', component=orbitref, context='component', **kwargs)
ecc = self.get_value(qualifier='ecc', component=orbitref, context='component', **kwargs)
starrefs = hier.get_children_of(orbitref)
if hier.get_kind_of(starrefs[0]) != 'star' or hier.get_kind_of(starrefs[1]) != 'star':
# print "***", hier.get_kind_of(starrefs[0]), hier.get_kind_of(starrefs[1])
continue # depends on [control=['if'], data=[]]
if self.get_value(qualifier='pitch', component=starrefs[0]) != 0.0 or self.get_value(qualifier='pitch', component=starrefs[1]) != 0.0 or self.get_value(qualifier='yaw', component=starrefs[0]) != 0.0 or (self.get_value(qualifier='yaw', component=starrefs[1]) != 0.0):
# we cannot run this test for misaligned cases
continue # depends on [control=['if'], data=[]]
comp0 = hier.get_primary_or_secondary(starrefs[0], return_ind=True)
comp1 = hier.get_primary_or_secondary(starrefs[1], return_ind=True)
q0 = roche.q_for_component(q, comp0)
q1 = roche.q_for_component(q, comp1)
F0 = self.get_value(qualifier='syncpar', component=starrefs[0], context='component', **kwargs)
F1 = self.get_value(qualifier='syncpar', component=starrefs[1], context='component', **kwargs)
pot0 = self.get_value(qualifier='pot', component=starrefs[0], context='component', **kwargs)
pot0 = roche.pot_for_component(pot0, q0, comp0)
pot1 = self.get_value(qualifier='pot', component=starrefs[1], context='component', **kwargs)
pot1 = roche.pot_for_component(pot1, q1, comp1)
xrange0 = libphoebe.roche_xrange(q0, F0, 1.0 - ecc, pot0 + 1e-06, choice=0)
xrange1 = libphoebe.roche_xrange(q1, F1, 1.0 - ecc, pot1 + 1e-06, choice=0)
if xrange0[1] + xrange1[1] > 1.0 - ecc:
return (False, 'components in {} are overlapping at periastron (change ecc@{}, syncpar@{}, or syncpar@{}).'.format(orbitref, orbitref, starrefs[0], starrefs[1])) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['orbitref']]
# check to make sure passband supports the selected atm
for pbparam in self.filter(qualifier='passband').to_list():
pb = pbparam.get_value()
pbatms = _pbtable[pb]['atms']
# NOTE: atms are not attached to datasets, but per-compute and per-component
for atmparam in self.filter(qualifier='atm', kind='phoebe').to_list():
atm = atmparam.get_value()
if atm not in pbatms:
return (False, "'{}' passband ({}) does not support atm='{}' ({}).".format(pb, pbparam.twig, atm, atmparam.twig)) # depends on [control=['if'], data=['atm']] # depends on [control=['for'], data=['atmparam']] # depends on [control=['for'], data=['pbparam']]
# check length of ld_coeffs vs ld_func and ld_func vs atm
def ld_coeffs_len(ld_func, ld_coeffs):
# current choices for ld_func are:
# ['interp', 'uniform', 'linear', 'logarithmic', 'quadratic', 'square_root', 'power', 'claret', 'hillen', 'prsa']
if ld_func == 'interp':
return (True,) # depends on [control=['if'], data=[]]
elif ld_func in ['linear'] and len(ld_coeffs) == 1:
return (True,) # depends on [control=['if'], data=[]]
elif ld_func in ['logarithmic', 'square_root', 'quadratic'] and len(ld_coeffs) == 2:
return (True,) # depends on [control=['if'], data=[]]
elif ld_func in ['power'] and len(ld_coeffs) == 4:
return (True,) # depends on [control=['if'], data=[]]
else:
return (False, "ld_coeffs={} wrong length for ld_func='{}'.".format(ld_coeffs, ld_func))
for component in self.hierarchy.get_stars():
# first check ld_coeffs_bol vs ld_func_bol
ld_func = str(self.get_value(qualifier='ld_func_bol', component=component, context='component', check_visible=False, **kwargs))
ld_coeffs = np.asarray(self.get_value(qualifier='ld_coeffs_bol', component=component, context='component', check_visible=False, **kwargs))
check = ld_coeffs_len(ld_func, ld_coeffs)
if not check[0]:
return check # depends on [control=['if'], data=[]]
if ld_func != 'interp':
check = libphoebe.ld_check(ld_func, ld_coeffs)
if not check:
return (False, "ld_coeffs_bol={} not compatible for ld_func_bol='{}'.".format(ld_coeffs, ld_func)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['ld_func']]
for dataset in self.datasets:
if dataset == '_default' or self.get_dataset(dataset=dataset, kind='*dep').kind not in ['lc_dep', 'rv_dep']:
continue # depends on [control=['if'], data=[]]
ld_func = str(self.get_value(qualifier='ld_func', dataset=dataset, component=component, context='dataset', **kwargs))
ld_coeffs = np.asarray(self.get_value(qualifier='ld_coeffs', dataset=dataset, component=component, context='dataset', check_visible=False, **kwargs))
if ld_coeffs is not None:
check = ld_coeffs_len(ld_func, ld_coeffs)
if not check[0]:
return check # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['ld_coeffs']]
if ld_func != 'interp':
check = libphoebe.ld_check(ld_func, ld_coeffs)
if not check:
return (False, "ld_coeffs={} not compatible for ld_func='{}'.".format(ld_coeffs, ld_func)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['ld_func']]
if ld_func == 'interp':
for compute in kwargs.get('computes', self.computes):
atm = self.get_value(qualifier='atm', component=component, compute=compute, context='compute', **kwargs)
if atm != 'ck2004':
return (False, "ld_func='interp' only supported by atm='ck2004'. Either change atm@{} or ld_func@{}@{}".format(component, component, dataset)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['compute']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dataset']] # depends on [control=['for'], data=['component']]
# mesh-consistency checks
for compute in self.computes:
mesh_methods = [p.get_value() for p in self.filter(qualifier='mesh_method', compute=compute, force_ps=True).to_list()]
if 'wd' in mesh_methods:
if len(set(mesh_methods)) > 1:
return (False, "all (or none) components must use mesh_method='wd'.") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['mesh_methods']] # depends on [control=['for'], data=['compute']]
#### WARNINGS ONLY ####
# let's check teff vs gravb_bol and irrad_frac_refl_bol
for component in self.hierarchy.get_stars():
teff = self.get_value(qualifier='teff', component=component, context='component', unit=u.K, **kwargs)
gravb_bol = self.get_value(qualifier='gravb_bol', component=component, context='component', **kwargs)
if teff >= 8000.0 and gravb_bol < 0.9:
return (None, "'{}' probably has a radiative atm (teff={:.0f}K>8000K), for which gravb_bol=1.00 might be a better approx than gravb_bol={:.2f}.".format(component, teff, gravb_bol)) # depends on [control=['if'], data=[]]
elif teff <= 6600.0 and gravb_bol >= 0.9:
return (None, "'{}' probably has a convective atm (teff={:.0f}K<6600K), for which gravb_bol=0.32 might be a better approx than gravb_bol={:.2f}.".format(component, teff, gravb_bol)) # depends on [control=['if'], data=[]]
elif gravb_bol < 0.32 or gravb_bol > 1.0:
return (None, "'{}' has intermittent temperature (6600K<teff={:.0f}K<8000K), gravb_bol might be better between 0.32-1.00 than gravb_bol={:.2f}.".format(component, teff, gravb_bol)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['component']]
for component in self.hierarchy.get_stars():
teff = self.get_value(qualifier='teff', component=component, context='component', unit=u.K, **kwargs)
irrad_frac_refl_bol = self.get_value(qualifier='irrad_frac_refl_bol', component=component, context='component', **kwargs)
if teff >= 8000.0 and irrad_frac_refl_bol < 0.8:
return (None, "'{}' probably has a radiative atm (teff={:.0f}K>8000K), for which irrad_frac_refl_bol=1.00 might be a better approx than irrad_frac_refl_bol={:.2f}.".format(component, teff, irrad_frac_refl_bol)) # depends on [control=['if'], data=[]]
elif teff <= 6600.0 and irrad_frac_refl_bol >= 0.75:
return (None, "'{}' probably has a convective atm (teff={:.0f}K<6600K), for which irrad_frac_refl_bol=0.6 might be a better approx than irrad_frac_refl_bol={:.2f}.".format(component, teff, irrad_frac_refl_bol)) # depends on [control=['if'], data=[]]
elif irrad_frac_refl_bol < 0.6:
return (None, "'{}' has intermittent temperature (6600K<teff={:.0f}K<8000K), irrad_frac_refl_bol might be better between 0.6-1.00 than irrad_frac_refl_bol={:.2f}.".format(component, teff, irrad_frac_refl_bol)) # depends on [control=['if'], data=['irrad_frac_refl_bol']] # depends on [control=['for'], data=['component']]
# TODO: add other checks
# - make sure all ETV components are legal
# - check for conflict between dynamics_method and mesh_method (?)
# we've survived all tests
return (True, '')
|
def FromReadings(cls, uuid, readings):
"""Generate an instance of the report format from a list of readings and a uuid
"""
if len(readings) != 1:
raise ArgumentError("IndividualReading reports must be created with exactly one reading",
num_readings=len(readings))
reading = readings[0]
data = struct.pack("<BBHLLLL", 0, 0, reading.stream, uuid, 0, reading.raw_time, reading.value)
return IndividualReadingReport(data)
|
def function[FromReadings, parameter[cls, uuid, readings]]:
constant[Generate an instance of the report format from a list of readings and a uuid
]
if compare[call[name[len], parameter[name[readings]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da20c76cca0>
variable[reading] assign[=] call[name[readings]][constant[0]]
variable[data] assign[=] call[name[struct].pack, parameter[constant[<BBHLLLL], constant[0], constant[0], name[reading].stream, name[uuid], constant[0], name[reading].raw_time, name[reading].value]]
return[call[name[IndividualReadingReport], parameter[name[data]]]]
|
keyword[def] identifier[FromReadings] ( identifier[cls] , identifier[uuid] , identifier[readings] ):
literal[string]
keyword[if] identifier[len] ( identifier[readings] )!= literal[int] :
keyword[raise] identifier[ArgumentError] ( literal[string] ,
identifier[num_readings] = identifier[len] ( identifier[readings] ))
identifier[reading] = identifier[readings] [ literal[int] ]
identifier[data] = identifier[struct] . identifier[pack] ( literal[string] , literal[int] , literal[int] , identifier[reading] . identifier[stream] , identifier[uuid] , literal[int] , identifier[reading] . identifier[raw_time] , identifier[reading] . identifier[value] )
keyword[return] identifier[IndividualReadingReport] ( identifier[data] )
|
def FromReadings(cls, uuid, readings):
"""Generate an instance of the report format from a list of readings and a uuid
"""
if len(readings) != 1:
raise ArgumentError('IndividualReading reports must be created with exactly one reading', num_readings=len(readings)) # depends on [control=['if'], data=[]]
reading = readings[0]
data = struct.pack('<BBHLLLL', 0, 0, reading.stream, uuid, 0, reading.raw_time, reading.value)
return IndividualReadingReport(data)
|
def get_icon_by_extension(fname, scale_factor):
"""Return the icon depending on the file extension"""
application_icons = {}
application_icons.update(BIN_FILES)
application_icons.update(DOCUMENT_FILES)
if osp.isdir(fname):
return icon('DirOpenIcon', scale_factor)
else:
basename = osp.basename(fname)
__, extension = osp.splitext(basename.lower())
mime_type, __ = mime.guess_type(basename)
icon_by_extension = icon('FileIcon', scale_factor)
if extension in OFFICE_FILES:
icon_by_extension = icon(OFFICE_FILES[extension], scale_factor)
if extension in LANGUAGE_ICONS:
icon_by_extension = icon(LANGUAGE_ICONS[extension], scale_factor)
else:
if extension == '.ipynb':
if is_dark_interface():
icon_by_extension = QIcon(
get_image_path('notebook_dark.svg'))
else:
icon_by_extension = QIcon(
get_image_path('notebook_light.svg'))
elif mime_type is not None:
try:
# Fix for issue 5080. Even though
# mimetypes.guess_type documentation states that
# the return value will be None or a tuple of
# the form type/subtype, in the Windows registry,
# .sql has a mimetype of text\plain
# instead of text/plain therefore mimetypes is
# returning it incorrectly.
file_type, bin_name = mime_type.split('/')
except ValueError:
file_type = 'text'
if file_type == 'text':
icon_by_extension = icon('TextFileIcon', scale_factor)
elif file_type == 'audio':
icon_by_extension = icon('AudioFileIcon', scale_factor)
elif file_type == 'video':
icon_by_extension = icon('VideoFileIcon', scale_factor)
elif file_type == 'image':
icon_by_extension = icon('ImageFileIcon', scale_factor)
elif file_type == 'application':
if bin_name in application_icons:
icon_by_extension = icon(
application_icons[bin_name], scale_factor)
return icon_by_extension
|
def function[get_icon_by_extension, parameter[fname, scale_factor]]:
constant[Return the icon depending on the file extension]
variable[application_icons] assign[=] dictionary[[], []]
call[name[application_icons].update, parameter[name[BIN_FILES]]]
call[name[application_icons].update, parameter[name[DOCUMENT_FILES]]]
if call[name[osp].isdir, parameter[name[fname]]] begin[:]
return[call[name[icon], parameter[constant[DirOpenIcon], name[scale_factor]]]]
return[name[icon_by_extension]]
|
keyword[def] identifier[get_icon_by_extension] ( identifier[fname] , identifier[scale_factor] ):
literal[string]
identifier[application_icons] ={}
identifier[application_icons] . identifier[update] ( identifier[BIN_FILES] )
identifier[application_icons] . identifier[update] ( identifier[DOCUMENT_FILES] )
keyword[if] identifier[osp] . identifier[isdir] ( identifier[fname] ):
keyword[return] identifier[icon] ( literal[string] , identifier[scale_factor] )
keyword[else] :
identifier[basename] = identifier[osp] . identifier[basename] ( identifier[fname] )
identifier[__] , identifier[extension] = identifier[osp] . identifier[splitext] ( identifier[basename] . identifier[lower] ())
identifier[mime_type] , identifier[__] = identifier[mime] . identifier[guess_type] ( identifier[basename] )
identifier[icon_by_extension] = identifier[icon] ( literal[string] , identifier[scale_factor] )
keyword[if] identifier[extension] keyword[in] identifier[OFFICE_FILES] :
identifier[icon_by_extension] = identifier[icon] ( identifier[OFFICE_FILES] [ identifier[extension] ], identifier[scale_factor] )
keyword[if] identifier[extension] keyword[in] identifier[LANGUAGE_ICONS] :
identifier[icon_by_extension] = identifier[icon] ( identifier[LANGUAGE_ICONS] [ identifier[extension] ], identifier[scale_factor] )
keyword[else] :
keyword[if] identifier[extension] == literal[string] :
keyword[if] identifier[is_dark_interface] ():
identifier[icon_by_extension] = identifier[QIcon] (
identifier[get_image_path] ( literal[string] ))
keyword[else] :
identifier[icon_by_extension] = identifier[QIcon] (
identifier[get_image_path] ( literal[string] ))
keyword[elif] identifier[mime_type] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[file_type] , identifier[bin_name] = identifier[mime_type] . identifier[split] ( literal[string] )
keyword[except] identifier[ValueError] :
identifier[file_type] = literal[string]
keyword[if] identifier[file_type] == literal[string] :
identifier[icon_by_extension] = identifier[icon] ( literal[string] , identifier[scale_factor] )
keyword[elif] identifier[file_type] == literal[string] :
identifier[icon_by_extension] = identifier[icon] ( literal[string] , identifier[scale_factor] )
keyword[elif] identifier[file_type] == literal[string] :
identifier[icon_by_extension] = identifier[icon] ( literal[string] , identifier[scale_factor] )
keyword[elif] identifier[file_type] == literal[string] :
identifier[icon_by_extension] = identifier[icon] ( literal[string] , identifier[scale_factor] )
keyword[elif] identifier[file_type] == literal[string] :
keyword[if] identifier[bin_name] keyword[in] identifier[application_icons] :
identifier[icon_by_extension] = identifier[icon] (
identifier[application_icons] [ identifier[bin_name] ], identifier[scale_factor] )
keyword[return] identifier[icon_by_extension]
|
def get_icon_by_extension(fname, scale_factor):
"""Return the icon depending on the file extension"""
application_icons = {}
application_icons.update(BIN_FILES)
application_icons.update(DOCUMENT_FILES)
if osp.isdir(fname):
return icon('DirOpenIcon', scale_factor) # depends on [control=['if'], data=[]]
else:
basename = osp.basename(fname)
(__, extension) = osp.splitext(basename.lower())
(mime_type, __) = mime.guess_type(basename)
icon_by_extension = icon('FileIcon', scale_factor)
if extension in OFFICE_FILES:
icon_by_extension = icon(OFFICE_FILES[extension], scale_factor) # depends on [control=['if'], data=['extension', 'OFFICE_FILES']]
if extension in LANGUAGE_ICONS:
icon_by_extension = icon(LANGUAGE_ICONS[extension], scale_factor) # depends on [control=['if'], data=['extension', 'LANGUAGE_ICONS']]
elif extension == '.ipynb':
if is_dark_interface():
icon_by_extension = QIcon(get_image_path('notebook_dark.svg')) # depends on [control=['if'], data=[]]
else:
icon_by_extension = QIcon(get_image_path('notebook_light.svg')) # depends on [control=['if'], data=[]]
elif mime_type is not None:
try:
# Fix for issue 5080. Even though
# mimetypes.guess_type documentation states that
# the return value will be None or a tuple of
# the form type/subtype, in the Windows registry,
# .sql has a mimetype of text\plain
# instead of text/plain therefore mimetypes is
# returning it incorrectly.
(file_type, bin_name) = mime_type.split('/') # depends on [control=['try'], data=[]]
except ValueError:
file_type = 'text' # depends on [control=['except'], data=[]]
if file_type == 'text':
icon_by_extension = icon('TextFileIcon', scale_factor) # depends on [control=['if'], data=[]]
elif file_type == 'audio':
icon_by_extension = icon('AudioFileIcon', scale_factor) # depends on [control=['if'], data=[]]
elif file_type == 'video':
icon_by_extension = icon('VideoFileIcon', scale_factor) # depends on [control=['if'], data=[]]
elif file_type == 'image':
icon_by_extension = icon('ImageFileIcon', scale_factor) # depends on [control=['if'], data=[]]
elif file_type == 'application':
if bin_name in application_icons:
icon_by_extension = icon(application_icons[bin_name], scale_factor) # depends on [control=['if'], data=['bin_name', 'application_icons']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['mime_type']]
return icon_by_extension
|
def concat(self, other, inplace=False):
'''Concatenate two ChemicalEntity of the same kind'''
# Create new entity
if inplace:
obj = self
else:
obj = self.copy()
# Stitch every attribute
for name, attr in obj.__attributes__.items():
attr.append(other.__attributes__[name])
# Stitch every relation
for name, rel in obj.__relations__.items():
rel.append(other.__relations__[name])
# Update maps
# Update dimensions
if obj.is_empty():
obj.maps = {k: m.copy() for k, m in other.maps.items()}
obj.dimensions = other.dimensions.copy()
else:
for (a, b), rel in obj.maps.items():
rel.append(other.maps[a, b])
for d in obj.dimensions:
obj.dimensions[d] += other.dimensions[d]
return obj
|
def function[concat, parameter[self, other, inplace]]:
constant[Concatenate two ChemicalEntity of the same kind]
if name[inplace] begin[:]
variable[obj] assign[=] name[self]
for taget[tuple[[<ast.Name object at 0x7da18f810a00>, <ast.Name object at 0x7da18f812470>]]] in starred[call[name[obj].__attributes__.items, parameter[]]] begin[:]
call[name[attr].append, parameter[call[name[other].__attributes__][name[name]]]]
for taget[tuple[[<ast.Name object at 0x7da18f811000>, <ast.Name object at 0x7da18f8133d0>]]] in starred[call[name[obj].__relations__.items, parameter[]]] begin[:]
call[name[rel].append, parameter[call[name[other].__relations__][name[name]]]]
if call[name[obj].is_empty, parameter[]] begin[:]
name[obj].maps assign[=] <ast.DictComp object at 0x7da18f812ec0>
name[obj].dimensions assign[=] call[name[other].dimensions.copy, parameter[]]
return[name[obj]]
|
keyword[def] identifier[concat] ( identifier[self] , identifier[other] , identifier[inplace] = keyword[False] ):
literal[string]
keyword[if] identifier[inplace] :
identifier[obj] = identifier[self]
keyword[else] :
identifier[obj] = identifier[self] . identifier[copy] ()
keyword[for] identifier[name] , identifier[attr] keyword[in] identifier[obj] . identifier[__attributes__] . identifier[items] ():
identifier[attr] . identifier[append] ( identifier[other] . identifier[__attributes__] [ identifier[name] ])
keyword[for] identifier[name] , identifier[rel] keyword[in] identifier[obj] . identifier[__relations__] . identifier[items] ():
identifier[rel] . identifier[append] ( identifier[other] . identifier[__relations__] [ identifier[name] ])
keyword[if] identifier[obj] . identifier[is_empty] ():
identifier[obj] . identifier[maps] ={ identifier[k] : identifier[m] . identifier[copy] () keyword[for] identifier[k] , identifier[m] keyword[in] identifier[other] . identifier[maps] . identifier[items] ()}
identifier[obj] . identifier[dimensions] = identifier[other] . identifier[dimensions] . identifier[copy] ()
keyword[else] :
keyword[for] ( identifier[a] , identifier[b] ), identifier[rel] keyword[in] identifier[obj] . identifier[maps] . identifier[items] ():
identifier[rel] . identifier[append] ( identifier[other] . identifier[maps] [ identifier[a] , identifier[b] ])
keyword[for] identifier[d] keyword[in] identifier[obj] . identifier[dimensions] :
identifier[obj] . identifier[dimensions] [ identifier[d] ]+= identifier[other] . identifier[dimensions] [ identifier[d] ]
keyword[return] identifier[obj]
|
def concat(self, other, inplace=False):
"""Concatenate two ChemicalEntity of the same kind"""
# Create new entity
if inplace:
obj = self # depends on [control=['if'], data=[]]
else:
obj = self.copy()
# Stitch every attribute
for (name, attr) in obj.__attributes__.items():
attr.append(other.__attributes__[name]) # depends on [control=['for'], data=[]]
# Stitch every relation
for (name, rel) in obj.__relations__.items():
rel.append(other.__relations__[name]) # depends on [control=['for'], data=[]]
# Update maps
# Update dimensions
if obj.is_empty():
obj.maps = {k: m.copy() for (k, m) in other.maps.items()}
obj.dimensions = other.dimensions.copy() # depends on [control=['if'], data=[]]
else:
for ((a, b), rel) in obj.maps.items():
rel.append(other.maps[a, b]) # depends on [control=['for'], data=[]]
for d in obj.dimensions:
obj.dimensions[d] += other.dimensions[d] # depends on [control=['for'], data=['d']]
return obj
|
def _pycall_path_simple(
x1: int, y1: int, x2: int, y2: int, handle: Any
) -> float:
"""Does less and should run faster, just calls the handle function."""
return ffi.from_handle(handle)(x1, y1, x2, y2)
|
def function[_pycall_path_simple, parameter[x1, y1, x2, y2, handle]]:
constant[Does less and should run faster, just calls the handle function.]
return[call[call[name[ffi].from_handle, parameter[name[handle]]], parameter[name[x1], name[y1], name[x2], name[y2]]]]
|
keyword[def] identifier[_pycall_path_simple] (
identifier[x1] : identifier[int] , identifier[y1] : identifier[int] , identifier[x2] : identifier[int] , identifier[y2] : identifier[int] , identifier[handle] : identifier[Any]
)-> identifier[float] :
literal[string]
keyword[return] identifier[ffi] . identifier[from_handle] ( identifier[handle] )( identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] )
|
def _pycall_path_simple(x1: int, y1: int, x2: int, y2: int, handle: Any) -> float:
"""Does less and should run faster, just calls the handle function."""
return ffi.from_handle(handle)(x1, y1, x2, y2)
|
def insert(self, context):
"""
Create resource.
:param resort.engine.execution.Context context:
Current execution context.
"""
status_code, msg = self.__endpoint.post(
"/resources/connector-resource",
data={
"id": self.__name,
"poolname": self.__pool_name
}
)
self.__available = True
|
def function[insert, parameter[self, context]]:
constant[
Create resource.
:param resort.engine.execution.Context context:
Current execution context.
]
<ast.Tuple object at 0x7da18bc72350> assign[=] call[name[self].__endpoint.post, parameter[constant[/resources/connector-resource]]]
name[self].__available assign[=] constant[True]
|
keyword[def] identifier[insert] ( identifier[self] , identifier[context] ):
literal[string]
identifier[status_code] , identifier[msg] = identifier[self] . identifier[__endpoint] . identifier[post] (
literal[string] ,
identifier[data] ={
literal[string] : identifier[self] . identifier[__name] ,
literal[string] : identifier[self] . identifier[__pool_name]
}
)
identifier[self] . identifier[__available] = keyword[True]
|
def insert(self, context):
"""
Create resource.
:param resort.engine.execution.Context context:
Current execution context.
"""
(status_code, msg) = self.__endpoint.post('/resources/connector-resource', data={'id': self.__name, 'poolname': self.__pool_name})
self.__available = True
|
def repeat(mode):
"""Change repeat mode of current player."""
message = command(protobuf.CommandInfo_pb2.ChangeShuffleMode)
send_command = message.inner()
send_command.options.externalPlayerCommand = True
send_command.options.repeatMode = mode
return message
|
def function[repeat, parameter[mode]]:
constant[Change repeat mode of current player.]
variable[message] assign[=] call[name[command], parameter[name[protobuf].CommandInfo_pb2.ChangeShuffleMode]]
variable[send_command] assign[=] call[name[message].inner, parameter[]]
name[send_command].options.externalPlayerCommand assign[=] constant[True]
name[send_command].options.repeatMode assign[=] name[mode]
return[name[message]]
|
keyword[def] identifier[repeat] ( identifier[mode] ):
literal[string]
identifier[message] = identifier[command] ( identifier[protobuf] . identifier[CommandInfo_pb2] . identifier[ChangeShuffleMode] )
identifier[send_command] = identifier[message] . identifier[inner] ()
identifier[send_command] . identifier[options] . identifier[externalPlayerCommand] = keyword[True]
identifier[send_command] . identifier[options] . identifier[repeatMode] = identifier[mode]
keyword[return] identifier[message]
|
def repeat(mode):
"""Change repeat mode of current player."""
message = command(protobuf.CommandInfo_pb2.ChangeShuffleMode)
send_command = message.inner()
send_command.options.externalPlayerCommand = True
send_command.options.repeatMode = mode
return message
|
def addDataToQueue(self, displacement, reset=False):
"""
Add the given displacement to the region's internal queue. Calls to compute
will cause items in the queue to be dequeued in FIFO order.
:param displacement: Two floats representing translation vector [dx, dy] to
be passed to the linked regions via 'dataOut'
:type displacement: list
:param reset: Reset flag to be passed to the linked regions via 'resetOut'
:type reset: bool
"""
self.queue.appendleft({
"dataOut": list(displacement),
"reset": bool(reset)
})
|
def function[addDataToQueue, parameter[self, displacement, reset]]:
constant[
Add the given displacement to the region's internal queue. Calls to compute
will cause items in the queue to be dequeued in FIFO order.
:param displacement: Two floats representing translation vector [dx, dy] to
be passed to the linked regions via 'dataOut'
:type displacement: list
:param reset: Reset flag to be passed to the linked regions via 'resetOut'
:type reset: bool
]
call[name[self].queue.appendleft, parameter[dictionary[[<ast.Constant object at 0x7da1b0900f70>, <ast.Constant object at 0x7da1b09025c0>], [<ast.Call object at 0x7da1b0902b30>, <ast.Call object at 0x7da1b0903e80>]]]]
|
keyword[def] identifier[addDataToQueue] ( identifier[self] , identifier[displacement] , identifier[reset] = keyword[False] ):
literal[string]
identifier[self] . identifier[queue] . identifier[appendleft] ({
literal[string] : identifier[list] ( identifier[displacement] ),
literal[string] : identifier[bool] ( identifier[reset] )
})
|
def addDataToQueue(self, displacement, reset=False):
"""
Add the given displacement to the region's internal queue. Calls to compute
will cause items in the queue to be dequeued in FIFO order.
:param displacement: Two floats representing translation vector [dx, dy] to
be passed to the linked regions via 'dataOut'
:type displacement: list
:param reset: Reset flag to be passed to the linked regions via 'resetOut'
:type reset: bool
"""
self.queue.appendleft({'dataOut': list(displacement), 'reset': bool(reset)})
|
def make_folium_polyline(edge, edge_color, edge_width, edge_opacity, popup_attribute=None):
"""
Turn a row from the gdf_edges GeoDataFrame into a folium PolyLine with
attributes.
Parameters
----------
edge : GeoSeries
a row from the gdf_edges GeoDataFrame
edge_color : string
color of the edge lines
edge_width : numeric
width of the edge lines
edge_opacity : numeric
opacity of the edge lines
popup_attribute : string
edge attribute to display in a pop-up when an edge is clicked, if None,
no popup
Returns
-------
pl : folium.PolyLine
"""
# check if we were able to import folium successfully
if not folium:
raise ImportError('The folium package must be installed to use this optional feature.')
# locations is a list of points for the polyline
# folium takes coords in lat,lon but geopandas provides them in lon,lat
# so we have to flip them around
locations = list([(lat, lon) for lon, lat in edge['geometry'].coords])
# if popup_attribute is None, then create no pop-up
if popup_attribute is None:
popup = None
else:
# folium doesn't interpret html in the html argument (weird), so can't
# do newlines without an iframe
popup_text = json.dumps(edge[popup_attribute])
popup = folium.Popup(html=popup_text)
# create a folium polyline with attributes
pl = folium.PolyLine(locations=locations, popup=popup,
color=edge_color, weight=edge_width, opacity=edge_opacity)
return pl
|
def function[make_folium_polyline, parameter[edge, edge_color, edge_width, edge_opacity, popup_attribute]]:
constant[
Turn a row from the gdf_edges GeoDataFrame into a folium PolyLine with
attributes.
Parameters
----------
edge : GeoSeries
a row from the gdf_edges GeoDataFrame
edge_color : string
color of the edge lines
edge_width : numeric
width of the edge lines
edge_opacity : numeric
opacity of the edge lines
popup_attribute : string
edge attribute to display in a pop-up when an edge is clicked, if None,
no popup
Returns
-------
pl : folium.PolyLine
]
if <ast.UnaryOp object at 0x7da1b1b11c60> begin[:]
<ast.Raise object at 0x7da1b1b10040>
variable[locations] assign[=] call[name[list], parameter[<ast.ListComp object at 0x7da1b1b69b40>]]
if compare[name[popup_attribute] is constant[None]] begin[:]
variable[popup] assign[=] constant[None]
variable[pl] assign[=] call[name[folium].PolyLine, parameter[]]
return[name[pl]]
|
keyword[def] identifier[make_folium_polyline] ( identifier[edge] , identifier[edge_color] , identifier[edge_width] , identifier[edge_opacity] , identifier[popup_attribute] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[folium] :
keyword[raise] identifier[ImportError] ( literal[string] )
identifier[locations] = identifier[list] ([( identifier[lat] , identifier[lon] ) keyword[for] identifier[lon] , identifier[lat] keyword[in] identifier[edge] [ literal[string] ]. identifier[coords] ])
keyword[if] identifier[popup_attribute] keyword[is] keyword[None] :
identifier[popup] = keyword[None]
keyword[else] :
identifier[popup_text] = identifier[json] . identifier[dumps] ( identifier[edge] [ identifier[popup_attribute] ])
identifier[popup] = identifier[folium] . identifier[Popup] ( identifier[html] = identifier[popup_text] )
identifier[pl] = identifier[folium] . identifier[PolyLine] ( identifier[locations] = identifier[locations] , identifier[popup] = identifier[popup] ,
identifier[color] = identifier[edge_color] , identifier[weight] = identifier[edge_width] , identifier[opacity] = identifier[edge_opacity] )
keyword[return] identifier[pl]
|
def make_folium_polyline(edge, edge_color, edge_width, edge_opacity, popup_attribute=None):
"""
Turn a row from the gdf_edges GeoDataFrame into a folium PolyLine with
attributes.
Parameters
----------
edge : GeoSeries
a row from the gdf_edges GeoDataFrame
edge_color : string
color of the edge lines
edge_width : numeric
width of the edge lines
edge_opacity : numeric
opacity of the edge lines
popup_attribute : string
edge attribute to display in a pop-up when an edge is clicked, if None,
no popup
Returns
-------
pl : folium.PolyLine
"""
# check if we were able to import folium successfully
if not folium:
raise ImportError('The folium package must be installed to use this optional feature.') # depends on [control=['if'], data=[]]
# locations is a list of points for the polyline
# folium takes coords in lat,lon but geopandas provides them in lon,lat
# so we have to flip them around
locations = list([(lat, lon) for (lon, lat) in edge['geometry'].coords])
# if popup_attribute is None, then create no pop-up
if popup_attribute is None:
popup = None # depends on [control=['if'], data=[]]
else:
# folium doesn't interpret html in the html argument (weird), so can't
# do newlines without an iframe
popup_text = json.dumps(edge[popup_attribute])
popup = folium.Popup(html=popup_text)
# create a folium polyline with attributes
pl = folium.PolyLine(locations=locations, popup=popup, color=edge_color, weight=edge_width, opacity=edge_opacity)
return pl
|
def _task_periodic(self):
"""
This is a callback that is registered to be called periodically
from the legion. The legion chooses when it might be called,
typically when it is otherwise idle.
"""
log = self._params.get('log', self._discard)
log.debug("periodic")
self.manage()
|
def function[_task_periodic, parameter[self]]:
constant[
This is a callback that is registered to be called periodically
from the legion. The legion chooses when it might be called,
typically when it is otherwise idle.
]
variable[log] assign[=] call[name[self]._params.get, parameter[constant[log], name[self]._discard]]
call[name[log].debug, parameter[constant[periodic]]]
call[name[self].manage, parameter[]]
|
keyword[def] identifier[_task_periodic] ( identifier[self] ):
literal[string]
identifier[log] = identifier[self] . identifier[_params] . identifier[get] ( literal[string] , identifier[self] . identifier[_discard] )
identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[manage] ()
|
def _task_periodic(self):
"""
This is a callback that is registered to be called periodically
from the legion. The legion chooses when it might be called,
typically when it is otherwise idle.
"""
log = self._params.get('log', self._discard)
log.debug('periodic')
self.manage()
|
def start(self):
"""
Starts running the timer. If the timer is currently running, then
this method will do nothing.
:sa stop, reset
"""
if self._timer.isActive():
return
self._starttime = datetime.datetime.now()
self._timer.start()
|
def function[start, parameter[self]]:
constant[
Starts running the timer. If the timer is currently running, then
this method will do nothing.
:sa stop, reset
]
if call[name[self]._timer.isActive, parameter[]] begin[:]
return[None]
name[self]._starttime assign[=] call[name[datetime].datetime.now, parameter[]]
call[name[self]._timer.start, parameter[]]
|
keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_timer] . identifier[isActive] ():
keyword[return]
identifier[self] . identifier[_starttime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[self] . identifier[_timer] . identifier[start] ()
|
def start(self):
"""
Starts running the timer. If the timer is currently running, then
this method will do nothing.
:sa stop, reset
"""
if self._timer.isActive():
return # depends on [control=['if'], data=[]]
self._starttime = datetime.datetime.now()
self._timer.start()
|
def initialize_hmac(self):
# type: (EncryptionMetadata) -> hmac.HMAC
"""Initialize an hmac from a signing key if it exists
:param EncryptionMetadata self: this
:rtype: hmac.HMAC or None
:return: hmac
"""
if self._signkey is not None:
return hmac.new(self._signkey, digestmod=hashlib.sha256)
else:
return None
|
def function[initialize_hmac, parameter[self]]:
constant[Initialize an hmac from a signing key if it exists
:param EncryptionMetadata self: this
:rtype: hmac.HMAC or None
:return: hmac
]
if compare[name[self]._signkey is_not constant[None]] begin[:]
return[call[name[hmac].new, parameter[name[self]._signkey]]]
|
keyword[def] identifier[initialize_hmac] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_signkey] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[hmac] . identifier[new] ( identifier[self] . identifier[_signkey] , identifier[digestmod] = identifier[hashlib] . identifier[sha256] )
keyword[else] :
keyword[return] keyword[None]
|
def initialize_hmac(self):
# type: (EncryptionMetadata) -> hmac.HMAC
'Initialize an hmac from a signing key if it exists\n :param EncryptionMetadata self: this\n :rtype: hmac.HMAC or None\n :return: hmac\n '
if self._signkey is not None:
return hmac.new(self._signkey, digestmod=hashlib.sha256) # depends on [control=['if'], data=[]]
else:
return None
|
def wrap_list(item):
"""
Returns an object as a list.
If the object is a list, it is returned directly. If it is a tuple or set, it
is returned as a list. If it is another object, it is wrapped in a list and
returned.
"""
if item is None:
return []
elif isinstance(item, list):
return item
elif isinstance(item, (tuple, set)):
return list(item)
else:
return [item]
|
def function[wrap_list, parameter[item]]:
constant[
Returns an object as a list.
If the object is a list, it is returned directly. If it is a tuple or set, it
is returned as a list. If it is another object, it is wrapped in a list and
returned.
]
if compare[name[item] is constant[None]] begin[:]
return[list[[]]]
|
keyword[def] identifier[wrap_list] ( identifier[item] ):
literal[string]
keyword[if] identifier[item] keyword[is] keyword[None] :
keyword[return] []
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[list] ):
keyword[return] identifier[item]
keyword[elif] identifier[isinstance] ( identifier[item] ,( identifier[tuple] , identifier[set] )):
keyword[return] identifier[list] ( identifier[item] )
keyword[else] :
keyword[return] [ identifier[item] ]
|
def wrap_list(item):
"""
Returns an object as a list.
If the object is a list, it is returned directly. If it is a tuple or set, it
is returned as a list. If it is another object, it is wrapped in a list and
returned.
"""
if item is None:
return [] # depends on [control=['if'], data=[]]
elif isinstance(item, list):
return item # depends on [control=['if'], data=[]]
elif isinstance(item, (tuple, set)):
return list(item) # depends on [control=['if'], data=[]]
else:
return [item]
|
def ignore_exception(exception_class):
"""A decorator that ignores `exception_class` exceptions"""
def _decorator(func):
def newfunc(*args, **kwds):
try:
return func(*args, **kwds)
except exception_class:
pass
return newfunc
return _decorator
|
def function[ignore_exception, parameter[exception_class]]:
constant[A decorator that ignores `exception_class` exceptions]
def function[_decorator, parameter[func]]:
def function[newfunc, parameter[]]:
<ast.Try object at 0x7da207f02a70>
return[name[newfunc]]
return[name[_decorator]]
|
keyword[def] identifier[ignore_exception] ( identifier[exception_class] ):
literal[string]
keyword[def] identifier[_decorator] ( identifier[func] ):
keyword[def] identifier[newfunc] (* identifier[args] ,** identifier[kwds] ):
keyword[try] :
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwds] )
keyword[except] identifier[exception_class] :
keyword[pass]
keyword[return] identifier[newfunc]
keyword[return] identifier[_decorator]
|
def ignore_exception(exception_class):
"""A decorator that ignores `exception_class` exceptions"""
def _decorator(func):
def newfunc(*args, **kwds):
try:
return func(*args, **kwds) # depends on [control=['try'], data=[]]
except exception_class:
pass # depends on [control=['except'], data=[]]
return newfunc
return _decorator
|
def DedupVcardFilenames(vcard_dict):
"""Make sure every vCard in the dictionary has a unique filename."""
remove_keys = []
add_pairs = []
for k, v in vcard_dict.items():
if not len(v) > 1:
continue
for idx, vcard in enumerate(v):
fname, ext = os.path.splitext(k)
fname = '{}-{}'.format(fname, idx + 1)
fname = fname + ext
assert fname not in vcard_dict
add_pairs.append((fname, vcard))
remove_keys.append(k)
for k, v in add_pairs:
vcard_dict[k].append(v)
for k in remove_keys:
vcard_dict.pop(k)
return vcard_dict
|
def function[DedupVcardFilenames, parameter[vcard_dict]]:
constant[Make sure every vCard in the dictionary has a unique filename.]
variable[remove_keys] assign[=] list[[]]
variable[add_pairs] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da207f9a0b0>, <ast.Name object at 0x7da207f98310>]]] in starred[call[name[vcard_dict].items, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da207f98100> begin[:]
continue
for taget[tuple[[<ast.Name object at 0x7da207f99d80>, <ast.Name object at 0x7da207f99ba0>]]] in starred[call[name[enumerate], parameter[name[v]]]] begin[:]
<ast.Tuple object at 0x7da207f99ab0> assign[=] call[name[os].path.splitext, parameter[name[k]]]
variable[fname] assign[=] call[constant[{}-{}].format, parameter[name[fname], binary_operation[name[idx] + constant[1]]]]
variable[fname] assign[=] binary_operation[name[fname] + name[ext]]
assert[compare[name[fname] <ast.NotIn object at 0x7da2590d7190> name[vcard_dict]]]
call[name[add_pairs].append, parameter[tuple[[<ast.Name object at 0x7da20c9918d0>, <ast.Name object at 0x7da20c991180>]]]]
call[name[remove_keys].append, parameter[name[k]]]
for taget[tuple[[<ast.Name object at 0x7da20c993df0>, <ast.Name object at 0x7da20c9925f0>]]] in starred[name[add_pairs]] begin[:]
call[call[name[vcard_dict]][name[k]].append, parameter[name[v]]]
for taget[name[k]] in starred[name[remove_keys]] begin[:]
call[name[vcard_dict].pop, parameter[name[k]]]
return[name[vcard_dict]]
|
keyword[def] identifier[DedupVcardFilenames] ( identifier[vcard_dict] ):
literal[string]
identifier[remove_keys] =[]
identifier[add_pairs] =[]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[vcard_dict] . identifier[items] ():
keyword[if] keyword[not] identifier[len] ( identifier[v] )> literal[int] :
keyword[continue]
keyword[for] identifier[idx] , identifier[vcard] keyword[in] identifier[enumerate] ( identifier[v] ):
identifier[fname] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[k] )
identifier[fname] = literal[string] . identifier[format] ( identifier[fname] , identifier[idx] + literal[int] )
identifier[fname] = identifier[fname] + identifier[ext]
keyword[assert] identifier[fname] keyword[not] keyword[in] identifier[vcard_dict]
identifier[add_pairs] . identifier[append] (( identifier[fname] , identifier[vcard] ))
identifier[remove_keys] . identifier[append] ( identifier[k] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[add_pairs] :
identifier[vcard_dict] [ identifier[k] ]. identifier[append] ( identifier[v] )
keyword[for] identifier[k] keyword[in] identifier[remove_keys] :
identifier[vcard_dict] . identifier[pop] ( identifier[k] )
keyword[return] identifier[vcard_dict]
|
def DedupVcardFilenames(vcard_dict):
"""Make sure every vCard in the dictionary has a unique filename."""
remove_keys = []
add_pairs = []
for (k, v) in vcard_dict.items():
if not len(v) > 1:
continue # depends on [control=['if'], data=[]]
for (idx, vcard) in enumerate(v):
(fname, ext) = os.path.splitext(k)
fname = '{}-{}'.format(fname, idx + 1)
fname = fname + ext
assert fname not in vcard_dict
add_pairs.append((fname, vcard)) # depends on [control=['for'], data=[]]
remove_keys.append(k) # depends on [control=['for'], data=[]]
for (k, v) in add_pairs:
vcard_dict[k].append(v) # depends on [control=['for'], data=[]]
for k in remove_keys:
vcard_dict.pop(k) # depends on [control=['for'], data=['k']]
return vcard_dict
|
def update_state(self, identifier, state):
"""Update state of identified model run.
Raises exception if state change results in invalid run life cycle.
Parameters
----------
identifier : string
Unique model run identifier
state : ModelRunState
Object representing new run state
Returns
-------
ModelRunHandle
Modified model run handle or None if no run with given identifier
exists
"""
# Get model run to ensure that it exists
model_run = self.get_object(identifier)
if model_run is None:
return None
# Set timestamp of state change. Raise exception if state change results
# in invalid life cycle
timestamp = str(datetime.datetime.utcnow().isoformat())
if state.is_idle:
raise ValueError('invalid state change: run cannot become idle')
elif state.is_running:
# Current state is required to be IDLE
if not model_run.state.is_idle:
raise ValueError('invalid state change: finished run cannot start again')
model_run.schedule[RUN_STARTED] = timestamp
elif state.is_failed:
# Current state is required to be RUNNING
if not (model_run.state.is_running or model_run.state.is_idle):
raise ValueError('invalid state change: cannot fail finished run')
model_run.schedule[RUN_FINISHED] = timestamp
elif state.is_success:
# Current state is required to be RUNNING
if not model_run.state.is_running:
raise ValueError('invalid state change: cannot finish inactive run')
model_run.schedule[RUN_FINISHED] = timestamp
# Update model run state and replace object in database
model_run.state = state
model_run.properties[datastore.PROPERTY_STATE] = str(state)
self.replace_object(model_run)
# Return modified model run
return model_run
|
def function[update_state, parameter[self, identifier, state]]:
constant[Update state of identified model run.
Raises exception if state change results in invalid run life cycle.
Parameters
----------
identifier : string
Unique model run identifier
state : ModelRunState
Object representing new run state
Returns
-------
ModelRunHandle
Modified model run handle or None if no run with given identifier
exists
]
variable[model_run] assign[=] call[name[self].get_object, parameter[name[identifier]]]
if compare[name[model_run] is constant[None]] begin[:]
return[constant[None]]
variable[timestamp] assign[=] call[name[str], parameter[call[call[name[datetime].datetime.utcnow, parameter[]].isoformat, parameter[]]]]
if name[state].is_idle begin[:]
<ast.Raise object at 0x7da1b15fbbb0>
name[model_run].state assign[=] name[state]
call[name[model_run].properties][name[datastore].PROPERTY_STATE] assign[=] call[name[str], parameter[name[state]]]
call[name[self].replace_object, parameter[name[model_run]]]
return[name[model_run]]
|
keyword[def] identifier[update_state] ( identifier[self] , identifier[identifier] , identifier[state] ):
literal[string]
identifier[model_run] = identifier[self] . identifier[get_object] ( identifier[identifier] )
keyword[if] identifier[model_run] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[timestamp] = identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[utcnow] (). identifier[isoformat] ())
keyword[if] identifier[state] . identifier[is_idle] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[elif] identifier[state] . identifier[is_running] :
keyword[if] keyword[not] identifier[model_run] . identifier[state] . identifier[is_idle] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[model_run] . identifier[schedule] [ identifier[RUN_STARTED] ]= identifier[timestamp]
keyword[elif] identifier[state] . identifier[is_failed] :
keyword[if] keyword[not] ( identifier[model_run] . identifier[state] . identifier[is_running] keyword[or] identifier[model_run] . identifier[state] . identifier[is_idle] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[model_run] . identifier[schedule] [ identifier[RUN_FINISHED] ]= identifier[timestamp]
keyword[elif] identifier[state] . identifier[is_success] :
keyword[if] keyword[not] identifier[model_run] . identifier[state] . identifier[is_running] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[model_run] . identifier[schedule] [ identifier[RUN_FINISHED] ]= identifier[timestamp]
identifier[model_run] . identifier[state] = identifier[state]
identifier[model_run] . identifier[properties] [ identifier[datastore] . identifier[PROPERTY_STATE] ]= identifier[str] ( identifier[state] )
identifier[self] . identifier[replace_object] ( identifier[model_run] )
keyword[return] identifier[model_run]
|
def update_state(self, identifier, state):
"""Update state of identified model run.
Raises exception if state change results in invalid run life cycle.
Parameters
----------
identifier : string
Unique model run identifier
state : ModelRunState
Object representing new run state
Returns
-------
ModelRunHandle
Modified model run handle or None if no run with given identifier
exists
"""
# Get model run to ensure that it exists
model_run = self.get_object(identifier)
if model_run is None:
return None # depends on [control=['if'], data=[]]
# Set timestamp of state change. Raise exception if state change results
# in invalid life cycle
timestamp = str(datetime.datetime.utcnow().isoformat())
if state.is_idle:
raise ValueError('invalid state change: run cannot become idle') # depends on [control=['if'], data=[]]
elif state.is_running:
# Current state is required to be IDLE
if not model_run.state.is_idle:
raise ValueError('invalid state change: finished run cannot start again') # depends on [control=['if'], data=[]]
model_run.schedule[RUN_STARTED] = timestamp # depends on [control=['if'], data=[]]
elif state.is_failed:
# Current state is required to be RUNNING
if not (model_run.state.is_running or model_run.state.is_idle):
raise ValueError('invalid state change: cannot fail finished run') # depends on [control=['if'], data=[]]
model_run.schedule[RUN_FINISHED] = timestamp # depends on [control=['if'], data=[]]
elif state.is_success:
# Current state is required to be RUNNING
if not model_run.state.is_running:
raise ValueError('invalid state change: cannot finish inactive run') # depends on [control=['if'], data=[]]
model_run.schedule[RUN_FINISHED] = timestamp # depends on [control=['if'], data=[]]
# Update model run state and replace object in database
model_run.state = state
model_run.properties[datastore.PROPERTY_STATE] = str(state)
self.replace_object(model_run)
# Return modified model run
return model_run
|
def percentage_distance(cls, highmark, current):
"""Percentage of distance the current offset is behind the highmark."""
highmark = int(highmark)
current = int(current)
if highmark > 0:
return round(
(highmark - current) * 100.0 / highmark,
2,
)
else:
return 0.0
|
def function[percentage_distance, parameter[cls, highmark, current]]:
constant[Percentage of distance the current offset is behind the highmark.]
variable[highmark] assign[=] call[name[int], parameter[name[highmark]]]
variable[current] assign[=] call[name[int], parameter[name[current]]]
if compare[name[highmark] greater[>] constant[0]] begin[:]
return[call[name[round], parameter[binary_operation[binary_operation[binary_operation[name[highmark] - name[current]] * constant[100.0]] / name[highmark]], constant[2]]]]
|
keyword[def] identifier[percentage_distance] ( identifier[cls] , identifier[highmark] , identifier[current] ):
literal[string]
identifier[highmark] = identifier[int] ( identifier[highmark] )
identifier[current] = identifier[int] ( identifier[current] )
keyword[if] identifier[highmark] > literal[int] :
keyword[return] identifier[round] (
( identifier[highmark] - identifier[current] )* literal[int] / identifier[highmark] ,
literal[int] ,
)
keyword[else] :
keyword[return] literal[int]
|
def percentage_distance(cls, highmark, current):
"""Percentage of distance the current offset is behind the highmark."""
highmark = int(highmark)
current = int(current)
if highmark > 0:
return round((highmark - current) * 100.0 / highmark, 2) # depends on [control=['if'], data=['highmark']]
else:
return 0.0
|
def parseCapabilities(self, capdict):
"""Parse a capabilities dictionary and adjust instance settings.
At the time this function is called, the user has requested some
settings (e.g., mode identifier), but we haven't yet asked the reader
whether those requested settings are within its capabilities. This
function's job is to parse the reader's capabilities, compare them
against any requested settings, and raise an error if there are any
incompatibilities.
Sets the following instance variables:
- self.antennas (list of antenna numbers, e.g., [1] or [1, 2])
- self.tx_power_table (list of dBm values)
- self.reader_mode (dictionary of mode settings, e.g., Tari)
Raises ReaderConfigurationError if the requested settings are not
within the reader's capabilities.
"""
# check requested antenna set
gdc = capdict['GeneralDeviceCapabilities']
max_ant = gdc['MaxNumberOfAntennaSupported']
if max(self.antennas) > max_ant:
reqd = ','.join(map(str, self.antennas))
avail = ','.join(map(str, range(1, max_ant + 1)))
errmsg = ('Invalid antenna set specified: requested={},'
' available={}; ignoring invalid antennas'.format(
reqd, avail))
raise ReaderConfigurationError(errmsg)
logger.debug('set antennas: %s', self.antennas)
# parse available transmit power entries, set self.tx_power
bandcap = capdict['RegulatoryCapabilities']['UHFBandCapabilities']
self.tx_power_table = self.parsePowerTable(bandcap)
logger.debug('tx_power_table: %s', self.tx_power_table)
self.setTxPower(self.tx_power)
# parse list of reader's supported mode identifiers
regcap = capdict['RegulatoryCapabilities']
modes = regcap['UHFBandCapabilities']['UHFRFModeTable']
mode_list = [modes[k] for k in sorted(modes.keys(), key=natural_keys)]
# select a mode by matching available modes to requested parameters
if self.mode_identifier is not None:
logger.debug('Setting mode from mode_identifier=%s',
self.mode_identifier)
try:
mode = [mo for mo in mode_list
if mo['ModeIdentifier'] == self.mode_identifier][0]
self.reader_mode = mode
except IndexError:
valid_modes = sorted(mo['ModeIdentifier'] for mo in mode_list)
errstr = ('Invalid mode_identifier; valid mode_identifiers'
' are {}'.format(valid_modes))
raise ReaderConfigurationError(errstr)
# if we're trying to set Tari explicitly, but the selected mode doesn't
# support the requested Tari, that's a configuration error.
if self.reader_mode and self.tari:
if self.reader_mode['MinTari'] < self.tari < self.reader_mode['MaxTari']:
logger.debug('Overriding mode Tari %s with requested Tari %s',
self.reader_mode['MaxTari'], self.tari)
else:
errstr = ('Requested Tari {} is incompatible with selected '
'mode {}'.format(self.tari, self.reader_mode))
logger.info('using reader mode: %s', self.reader_mode)
|
def function[parseCapabilities, parameter[self, capdict]]:
constant[Parse a capabilities dictionary and adjust instance settings.
At the time this function is called, the user has requested some
settings (e.g., mode identifier), but we haven't yet asked the reader
whether those requested settings are within its capabilities. This
function's job is to parse the reader's capabilities, compare them
against any requested settings, and raise an error if there are any
incompatibilities.
Sets the following instance variables:
- self.antennas (list of antenna numbers, e.g., [1] or [1, 2])
- self.tx_power_table (list of dBm values)
- self.reader_mode (dictionary of mode settings, e.g., Tari)
Raises ReaderConfigurationError if the requested settings are not
within the reader's capabilities.
]
variable[gdc] assign[=] call[name[capdict]][constant[GeneralDeviceCapabilities]]
variable[max_ant] assign[=] call[name[gdc]][constant[MaxNumberOfAntennaSupported]]
if compare[call[name[max], parameter[name[self].antennas]] greater[>] name[max_ant]] begin[:]
variable[reqd] assign[=] call[constant[,].join, parameter[call[name[map], parameter[name[str], name[self].antennas]]]]
variable[avail] assign[=] call[constant[,].join, parameter[call[name[map], parameter[name[str], call[name[range], parameter[constant[1], binary_operation[name[max_ant] + constant[1]]]]]]]]
variable[errmsg] assign[=] call[constant[Invalid antenna set specified: requested={}, available={}; ignoring invalid antennas].format, parameter[name[reqd], name[avail]]]
<ast.Raise object at 0x7da18dc05810>
call[name[logger].debug, parameter[constant[set antennas: %s], name[self].antennas]]
variable[bandcap] assign[=] call[call[name[capdict]][constant[RegulatoryCapabilities]]][constant[UHFBandCapabilities]]
name[self].tx_power_table assign[=] call[name[self].parsePowerTable, parameter[name[bandcap]]]
call[name[logger].debug, parameter[constant[tx_power_table: %s], name[self].tx_power_table]]
call[name[self].setTxPower, parameter[name[self].tx_power]]
variable[regcap] assign[=] call[name[capdict]][constant[RegulatoryCapabilities]]
variable[modes] assign[=] call[call[name[regcap]][constant[UHFBandCapabilities]]][constant[UHFRFModeTable]]
variable[mode_list] assign[=] <ast.ListComp object at 0x7da18dc07010>
if compare[name[self].mode_identifier is_not constant[None]] begin[:]
call[name[logger].debug, parameter[constant[Setting mode from mode_identifier=%s], name[self].mode_identifier]]
<ast.Try object at 0x7da18dc04e50>
if <ast.BoolOp object at 0x7da18dc053c0> begin[:]
if compare[call[name[self].reader_mode][constant[MinTari]] less[<] name[self].tari] begin[:]
call[name[logger].debug, parameter[constant[Overriding mode Tari %s with requested Tari %s], call[name[self].reader_mode][constant[MaxTari]], name[self].tari]]
call[name[logger].info, parameter[constant[using reader mode: %s], name[self].reader_mode]]
|
keyword[def] identifier[parseCapabilities] ( identifier[self] , identifier[capdict] ):
literal[string]
identifier[gdc] = identifier[capdict] [ literal[string] ]
identifier[max_ant] = identifier[gdc] [ literal[string] ]
keyword[if] identifier[max] ( identifier[self] . identifier[antennas] )> identifier[max_ant] :
identifier[reqd] = literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[self] . identifier[antennas] ))
identifier[avail] = literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[range] ( literal[int] , identifier[max_ant] + literal[int] )))
identifier[errmsg] =( literal[string]
literal[string] . identifier[format] (
identifier[reqd] , identifier[avail] ))
keyword[raise] identifier[ReaderConfigurationError] ( identifier[errmsg] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[antennas] )
identifier[bandcap] = identifier[capdict] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[tx_power_table] = identifier[self] . identifier[parsePowerTable] ( identifier[bandcap] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[tx_power_table] )
identifier[self] . identifier[setTxPower] ( identifier[self] . identifier[tx_power] )
identifier[regcap] = identifier[capdict] [ literal[string] ]
identifier[modes] = identifier[regcap] [ literal[string] ][ literal[string] ]
identifier[mode_list] =[ identifier[modes] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[sorted] ( identifier[modes] . identifier[keys] (), identifier[key] = identifier[natural_keys] )]
keyword[if] identifier[self] . identifier[mode_identifier] keyword[is] keyword[not] keyword[None] :
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[self] . identifier[mode_identifier] )
keyword[try] :
identifier[mode] =[ identifier[mo] keyword[for] identifier[mo] keyword[in] identifier[mode_list]
keyword[if] identifier[mo] [ literal[string] ]== identifier[self] . identifier[mode_identifier] ][ literal[int] ]
identifier[self] . identifier[reader_mode] = identifier[mode]
keyword[except] identifier[IndexError] :
identifier[valid_modes] = identifier[sorted] ( identifier[mo] [ literal[string] ] keyword[for] identifier[mo] keyword[in] identifier[mode_list] )
identifier[errstr] =( literal[string]
literal[string] . identifier[format] ( identifier[valid_modes] ))
keyword[raise] identifier[ReaderConfigurationError] ( identifier[errstr] )
keyword[if] identifier[self] . identifier[reader_mode] keyword[and] identifier[self] . identifier[tari] :
keyword[if] identifier[self] . identifier[reader_mode] [ literal[string] ]< identifier[self] . identifier[tari] < identifier[self] . identifier[reader_mode] [ literal[string] ]:
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[self] . identifier[reader_mode] [ literal[string] ], identifier[self] . identifier[tari] )
keyword[else] :
identifier[errstr] =( literal[string]
literal[string] . identifier[format] ( identifier[self] . identifier[tari] , identifier[self] . identifier[reader_mode] ))
identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[reader_mode] )
|
def parseCapabilities(self, capdict):
"""Parse a capabilities dictionary and adjust instance settings.
At the time this function is called, the user has requested some
settings (e.g., mode identifier), but we haven't yet asked the reader
whether those requested settings are within its capabilities. This
function's job is to parse the reader's capabilities, compare them
against any requested settings, and raise an error if there are any
incompatibilities.
Sets the following instance variables:
- self.antennas (list of antenna numbers, e.g., [1] or [1, 2])
- self.tx_power_table (list of dBm values)
- self.reader_mode (dictionary of mode settings, e.g., Tari)
Raises ReaderConfigurationError if the requested settings are not
within the reader's capabilities.
"""
# check requested antenna set
gdc = capdict['GeneralDeviceCapabilities']
max_ant = gdc['MaxNumberOfAntennaSupported']
if max(self.antennas) > max_ant:
reqd = ','.join(map(str, self.antennas))
avail = ','.join(map(str, range(1, max_ant + 1)))
errmsg = 'Invalid antenna set specified: requested={}, available={}; ignoring invalid antennas'.format(reqd, avail)
raise ReaderConfigurationError(errmsg) # depends on [control=['if'], data=['max_ant']]
logger.debug('set antennas: %s', self.antennas)
# parse available transmit power entries, set self.tx_power
bandcap = capdict['RegulatoryCapabilities']['UHFBandCapabilities']
self.tx_power_table = self.parsePowerTable(bandcap)
logger.debug('tx_power_table: %s', self.tx_power_table)
self.setTxPower(self.tx_power)
# parse list of reader's supported mode identifiers
regcap = capdict['RegulatoryCapabilities']
modes = regcap['UHFBandCapabilities']['UHFRFModeTable']
mode_list = [modes[k] for k in sorted(modes.keys(), key=natural_keys)]
# select a mode by matching available modes to requested parameters
if self.mode_identifier is not None:
logger.debug('Setting mode from mode_identifier=%s', self.mode_identifier)
try:
mode = [mo for mo in mode_list if mo['ModeIdentifier'] == self.mode_identifier][0]
self.reader_mode = mode # depends on [control=['try'], data=[]]
except IndexError:
valid_modes = sorted((mo['ModeIdentifier'] for mo in mode_list))
errstr = 'Invalid mode_identifier; valid mode_identifiers are {}'.format(valid_modes)
raise ReaderConfigurationError(errstr) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# if we're trying to set Tari explicitly, but the selected mode doesn't
# support the requested Tari, that's a configuration error.
if self.reader_mode and self.tari:
if self.reader_mode['MinTari'] < self.tari < self.reader_mode['MaxTari']:
logger.debug('Overriding mode Tari %s with requested Tari %s', self.reader_mode['MaxTari'], self.tari) # depends on [control=['if'], data=[]]
else:
errstr = 'Requested Tari {} is incompatible with selected mode {}'.format(self.tari, self.reader_mode) # depends on [control=['if'], data=[]]
logger.info('using reader mode: %s', self.reader_mode)
|
def get_es_value(obj, def_obj):
"""
Returns the value for an object that goes into the elacticsearch 'value'
field
args:
obj: data object to update
def_obj: the class instance that has defintion values
"""
def get_dict_val(item):
"""
Returns the string representation of the dict item
"""
if isinstance(item, dict):
return str(item.get('value'))
return str(item)
value_flds = []
if def_obj.es_defs.get('kds_esValue'):
value_flds = def_obj.es_defs['kds_esValue'].copy()
else:
# pdb.set_trace()
value_flds = set(obj).difference(__ALL_IGN__)
value_flds = list(value_flds)
value_flds += __COMBINED__
try:
obj['value'] = [obj.get(label) for label in value_flds
if obj.get(label)][0]
except IndexError:
obj['value'] = ", ".join(["%s: %s" % (value.get('label'),
value.get('value'))
for prop, value in obj.items()
if isinstance(value, dict) and \
value.get('label')])
if isinstance(obj['value'], list):
obj['value'] = ", ".join([get_dict_val(item) for item in obj['value']])
else:
obj['value'] = get_dict_val(obj['value'])
if str(obj['value']).strip().endswith("/"):
obj['value'] = str(obj['value']).strip()[:-1].strip()
if not obj['value']:
obj['value'] = obj.get('uri', '')
return obj
|
def function[get_es_value, parameter[obj, def_obj]]:
constant[
Returns the value for an object that goes into the elacticsearch 'value'
field
args:
obj: data object to update
def_obj: the class instance that has defintion values
]
def function[get_dict_val, parameter[item]]:
constant[
Returns the string representation of the dict item
]
if call[name[isinstance], parameter[name[item], name[dict]]] begin[:]
return[call[name[str], parameter[call[name[item].get, parameter[constant[value]]]]]]
return[call[name[str], parameter[name[item]]]]
variable[value_flds] assign[=] list[[]]
if call[name[def_obj].es_defs.get, parameter[constant[kds_esValue]]] begin[:]
variable[value_flds] assign[=] call[call[name[def_obj].es_defs][constant[kds_esValue]].copy, parameter[]]
<ast.AugAssign object at 0x7da20c6e5d20>
<ast.Try object at 0x7da20c6e76a0>
if call[name[isinstance], parameter[call[name[obj]][constant[value]], name[list]]] begin[:]
call[name[obj]][constant[value]] assign[=] call[constant[, ].join, parameter[<ast.ListComp object at 0x7da204564700>]]
if call[call[call[name[str], parameter[call[name[obj]][constant[value]]]].strip, parameter[]].endswith, parameter[constant[/]]] begin[:]
call[name[obj]][constant[value]] assign[=] call[call[call[call[name[str], parameter[call[name[obj]][constant[value]]]].strip, parameter[]]][<ast.Slice object at 0x7da204564b50>].strip, parameter[]]
if <ast.UnaryOp object at 0x7da18f720cd0> begin[:]
call[name[obj]][constant[value]] assign[=] call[name[obj].get, parameter[constant[uri], constant[]]]
return[name[obj]]
|
keyword[def] identifier[get_es_value] ( identifier[obj] , identifier[def_obj] ):
literal[string]
keyword[def] identifier[get_dict_val] ( identifier[item] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[item] , identifier[dict] ):
keyword[return] identifier[str] ( identifier[item] . identifier[get] ( literal[string] ))
keyword[return] identifier[str] ( identifier[item] )
identifier[value_flds] =[]
keyword[if] identifier[def_obj] . identifier[es_defs] . identifier[get] ( literal[string] ):
identifier[value_flds] = identifier[def_obj] . identifier[es_defs] [ literal[string] ]. identifier[copy] ()
keyword[else] :
identifier[value_flds] = identifier[set] ( identifier[obj] ). identifier[difference] ( identifier[__ALL_IGN__] )
identifier[value_flds] = identifier[list] ( identifier[value_flds] )
identifier[value_flds] += identifier[__COMBINED__]
keyword[try] :
identifier[obj] [ literal[string] ]=[ identifier[obj] . identifier[get] ( identifier[label] ) keyword[for] identifier[label] keyword[in] identifier[value_flds]
keyword[if] identifier[obj] . identifier[get] ( identifier[label] )][ literal[int] ]
keyword[except] identifier[IndexError] :
identifier[obj] [ literal[string] ]= literal[string] . identifier[join] ([ literal[string] %( identifier[value] . identifier[get] ( literal[string] ),
identifier[value] . identifier[get] ( literal[string] ))
keyword[for] identifier[prop] , identifier[value] keyword[in] identifier[obj] . identifier[items] ()
keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ) keyword[and] identifier[value] . identifier[get] ( literal[string] )])
keyword[if] identifier[isinstance] ( identifier[obj] [ literal[string] ], identifier[list] ):
identifier[obj] [ literal[string] ]= literal[string] . identifier[join] ([ identifier[get_dict_val] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[obj] [ literal[string] ]])
keyword[else] :
identifier[obj] [ literal[string] ]= identifier[get_dict_val] ( identifier[obj] [ literal[string] ])
keyword[if] identifier[str] ( identifier[obj] [ literal[string] ]). identifier[strip] (). identifier[endswith] ( literal[string] ):
identifier[obj] [ literal[string] ]= identifier[str] ( identifier[obj] [ literal[string] ]). identifier[strip] ()[:- literal[int] ]. identifier[strip] ()
keyword[if] keyword[not] identifier[obj] [ literal[string] ]:
identifier[obj] [ literal[string] ]= identifier[obj] . identifier[get] ( literal[string] , literal[string] )
keyword[return] identifier[obj]
|
def get_es_value(obj, def_obj):
"""
Returns the value for an object that goes into the elacticsearch 'value'
field
args:
obj: data object to update
def_obj: the class instance that has defintion values
"""
def get_dict_val(item):
"""
Returns the string representation of the dict item
"""
if isinstance(item, dict):
return str(item.get('value')) # depends on [control=['if'], data=[]]
return str(item)
value_flds = []
if def_obj.es_defs.get('kds_esValue'):
value_flds = def_obj.es_defs['kds_esValue'].copy() # depends on [control=['if'], data=[]]
else:
# pdb.set_trace()
value_flds = set(obj).difference(__ALL_IGN__)
value_flds = list(value_flds)
value_flds += __COMBINED__
try:
obj['value'] = [obj.get(label) for label in value_flds if obj.get(label)][0] # depends on [control=['try'], data=[]]
except IndexError:
obj['value'] = ', '.join(['%s: %s' % (value.get('label'), value.get('value')) for (prop, value) in obj.items() if isinstance(value, dict) and value.get('label')]) # depends on [control=['except'], data=[]]
if isinstance(obj['value'], list):
obj['value'] = ', '.join([get_dict_val(item) for item in obj['value']]) # depends on [control=['if'], data=[]]
else:
obj['value'] = get_dict_val(obj['value'])
if str(obj['value']).strip().endswith('/'):
obj['value'] = str(obj['value']).strip()[:-1].strip() # depends on [control=['if'], data=[]]
if not obj['value']:
obj['value'] = obj.get('uri', '') # depends on [control=['if'], data=[]]
return obj
|
def update(self, friendly_name=values.unset, max_size=values.unset):
"""
Update the QueueInstance
:param unicode friendly_name: A string to describe this resource
:param unicode max_size: The max number of calls allowed in the queue
:returns: Updated QueueInstance
:rtype: twilio.rest.api.v2010.account.queue.QueueInstance
"""
return self._proxy.update(friendly_name=friendly_name, max_size=max_size, )
|
def function[update, parameter[self, friendly_name, max_size]]:
constant[
Update the QueueInstance
:param unicode friendly_name: A string to describe this resource
:param unicode max_size: The max number of calls allowed in the queue
:returns: Updated QueueInstance
:rtype: twilio.rest.api.v2010.account.queue.QueueInstance
]
return[call[name[self]._proxy.update, parameter[]]]
|
keyword[def] identifier[update] ( identifier[self] , identifier[friendly_name] = identifier[values] . identifier[unset] , identifier[max_size] = identifier[values] . identifier[unset] ):
literal[string]
keyword[return] identifier[self] . identifier[_proxy] . identifier[update] ( identifier[friendly_name] = identifier[friendly_name] , identifier[max_size] = identifier[max_size] ,)
|
def update(self, friendly_name=values.unset, max_size=values.unset):
"""
Update the QueueInstance
:param unicode friendly_name: A string to describe this resource
:param unicode max_size: The max number of calls allowed in the queue
:returns: Updated QueueInstance
:rtype: twilio.rest.api.v2010.account.queue.QueueInstance
"""
return self._proxy.update(friendly_name=friendly_name, max_size=max_size)
|
def format(self, obj, include=None, exclude=None):
"""Return a format data dict for an object.
By default all format types will be computed.
The following MIME types are currently implemented:
* text/plain
* text/html
* text/latex
* application/json
* application/javascript
* image/png
* image/jpeg
* image/svg+xml
Parameters
----------
obj : object
The Python object whose format data will be computed.
include : list or tuple, optional
A list of format type strings (MIME types) to include in the
format data dict. If this is set *only* the format types included
in this list will be computed.
exclude : list or tuple, optional
A list of format type string (MIME types) to exclue in the format
data dict. If this is set all format types will be computed,
except for those included in this argument.
Returns
-------
format_dict : dict
A dictionary of key/value pairs, one or each format that was
generated for the object. The keys are the format types, which
will usually be MIME type strings and the values and JSON'able
data structure containing the raw data for the representation in
that format.
"""
format_dict = {}
# If plain text only is active
if self.plain_text_only:
formatter = self.formatters['text/plain']
try:
data = formatter(obj)
except:
# FIXME: log the exception
raise
if data is not None:
format_dict['text/plain'] = data
return format_dict
for format_type, formatter in self.formatters.items():
if include is not None:
if format_type not in include:
continue
if exclude is not None:
if format_type in exclude:
continue
try:
data = formatter(obj)
except:
# FIXME: log the exception
raise
if data is not None:
format_dict[format_type] = data
return format_dict
|
def function[format, parameter[self, obj, include, exclude]]:
constant[Return a format data dict for an object.
By default all format types will be computed.
The following MIME types are currently implemented:
* text/plain
* text/html
* text/latex
* application/json
* application/javascript
* image/png
* image/jpeg
* image/svg+xml
Parameters
----------
obj : object
The Python object whose format data will be computed.
include : list or tuple, optional
A list of format type strings (MIME types) to include in the
format data dict. If this is set *only* the format types included
in this list will be computed.
exclude : list or tuple, optional
A list of format type string (MIME types) to exclue in the format
data dict. If this is set all format types will be computed,
except for those included in this argument.
Returns
-------
format_dict : dict
A dictionary of key/value pairs, one or each format that was
generated for the object. The keys are the format types, which
will usually be MIME type strings and the values and JSON'able
data structure containing the raw data for the representation in
that format.
]
variable[format_dict] assign[=] dictionary[[], []]
if name[self].plain_text_only begin[:]
variable[formatter] assign[=] call[name[self].formatters][constant[text/plain]]
<ast.Try object at 0x7da18ede4670>
if compare[name[data] is_not constant[None]] begin[:]
call[name[format_dict]][constant[text/plain]] assign[=] name[data]
return[name[format_dict]]
for taget[tuple[[<ast.Name object at 0x7da18ede42e0>, <ast.Name object at 0x7da18ede7ac0>]]] in starred[call[name[self].formatters.items, parameter[]]] begin[:]
if compare[name[include] is_not constant[None]] begin[:]
if compare[name[format_type] <ast.NotIn object at 0x7da2590d7190> name[include]] begin[:]
continue
if compare[name[exclude] is_not constant[None]] begin[:]
if compare[name[format_type] in name[exclude]] begin[:]
continue
<ast.Try object at 0x7da18ede6470>
if compare[name[data] is_not constant[None]] begin[:]
call[name[format_dict]][name[format_type]] assign[=] name[data]
return[name[format_dict]]
|
keyword[def] identifier[format] ( identifier[self] , identifier[obj] , identifier[include] = keyword[None] , identifier[exclude] = keyword[None] ):
literal[string]
identifier[format_dict] ={}
keyword[if] identifier[self] . identifier[plain_text_only] :
identifier[formatter] = identifier[self] . identifier[formatters] [ literal[string] ]
keyword[try] :
identifier[data] = identifier[formatter] ( identifier[obj] )
keyword[except] :
keyword[raise]
keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] :
identifier[format_dict] [ literal[string] ]= identifier[data]
keyword[return] identifier[format_dict]
keyword[for] identifier[format_type] , identifier[formatter] keyword[in] identifier[self] . identifier[formatters] . identifier[items] ():
keyword[if] identifier[include] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[format_type] keyword[not] keyword[in] identifier[include] :
keyword[continue]
keyword[if] identifier[exclude] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[format_type] keyword[in] identifier[exclude] :
keyword[continue]
keyword[try] :
identifier[data] = identifier[formatter] ( identifier[obj] )
keyword[except] :
keyword[raise]
keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] :
identifier[format_dict] [ identifier[format_type] ]= identifier[data]
keyword[return] identifier[format_dict]
|
def format(self, obj, include=None, exclude=None):
"""Return a format data dict for an object.
By default all format types will be computed.
The following MIME types are currently implemented:
* text/plain
* text/html
* text/latex
* application/json
* application/javascript
* image/png
* image/jpeg
* image/svg+xml
Parameters
----------
obj : object
The Python object whose format data will be computed.
include : list or tuple, optional
A list of format type strings (MIME types) to include in the
format data dict. If this is set *only* the format types included
in this list will be computed.
exclude : list or tuple, optional
A list of format type string (MIME types) to exclue in the format
data dict. If this is set all format types will be computed,
except for those included in this argument.
Returns
-------
format_dict : dict
A dictionary of key/value pairs, one or each format that was
generated for the object. The keys are the format types, which
will usually be MIME type strings and the values and JSON'able
data structure containing the raw data for the representation in
that format.
"""
format_dict = {}
# If plain text only is active
if self.plain_text_only:
formatter = self.formatters['text/plain']
try:
data = formatter(obj) # depends on [control=['try'], data=[]]
except:
# FIXME: log the exception
raise # depends on [control=['except'], data=[]]
if data is not None:
format_dict['text/plain'] = data # depends on [control=['if'], data=['data']]
return format_dict # depends on [control=['if'], data=[]]
for (format_type, formatter) in self.formatters.items():
if include is not None:
if format_type not in include:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['include']]
if exclude is not None:
if format_type in exclude:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['exclude']]
try:
data = formatter(obj) # depends on [control=['try'], data=[]]
except:
# FIXME: log the exception
raise # depends on [control=['except'], data=[]]
if data is not None:
format_dict[format_type] = data # depends on [control=['if'], data=['data']] # depends on [control=['for'], data=[]]
return format_dict
|
def get_relation_count_query(self, query, parent):
"""
Add the constraints for a relationship count query.
:type query: Builder
:type parent: Builder
:rtype: Builder
"""
query = super(MorphOneOrMany, self).get_relation_count_query(query, parent)
return query.where(self._morph_type, self._morph_class)
|
def function[get_relation_count_query, parameter[self, query, parent]]:
constant[
Add the constraints for a relationship count query.
:type query: Builder
:type parent: Builder
:rtype: Builder
]
variable[query] assign[=] call[call[name[super], parameter[name[MorphOneOrMany], name[self]]].get_relation_count_query, parameter[name[query], name[parent]]]
return[call[name[query].where, parameter[name[self]._morph_type, name[self]._morph_class]]]
|
keyword[def] identifier[get_relation_count_query] ( identifier[self] , identifier[query] , identifier[parent] ):
literal[string]
identifier[query] = identifier[super] ( identifier[MorphOneOrMany] , identifier[self] ). identifier[get_relation_count_query] ( identifier[query] , identifier[parent] )
keyword[return] identifier[query] . identifier[where] ( identifier[self] . identifier[_morph_type] , identifier[self] . identifier[_morph_class] )
|
def get_relation_count_query(self, query, parent):
"""
Add the constraints for a relationship count query.
:type query: Builder
:type parent: Builder
:rtype: Builder
"""
query = super(MorphOneOrMany, self).get_relation_count_query(query, parent)
return query.where(self._morph_type, self._morph_class)
|
def connect_to_nsqd(self, host, port):
"""
Adds a connection to ``nsqd`` at the specified address.
:param host: the address to connect to
:param port: the port to connect to
"""
assert isinstance(host, string_types)
assert isinstance(port, int)
conn = AsyncConn(host, port, **self.conn_kwargs)
conn.on('identify', self._on_connection_identify)
conn.on('identify_response', self._on_connection_identify_response)
conn.on('auth', self._on_connection_auth)
conn.on('auth_response', self._on_connection_auth_response)
conn.on('error', self._on_connection_error)
conn.on('close', self._on_connection_close)
conn.on('ready', self._on_connection_ready)
conn.on('message', self._on_message)
conn.on('heartbeat', self._on_heartbeat)
conn.on('backoff', functools.partial(self._on_backoff_resume, success=False))
conn.on('resume', functools.partial(self._on_backoff_resume, success=True))
conn.on('continue', functools.partial(self._on_backoff_resume, success=None))
if conn.id in self.conns:
return
# only attempt to re-connect once every 10s per destination
# this throttles reconnects to failed endpoints
now = time.time()
last_connect_attempt = self.connection_attempts.get(conn.id)
if last_connect_attempt and last_connect_attempt > now - 10:
return
self.connection_attempts[conn.id] = now
logger.info('[%s:%s] connecting to nsqd', conn.id, self.name)
conn.connect()
return conn
|
def function[connect_to_nsqd, parameter[self, host, port]]:
constant[
Adds a connection to ``nsqd`` at the specified address.
:param host: the address to connect to
:param port: the port to connect to
]
assert[call[name[isinstance], parameter[name[host], name[string_types]]]]
assert[call[name[isinstance], parameter[name[port], name[int]]]]
variable[conn] assign[=] call[name[AsyncConn], parameter[name[host], name[port]]]
call[name[conn].on, parameter[constant[identify], name[self]._on_connection_identify]]
call[name[conn].on, parameter[constant[identify_response], name[self]._on_connection_identify_response]]
call[name[conn].on, parameter[constant[auth], name[self]._on_connection_auth]]
call[name[conn].on, parameter[constant[auth_response], name[self]._on_connection_auth_response]]
call[name[conn].on, parameter[constant[error], name[self]._on_connection_error]]
call[name[conn].on, parameter[constant[close], name[self]._on_connection_close]]
call[name[conn].on, parameter[constant[ready], name[self]._on_connection_ready]]
call[name[conn].on, parameter[constant[message], name[self]._on_message]]
call[name[conn].on, parameter[constant[heartbeat], name[self]._on_heartbeat]]
call[name[conn].on, parameter[constant[backoff], call[name[functools].partial, parameter[name[self]._on_backoff_resume]]]]
call[name[conn].on, parameter[constant[resume], call[name[functools].partial, parameter[name[self]._on_backoff_resume]]]]
call[name[conn].on, parameter[constant[continue], call[name[functools].partial, parameter[name[self]._on_backoff_resume]]]]
if compare[name[conn].id in name[self].conns] begin[:]
return[None]
variable[now] assign[=] call[name[time].time, parameter[]]
variable[last_connect_attempt] assign[=] call[name[self].connection_attempts.get, parameter[name[conn].id]]
if <ast.BoolOp object at 0x7da1b0243490> begin[:]
return[None]
call[name[self].connection_attempts][name[conn].id] assign[=] name[now]
call[name[logger].info, parameter[constant[[%s:%s] connecting to nsqd], name[conn].id, name[self].name]]
call[name[conn].connect, parameter[]]
return[name[conn]]
|
keyword[def] identifier[connect_to_nsqd] ( identifier[self] , identifier[host] , identifier[port] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[host] , identifier[string_types] )
keyword[assert] identifier[isinstance] ( identifier[port] , identifier[int] )
identifier[conn] = identifier[AsyncConn] ( identifier[host] , identifier[port] ,** identifier[self] . identifier[conn_kwargs] )
identifier[conn] . identifier[on] ( literal[string] , identifier[self] . identifier[_on_connection_identify] )
identifier[conn] . identifier[on] ( literal[string] , identifier[self] . identifier[_on_connection_identify_response] )
identifier[conn] . identifier[on] ( literal[string] , identifier[self] . identifier[_on_connection_auth] )
identifier[conn] . identifier[on] ( literal[string] , identifier[self] . identifier[_on_connection_auth_response] )
identifier[conn] . identifier[on] ( literal[string] , identifier[self] . identifier[_on_connection_error] )
identifier[conn] . identifier[on] ( literal[string] , identifier[self] . identifier[_on_connection_close] )
identifier[conn] . identifier[on] ( literal[string] , identifier[self] . identifier[_on_connection_ready] )
identifier[conn] . identifier[on] ( literal[string] , identifier[self] . identifier[_on_message] )
identifier[conn] . identifier[on] ( literal[string] , identifier[self] . identifier[_on_heartbeat] )
identifier[conn] . identifier[on] ( literal[string] , identifier[functools] . identifier[partial] ( identifier[self] . identifier[_on_backoff_resume] , identifier[success] = keyword[False] ))
identifier[conn] . identifier[on] ( literal[string] , identifier[functools] . identifier[partial] ( identifier[self] . identifier[_on_backoff_resume] , identifier[success] = keyword[True] ))
identifier[conn] . identifier[on] ( literal[string] , identifier[functools] . identifier[partial] ( identifier[self] . identifier[_on_backoff_resume] , identifier[success] = keyword[None] ))
keyword[if] identifier[conn] . identifier[id] keyword[in] identifier[self] . identifier[conns] :
keyword[return]
identifier[now] = identifier[time] . identifier[time] ()
identifier[last_connect_attempt] = identifier[self] . identifier[connection_attempts] . identifier[get] ( identifier[conn] . identifier[id] )
keyword[if] identifier[last_connect_attempt] keyword[and] identifier[last_connect_attempt] > identifier[now] - literal[int] :
keyword[return]
identifier[self] . identifier[connection_attempts] [ identifier[conn] . identifier[id] ]= identifier[now]
identifier[logger] . identifier[info] ( literal[string] , identifier[conn] . identifier[id] , identifier[self] . identifier[name] )
identifier[conn] . identifier[connect] ()
keyword[return] identifier[conn]
|
def connect_to_nsqd(self, host, port):
"""
Adds a connection to ``nsqd`` at the specified address.
:param host: the address to connect to
:param port: the port to connect to
"""
assert isinstance(host, string_types)
assert isinstance(port, int)
conn = AsyncConn(host, port, **self.conn_kwargs)
conn.on('identify', self._on_connection_identify)
conn.on('identify_response', self._on_connection_identify_response)
conn.on('auth', self._on_connection_auth)
conn.on('auth_response', self._on_connection_auth_response)
conn.on('error', self._on_connection_error)
conn.on('close', self._on_connection_close)
conn.on('ready', self._on_connection_ready)
conn.on('message', self._on_message)
conn.on('heartbeat', self._on_heartbeat)
conn.on('backoff', functools.partial(self._on_backoff_resume, success=False))
conn.on('resume', functools.partial(self._on_backoff_resume, success=True))
conn.on('continue', functools.partial(self._on_backoff_resume, success=None))
if conn.id in self.conns:
return # depends on [control=['if'], data=[]]
# only attempt to re-connect once every 10s per destination
# this throttles reconnects to failed endpoints
now = time.time()
last_connect_attempt = self.connection_attempts.get(conn.id)
if last_connect_attempt and last_connect_attempt > now - 10:
return # depends on [control=['if'], data=[]]
self.connection_attempts[conn.id] = now
logger.info('[%s:%s] connecting to nsqd', conn.id, self.name)
conn.connect()
return conn
|
def add_new_spawn_method(obj):
"""
TODO
"""
def new_spawn(self):
# TODO/FIXME: Check that this does the right thing:
# (i) the spawned generator is independent of the original one (i.e. they can be reset independently without altering the other's behaviour)
# (ii) ensure that it also works if this custom generator's __init__ requires additional arguments
new_instance = self.__class__()
return new_instance
obj._spawn = new_spawn
|
def function[add_new_spawn_method, parameter[obj]]:
constant[
TODO
]
def function[new_spawn, parameter[self]]:
variable[new_instance] assign[=] call[name[self].__class__, parameter[]]
return[name[new_instance]]
name[obj]._spawn assign[=] name[new_spawn]
|
keyword[def] identifier[add_new_spawn_method] ( identifier[obj] ):
literal[string]
keyword[def] identifier[new_spawn] ( identifier[self] ):
identifier[new_instance] = identifier[self] . identifier[__class__] ()
keyword[return] identifier[new_instance]
identifier[obj] . identifier[_spawn] = identifier[new_spawn]
|
def add_new_spawn_method(obj):
"""
TODO
"""
def new_spawn(self):
# TODO/FIXME: Check that this does the right thing:
# (i) the spawned generator is independent of the original one (i.e. they can be reset independently without altering the other's behaviour)
# (ii) ensure that it also works if this custom generator's __init__ requires additional arguments
new_instance = self.__class__()
return new_instance
obj._spawn = new_spawn
|
def validator(input_data):
"""Simple model input validator.
Validator ensures the input data array is
- two dimensional
- has the correct number of features.
"""
global data
# check num dims
if input_data.ndim != 2:
return False, 'Data should have two dimensions.'
# check number of columns
if input_data.shape[1] != data.data.shape[1]:
reason = '{} features required, {} features provided'.format(
data.data.shape[1], input_data.shape[1])
return False, reason
# validation passed
return True, None
|
def function[validator, parameter[input_data]]:
constant[Simple model input validator.
Validator ensures the input data array is
- two dimensional
- has the correct number of features.
]
<ast.Global object at 0x7da1b0653e50>
if compare[name[input_data].ndim not_equal[!=] constant[2]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b0652d10>, <ast.Constant object at 0x7da1b0650b50>]]]
if compare[call[name[input_data].shape][constant[1]] not_equal[!=] call[name[data].data.shape][constant[1]]] begin[:]
variable[reason] assign[=] call[constant[{} features required, {} features provided].format, parameter[call[name[data].data.shape][constant[1]], call[name[input_data].shape][constant[1]]]]
return[tuple[[<ast.Constant object at 0x7da1b0505e10>, <ast.Name object at 0x7da1b0505720>]]]
return[tuple[[<ast.Constant object at 0x7da1b05076d0>, <ast.Constant object at 0x7da1b0507220>]]]
|
keyword[def] identifier[validator] ( identifier[input_data] ):
literal[string]
keyword[global] identifier[data]
keyword[if] identifier[input_data] . identifier[ndim] != literal[int] :
keyword[return] keyword[False] , literal[string]
keyword[if] identifier[input_data] . identifier[shape] [ literal[int] ]!= identifier[data] . identifier[data] . identifier[shape] [ literal[int] ]:
identifier[reason] = literal[string] . identifier[format] (
identifier[data] . identifier[data] . identifier[shape] [ literal[int] ], identifier[input_data] . identifier[shape] [ literal[int] ])
keyword[return] keyword[False] , identifier[reason]
keyword[return] keyword[True] , keyword[None]
|
def validator(input_data):
"""Simple model input validator.
Validator ensures the input data array is
- two dimensional
- has the correct number of features.
"""
global data
# check num dims
if input_data.ndim != 2:
return (False, 'Data should have two dimensions.') # depends on [control=['if'], data=[]]
# check number of columns
if input_data.shape[1] != data.data.shape[1]:
reason = '{} features required, {} features provided'.format(data.data.shape[1], input_data.shape[1])
return (False, reason) # depends on [control=['if'], data=[]]
# validation passed
return (True, None)
|
def check_raw_string(self, string, is_bstring=True):
"""
Check whether the given string
is properly UTF-8 encoded (if ``is_bytes`` is ``True``),
it is not empty, and
it does not contain reserved characters.
:param string string: the byte string or Unicode string to be checked
:param bool is_bstring: if True, string is a byte string
:rtype: :class:`~aeneas.validator.ValidatorResult`
"""
self.log(u"Checking the given byte string")
self.result = ValidatorResult()
if self._are_safety_checks_disabled(u"check_raw_string"):
return self.result
if is_bstring:
self._check_utf8_encoding(string)
if not self.result.passed:
return self.result
string = gf.safe_unicode(string)
self._check_not_empty(string)
if not self.result.passed:
return self.result
self._check_reserved_characters(string)
return self.result
|
def function[check_raw_string, parameter[self, string, is_bstring]]:
constant[
Check whether the given string
is properly UTF-8 encoded (if ``is_bytes`` is ``True``),
it is not empty, and
it does not contain reserved characters.
:param string string: the byte string or Unicode string to be checked
:param bool is_bstring: if True, string is a byte string
:rtype: :class:`~aeneas.validator.ValidatorResult`
]
call[name[self].log, parameter[constant[Checking the given byte string]]]
name[self].result assign[=] call[name[ValidatorResult], parameter[]]
if call[name[self]._are_safety_checks_disabled, parameter[constant[check_raw_string]]] begin[:]
return[name[self].result]
if name[is_bstring] begin[:]
call[name[self]._check_utf8_encoding, parameter[name[string]]]
if <ast.UnaryOp object at 0x7da1b1511570> begin[:]
return[name[self].result]
variable[string] assign[=] call[name[gf].safe_unicode, parameter[name[string]]]
call[name[self]._check_not_empty, parameter[name[string]]]
if <ast.UnaryOp object at 0x7da1b1510640> begin[:]
return[name[self].result]
call[name[self]._check_reserved_characters, parameter[name[string]]]
return[name[self].result]
|
keyword[def] identifier[check_raw_string] ( identifier[self] , identifier[string] , identifier[is_bstring] = keyword[True] ):
literal[string]
identifier[self] . identifier[log] ( literal[string] )
identifier[self] . identifier[result] = identifier[ValidatorResult] ()
keyword[if] identifier[self] . identifier[_are_safety_checks_disabled] ( literal[string] ):
keyword[return] identifier[self] . identifier[result]
keyword[if] identifier[is_bstring] :
identifier[self] . identifier[_check_utf8_encoding] ( identifier[string] )
keyword[if] keyword[not] identifier[self] . identifier[result] . identifier[passed] :
keyword[return] identifier[self] . identifier[result]
identifier[string] = identifier[gf] . identifier[safe_unicode] ( identifier[string] )
identifier[self] . identifier[_check_not_empty] ( identifier[string] )
keyword[if] keyword[not] identifier[self] . identifier[result] . identifier[passed] :
keyword[return] identifier[self] . identifier[result]
identifier[self] . identifier[_check_reserved_characters] ( identifier[string] )
keyword[return] identifier[self] . identifier[result]
|
def check_raw_string(self, string, is_bstring=True):
"""
Check whether the given string
is properly UTF-8 encoded (if ``is_bytes`` is ``True``),
it is not empty, and
it does not contain reserved characters.
:param string string: the byte string or Unicode string to be checked
:param bool is_bstring: if True, string is a byte string
:rtype: :class:`~aeneas.validator.ValidatorResult`
"""
self.log(u'Checking the given byte string')
self.result = ValidatorResult()
if self._are_safety_checks_disabled(u'check_raw_string'):
return self.result # depends on [control=['if'], data=[]]
if is_bstring:
self._check_utf8_encoding(string)
if not self.result.passed:
return self.result # depends on [control=['if'], data=[]]
string = gf.safe_unicode(string) # depends on [control=['if'], data=[]]
self._check_not_empty(string)
if not self.result.passed:
return self.result # depends on [control=['if'], data=[]]
self._check_reserved_characters(string)
return self.result
|
def from_clock_time(cls, clock_time, epoch):
""" Convert from a `.ClockTime` relative to a given epoch.
"""
clock_time = ClockTime(*clock_time)
ts = clock_time.seconds % 86400
nanoseconds = int(1000000000 * ts + clock_time.nanoseconds)
return Time.from_ticks(epoch.time().ticks + nanoseconds / 1000000000)
|
def function[from_clock_time, parameter[cls, clock_time, epoch]]:
constant[ Convert from a `.ClockTime` relative to a given epoch.
]
variable[clock_time] assign[=] call[name[ClockTime], parameter[<ast.Starred object at 0x7da1b2581db0>]]
variable[ts] assign[=] binary_operation[name[clock_time].seconds <ast.Mod object at 0x7da2590d6920> constant[86400]]
variable[nanoseconds] assign[=] call[name[int], parameter[binary_operation[binary_operation[constant[1000000000] * name[ts]] + name[clock_time].nanoseconds]]]
return[call[name[Time].from_ticks, parameter[binary_operation[call[name[epoch].time, parameter[]].ticks + binary_operation[name[nanoseconds] / constant[1000000000]]]]]]
|
keyword[def] identifier[from_clock_time] ( identifier[cls] , identifier[clock_time] , identifier[epoch] ):
literal[string]
identifier[clock_time] = identifier[ClockTime] (* identifier[clock_time] )
identifier[ts] = identifier[clock_time] . identifier[seconds] % literal[int]
identifier[nanoseconds] = identifier[int] ( literal[int] * identifier[ts] + identifier[clock_time] . identifier[nanoseconds] )
keyword[return] identifier[Time] . identifier[from_ticks] ( identifier[epoch] . identifier[time] (). identifier[ticks] + identifier[nanoseconds] / literal[int] )
|
def from_clock_time(cls, clock_time, epoch):
""" Convert from a `.ClockTime` relative to a given epoch.
"""
clock_time = ClockTime(*clock_time)
ts = clock_time.seconds % 86400
nanoseconds = int(1000000000 * ts + clock_time.nanoseconds)
return Time.from_ticks(epoch.time().ticks + nanoseconds / 1000000000)
|
def register(cls, range_mixin):
"""
Decorator for registering range set mixins for global use. This works
the same as :meth:`~spans.settypes.MetaRangeSet.add`
:param range_mixin: A :class:`~spans.types.Range` mixin class to
to register a decorated range set mixin class for
:return: A decorator to use on a range set mixin class
"""
def decorator(range_set_mixin):
cls.add(range_mixin, range_set_mixin)
return range_set_mixin
return decorator
|
def function[register, parameter[cls, range_mixin]]:
constant[
Decorator for registering range set mixins for global use. This works
the same as :meth:`~spans.settypes.MetaRangeSet.add`
:param range_mixin: A :class:`~spans.types.Range` mixin class to
to register a decorated range set mixin class for
:return: A decorator to use on a range set mixin class
]
def function[decorator, parameter[range_set_mixin]]:
call[name[cls].add, parameter[name[range_mixin], name[range_set_mixin]]]
return[name[range_set_mixin]]
return[name[decorator]]
|
keyword[def] identifier[register] ( identifier[cls] , identifier[range_mixin] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[range_set_mixin] ):
identifier[cls] . identifier[add] ( identifier[range_mixin] , identifier[range_set_mixin] )
keyword[return] identifier[range_set_mixin]
keyword[return] identifier[decorator]
|
def register(cls, range_mixin):
"""
Decorator for registering range set mixins for global use. This works
the same as :meth:`~spans.settypes.MetaRangeSet.add`
:param range_mixin: A :class:`~spans.types.Range` mixin class to
to register a decorated range set mixin class for
:return: A decorator to use on a range set mixin class
"""
def decorator(range_set_mixin):
cls.add(range_mixin, range_set_mixin)
return range_set_mixin
return decorator
|
def delete_and_upload_images(client, image_type, language, base_dir):
"""
Delete and upload images with given image_type and language.
Function will stage delete and stage upload all
found images in matching folders.
"""
print('{0} {1}'.format(image_type, language))
files_in_dir = os.listdir(os.path.join(base_dir, language))
delete_result = client.deleteall(
'images', imageType=image_type, language=language)
deleted = delete_result.get('deleted', list())
for deleted_files in deleted:
print(' delete image: {0}'.format(deleted_files['id']))
for image_file in files_in_dir[:8]:
image_file_path = os.path.join(base_dir, language, image_file)
image_response = client.upload(
'images',
imageType=image_type,
language=language,
media_body=image_file_path)
print(" upload image {0} new id {1}".format(image_file, image_response['image']['id']))
|
def function[delete_and_upload_images, parameter[client, image_type, language, base_dir]]:
constant[
Delete and upload images with given image_type and language.
Function will stage delete and stage upload all
found images in matching folders.
]
call[name[print], parameter[call[constant[{0} {1}].format, parameter[name[image_type], name[language]]]]]
variable[files_in_dir] assign[=] call[name[os].listdir, parameter[call[name[os].path.join, parameter[name[base_dir], name[language]]]]]
variable[delete_result] assign[=] call[name[client].deleteall, parameter[constant[images]]]
variable[deleted] assign[=] call[name[delete_result].get, parameter[constant[deleted], call[name[list], parameter[]]]]
for taget[name[deleted_files]] in starred[name[deleted]] begin[:]
call[name[print], parameter[call[constant[ delete image: {0}].format, parameter[call[name[deleted_files]][constant[id]]]]]]
for taget[name[image_file]] in starred[call[name[files_in_dir]][<ast.Slice object at 0x7da18f58de40>]] begin[:]
variable[image_file_path] assign[=] call[name[os].path.join, parameter[name[base_dir], name[language], name[image_file]]]
variable[image_response] assign[=] call[name[client].upload, parameter[constant[images]]]
call[name[print], parameter[call[constant[ upload image {0} new id {1}].format, parameter[name[image_file], call[call[name[image_response]][constant[image]]][constant[id]]]]]]
|
keyword[def] identifier[delete_and_upload_images] ( identifier[client] , identifier[image_type] , identifier[language] , identifier[base_dir] ):
literal[string]
identifier[print] ( literal[string] . identifier[format] ( identifier[image_type] , identifier[language] ))
identifier[files_in_dir] = identifier[os] . identifier[listdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[base_dir] , identifier[language] ))
identifier[delete_result] = identifier[client] . identifier[deleteall] (
literal[string] , identifier[imageType] = identifier[image_type] , identifier[language] = identifier[language] )
identifier[deleted] = identifier[delete_result] . identifier[get] ( literal[string] , identifier[list] ())
keyword[for] identifier[deleted_files] keyword[in] identifier[deleted] :
identifier[print] ( literal[string] . identifier[format] ( identifier[deleted_files] [ literal[string] ]))
keyword[for] identifier[image_file] keyword[in] identifier[files_in_dir] [: literal[int] ]:
identifier[image_file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[base_dir] , identifier[language] , identifier[image_file] )
identifier[image_response] = identifier[client] . identifier[upload] (
literal[string] ,
identifier[imageType] = identifier[image_type] ,
identifier[language] = identifier[language] ,
identifier[media_body] = identifier[image_file_path] )
identifier[print] ( literal[string] . identifier[format] ( identifier[image_file] , identifier[image_response] [ literal[string] ][ literal[string] ]))
|
def delete_and_upload_images(client, image_type, language, base_dir):
"""
Delete and upload images with given image_type and language.
Function will stage delete and stage upload all
found images in matching folders.
"""
print('{0} {1}'.format(image_type, language))
files_in_dir = os.listdir(os.path.join(base_dir, language))
delete_result = client.deleteall('images', imageType=image_type, language=language)
deleted = delete_result.get('deleted', list())
for deleted_files in deleted:
print(' delete image: {0}'.format(deleted_files['id'])) # depends on [control=['for'], data=['deleted_files']]
for image_file in files_in_dir[:8]:
image_file_path = os.path.join(base_dir, language, image_file)
image_response = client.upload('images', imageType=image_type, language=language, media_body=image_file_path)
print(' upload image {0} new id {1}'.format(image_file, image_response['image']['id'])) # depends on [control=['for'], data=['image_file']]
|
def predict(self, X1new, X2new):
"""
Return the predictive mean and variance at a series of new points X1new, X2new
Only returns the diagonal of the predictive variance, for now.
:param X1new: The points at which to make a prediction
:type X1new: np.ndarray, Nnew x self.input_dim1
:param X2new: The points at which to make a prediction
:type X2new: np.ndarray, Nnew x self.input_dim2
"""
k1xf = self.kern1.K(X1new, self.X1)
k2xf = self.kern2.K(X2new, self.X2)
A = k1xf.dot(self.U1)
B = k2xf.dot(self.U2)
mu = A.dot(self.Ytilde.reshape(self.num_data1, self.num_data2, order='F')).dot(B.T).flatten(order='F')
k1xx = self.kern1.Kdiag(X1new)
k2xx = self.kern2.Kdiag(X2new)
BA = np.kron(B, A)
var = np.kron(k2xx, k1xx) - np.sum(BA**2*self.Wi, 1) + self.likelihood.variance
return mu[:, None], var[:, None]
|
def function[predict, parameter[self, X1new, X2new]]:
constant[
Return the predictive mean and variance at a series of new points X1new, X2new
Only returns the diagonal of the predictive variance, for now.
:param X1new: The points at which to make a prediction
:type X1new: np.ndarray, Nnew x self.input_dim1
:param X2new: The points at which to make a prediction
:type X2new: np.ndarray, Nnew x self.input_dim2
]
variable[k1xf] assign[=] call[name[self].kern1.K, parameter[name[X1new], name[self].X1]]
variable[k2xf] assign[=] call[name[self].kern2.K, parameter[name[X2new], name[self].X2]]
variable[A] assign[=] call[name[k1xf].dot, parameter[name[self].U1]]
variable[B] assign[=] call[name[k2xf].dot, parameter[name[self].U2]]
variable[mu] assign[=] call[call[call[name[A].dot, parameter[call[name[self].Ytilde.reshape, parameter[name[self].num_data1, name[self].num_data2]]]].dot, parameter[name[B].T]].flatten, parameter[]]
variable[k1xx] assign[=] call[name[self].kern1.Kdiag, parameter[name[X1new]]]
variable[k2xx] assign[=] call[name[self].kern2.Kdiag, parameter[name[X2new]]]
variable[BA] assign[=] call[name[np].kron, parameter[name[B], name[A]]]
variable[var] assign[=] binary_operation[binary_operation[call[name[np].kron, parameter[name[k2xx], name[k1xx]]] - call[name[np].sum, parameter[binary_operation[binary_operation[name[BA] ** constant[2]] * name[self].Wi], constant[1]]]] + name[self].likelihood.variance]
return[tuple[[<ast.Subscript object at 0x7da1b1cac550>, <ast.Subscript object at 0x7da1b1cacaf0>]]]
|
keyword[def] identifier[predict] ( identifier[self] , identifier[X1new] , identifier[X2new] ):
literal[string]
identifier[k1xf] = identifier[self] . identifier[kern1] . identifier[K] ( identifier[X1new] , identifier[self] . identifier[X1] )
identifier[k2xf] = identifier[self] . identifier[kern2] . identifier[K] ( identifier[X2new] , identifier[self] . identifier[X2] )
identifier[A] = identifier[k1xf] . identifier[dot] ( identifier[self] . identifier[U1] )
identifier[B] = identifier[k2xf] . identifier[dot] ( identifier[self] . identifier[U2] )
identifier[mu] = identifier[A] . identifier[dot] ( identifier[self] . identifier[Ytilde] . identifier[reshape] ( identifier[self] . identifier[num_data1] , identifier[self] . identifier[num_data2] , identifier[order] = literal[string] )). identifier[dot] ( identifier[B] . identifier[T] ). identifier[flatten] ( identifier[order] = literal[string] )
identifier[k1xx] = identifier[self] . identifier[kern1] . identifier[Kdiag] ( identifier[X1new] )
identifier[k2xx] = identifier[self] . identifier[kern2] . identifier[Kdiag] ( identifier[X2new] )
identifier[BA] = identifier[np] . identifier[kron] ( identifier[B] , identifier[A] )
identifier[var] = identifier[np] . identifier[kron] ( identifier[k2xx] , identifier[k1xx] )- identifier[np] . identifier[sum] ( identifier[BA] ** literal[int] * identifier[self] . identifier[Wi] , literal[int] )+ identifier[self] . identifier[likelihood] . identifier[variance]
keyword[return] identifier[mu] [:, keyword[None] ], identifier[var] [:, keyword[None] ]
|
def predict(self, X1new, X2new):
"""
Return the predictive mean and variance at a series of new points X1new, X2new
Only returns the diagonal of the predictive variance, for now.
:param X1new: The points at which to make a prediction
:type X1new: np.ndarray, Nnew x self.input_dim1
:param X2new: The points at which to make a prediction
:type X2new: np.ndarray, Nnew x self.input_dim2
"""
k1xf = self.kern1.K(X1new, self.X1)
k2xf = self.kern2.K(X2new, self.X2)
A = k1xf.dot(self.U1)
B = k2xf.dot(self.U2)
mu = A.dot(self.Ytilde.reshape(self.num_data1, self.num_data2, order='F')).dot(B.T).flatten(order='F')
k1xx = self.kern1.Kdiag(X1new)
k2xx = self.kern2.Kdiag(X2new)
BA = np.kron(B, A)
var = np.kron(k2xx, k1xx) - np.sum(BA ** 2 * self.Wi, 1) + self.likelihood.variance
return (mu[:, None], var[:, None])
|
def load_stack(stack):
"""
Loads the saved state of a CallStack and returns a whole instance given an instance with incomplete state.
:param caliendo.hooks.CallStack stack: The stack to load
:returns: A CallStack previously built in the context of a patch call.
:rtype: caliendo.hooks.CallStack
"""
global CACHE_
load_cache(True)
key = "{0}.{1}".format(stack.module, stack.caller)
if key in CACHE_['stacks']:
return pickle.loads(CACHE_['stacks'][key])
|
def function[load_stack, parameter[stack]]:
constant[
Loads the saved state of a CallStack and returns a whole instance given an instance with incomplete state.
:param caliendo.hooks.CallStack stack: The stack to load
:returns: A CallStack previously built in the context of a patch call.
:rtype: caliendo.hooks.CallStack
]
<ast.Global object at 0x7da1b09badd0>
call[name[load_cache], parameter[constant[True]]]
variable[key] assign[=] call[constant[{0}.{1}].format, parameter[name[stack].module, name[stack].caller]]
if compare[name[key] in call[name[CACHE_]][constant[stacks]]] begin[:]
return[call[name[pickle].loads, parameter[call[call[name[CACHE_]][constant[stacks]]][name[key]]]]]
|
keyword[def] identifier[load_stack] ( identifier[stack] ):
literal[string]
keyword[global] identifier[CACHE_]
identifier[load_cache] ( keyword[True] )
identifier[key] = literal[string] . identifier[format] ( identifier[stack] . identifier[module] , identifier[stack] . identifier[caller] )
keyword[if] identifier[key] keyword[in] identifier[CACHE_] [ literal[string] ]:
keyword[return] identifier[pickle] . identifier[loads] ( identifier[CACHE_] [ literal[string] ][ identifier[key] ])
|
def load_stack(stack):
"""
Loads the saved state of a CallStack and returns a whole instance given an instance with incomplete state.
:param caliendo.hooks.CallStack stack: The stack to load
:returns: A CallStack previously built in the context of a patch call.
:rtype: caliendo.hooks.CallStack
"""
global CACHE_
load_cache(True)
key = '{0}.{1}'.format(stack.module, stack.caller)
if key in CACHE_['stacks']:
return pickle.loads(CACHE_['stacks'][key]) # depends on [control=['if'], data=['key']]
|
def __add_relation(self, relation):
"""
<parRelation id="maz3377.1000" type="sequential">
<nucleus id="maz3377.1"/>
<nucleus id="maz3377.2"/>
</parRelation>
"""
rel_id = self.ns + ':' + relation.attrib['id']
rel_name = relation.attrib['type']
rel_type = relation.tag
self.add_node(rel_id, layers={self.ns, self.ns+':relation'},
attr_dict={self.ns+':rel_name': rel_name,
self.ns+':rel_type': rel_type})
rel_attrs = {self.ns+':rel_name': rel_name,
self.ns+':rel_type': rel_type,
'label': self.ns+':'+rel_name}
if rel_type == 'parRelation': # relation between two or more nucleii
for nucleus in relation:
nucleus_id = self.ns + ':' + nucleus.attrib['id']
self.add_edge(rel_id, nucleus_id, layers={self.ns},
attr_dict=rel_attrs,
edge_type=EdgeTypes.spanning_relation)
elif rel_type == 'hypRelation': # between nucleus and satellite
hyp_error = ("<hypRelation> can only contain one nucleus and one"
"satellite: {}".format(etree.tostring(relation)))
rel_elems = {elem.tag: elem.attrib['id'] for elem in relation}
assert len(relation) == 2, hyp_error
assert set(rel_elems.keys()) == {'nucleus', 'satellite'}, hyp_error
# add dominance from relation root node to nucleus
nucleus_id = self.ns + ':' + rel_elems['nucleus']
self.add_edge(rel_id, nucleus_id, layers={self.ns},
attr_dict=rel_attrs,
edge_type=EdgeTypes.dominance_relation)
# add dominance from nucleus to satellite
satellite_id = self.ns + ':' + rel_elems['satellite']
self.add_edge(nucleus_id, satellite_id,
layers={self.ns}, attr_dict=rel_attrs,
edge_type=EdgeTypes.dominance_relation)
else: # <relation>, <span>
raise NotImplementedError
|
def function[__add_relation, parameter[self, relation]]:
constant[
<parRelation id="maz3377.1000" type="sequential">
<nucleus id="maz3377.1"/>
<nucleus id="maz3377.2"/>
</parRelation>
]
variable[rel_id] assign[=] binary_operation[binary_operation[name[self].ns + constant[:]] + call[name[relation].attrib][constant[id]]]
variable[rel_name] assign[=] call[name[relation].attrib][constant[type]]
variable[rel_type] assign[=] name[relation].tag
call[name[self].add_node, parameter[name[rel_id]]]
variable[rel_attrs] assign[=] dictionary[[<ast.BinOp object at 0x7da1b264b520>, <ast.BinOp object at 0x7da1b264b340>, <ast.Constant object at 0x7da1b264aec0>], [<ast.Name object at 0x7da1b264a2f0>, <ast.Name object at 0x7da1b264b040>, <ast.BinOp object at 0x7da1b264b1c0>]]
if compare[name[rel_type] equal[==] constant[parRelation]] begin[:]
for taget[name[nucleus]] in starred[name[relation]] begin[:]
variable[nucleus_id] assign[=] binary_operation[binary_operation[name[self].ns + constant[:]] + call[name[nucleus].attrib][constant[id]]]
call[name[self].add_edge, parameter[name[rel_id], name[nucleus_id]]]
|
keyword[def] identifier[__add_relation] ( identifier[self] , identifier[relation] ):
literal[string]
identifier[rel_id] = identifier[self] . identifier[ns] + literal[string] + identifier[relation] . identifier[attrib] [ literal[string] ]
identifier[rel_name] = identifier[relation] . identifier[attrib] [ literal[string] ]
identifier[rel_type] = identifier[relation] . identifier[tag]
identifier[self] . identifier[add_node] ( identifier[rel_id] , identifier[layers] ={ identifier[self] . identifier[ns] , identifier[self] . identifier[ns] + literal[string] },
identifier[attr_dict] ={ identifier[self] . identifier[ns] + literal[string] : identifier[rel_name] ,
identifier[self] . identifier[ns] + literal[string] : identifier[rel_type] })
identifier[rel_attrs] ={ identifier[self] . identifier[ns] + literal[string] : identifier[rel_name] ,
identifier[self] . identifier[ns] + literal[string] : identifier[rel_type] ,
literal[string] : identifier[self] . identifier[ns] + literal[string] + identifier[rel_name] }
keyword[if] identifier[rel_type] == literal[string] :
keyword[for] identifier[nucleus] keyword[in] identifier[relation] :
identifier[nucleus_id] = identifier[self] . identifier[ns] + literal[string] + identifier[nucleus] . identifier[attrib] [ literal[string] ]
identifier[self] . identifier[add_edge] ( identifier[rel_id] , identifier[nucleus_id] , identifier[layers] ={ identifier[self] . identifier[ns] },
identifier[attr_dict] = identifier[rel_attrs] ,
identifier[edge_type] = identifier[EdgeTypes] . identifier[spanning_relation] )
keyword[elif] identifier[rel_type] == literal[string] :
identifier[hyp_error] =( literal[string]
literal[string] . identifier[format] ( identifier[etree] . identifier[tostring] ( identifier[relation] )))
identifier[rel_elems] ={ identifier[elem] . identifier[tag] : identifier[elem] . identifier[attrib] [ literal[string] ] keyword[for] identifier[elem] keyword[in] identifier[relation] }
keyword[assert] identifier[len] ( identifier[relation] )== literal[int] , identifier[hyp_error]
keyword[assert] identifier[set] ( identifier[rel_elems] . identifier[keys] ())=={ literal[string] , literal[string] }, identifier[hyp_error]
identifier[nucleus_id] = identifier[self] . identifier[ns] + literal[string] + identifier[rel_elems] [ literal[string] ]
identifier[self] . identifier[add_edge] ( identifier[rel_id] , identifier[nucleus_id] , identifier[layers] ={ identifier[self] . identifier[ns] },
identifier[attr_dict] = identifier[rel_attrs] ,
identifier[edge_type] = identifier[EdgeTypes] . identifier[dominance_relation] )
identifier[satellite_id] = identifier[self] . identifier[ns] + literal[string] + identifier[rel_elems] [ literal[string] ]
identifier[self] . identifier[add_edge] ( identifier[nucleus_id] , identifier[satellite_id] ,
identifier[layers] ={ identifier[self] . identifier[ns] }, identifier[attr_dict] = identifier[rel_attrs] ,
identifier[edge_type] = identifier[EdgeTypes] . identifier[dominance_relation] )
keyword[else] :
keyword[raise] identifier[NotImplementedError]
|
def __add_relation(self, relation):
"""
<parRelation id="maz3377.1000" type="sequential">
<nucleus id="maz3377.1"/>
<nucleus id="maz3377.2"/>
</parRelation>
"""
rel_id = self.ns + ':' + relation.attrib['id']
rel_name = relation.attrib['type']
rel_type = relation.tag
self.add_node(rel_id, layers={self.ns, self.ns + ':relation'}, attr_dict={self.ns + ':rel_name': rel_name, self.ns + ':rel_type': rel_type})
rel_attrs = {self.ns + ':rel_name': rel_name, self.ns + ':rel_type': rel_type, 'label': self.ns + ':' + rel_name}
if rel_type == 'parRelation': # relation between two or more nucleii
for nucleus in relation:
nucleus_id = self.ns + ':' + nucleus.attrib['id']
self.add_edge(rel_id, nucleus_id, layers={self.ns}, attr_dict=rel_attrs, edge_type=EdgeTypes.spanning_relation) # depends on [control=['for'], data=['nucleus']] # depends on [control=['if'], data=[]]
elif rel_type == 'hypRelation': # between nucleus and satellite
hyp_error = '<hypRelation> can only contain one nucleus and onesatellite: {}'.format(etree.tostring(relation))
rel_elems = {elem.tag: elem.attrib['id'] for elem in relation}
assert len(relation) == 2, hyp_error
assert set(rel_elems.keys()) == {'nucleus', 'satellite'}, hyp_error
# add dominance from relation root node to nucleus
nucleus_id = self.ns + ':' + rel_elems['nucleus']
self.add_edge(rel_id, nucleus_id, layers={self.ns}, attr_dict=rel_attrs, edge_type=EdgeTypes.dominance_relation)
# add dominance from nucleus to satellite
satellite_id = self.ns + ':' + rel_elems['satellite']
self.add_edge(nucleus_id, satellite_id, layers={self.ns}, attr_dict=rel_attrs, edge_type=EdgeTypes.dominance_relation) # depends on [control=['if'], data=[]]
else: # <relation>, <span>
raise NotImplementedError
|
def last_post(self):
""" Returns the latest post associated with the node or one of its descendants. """
posts = [n.last_post for n in self.children if n.last_post is not None]
children_last_post = max(posts, key=lambda p: p.created) if posts else None
if children_last_post and self.obj.last_post_id:
return max(self.obj.last_post, children_last_post, key=lambda p: p.created)
return children_last_post or self.obj.last_post
|
def function[last_post, parameter[self]]:
constant[ Returns the latest post associated with the node or one of its descendants. ]
variable[posts] assign[=] <ast.ListComp object at 0x7da1b2346980>
variable[children_last_post] assign[=] <ast.IfExp object at 0x7da1b23455d0>
if <ast.BoolOp object at 0x7da1b2347b50> begin[:]
return[call[name[max], parameter[name[self].obj.last_post, name[children_last_post]]]]
return[<ast.BoolOp object at 0x7da1b23460b0>]
|
keyword[def] identifier[last_post] ( identifier[self] ):
literal[string]
identifier[posts] =[ identifier[n] . identifier[last_post] keyword[for] identifier[n] keyword[in] identifier[self] . identifier[children] keyword[if] identifier[n] . identifier[last_post] keyword[is] keyword[not] keyword[None] ]
identifier[children_last_post] = identifier[max] ( identifier[posts] , identifier[key] = keyword[lambda] identifier[p] : identifier[p] . identifier[created] ) keyword[if] identifier[posts] keyword[else] keyword[None]
keyword[if] identifier[children_last_post] keyword[and] identifier[self] . identifier[obj] . identifier[last_post_id] :
keyword[return] identifier[max] ( identifier[self] . identifier[obj] . identifier[last_post] , identifier[children_last_post] , identifier[key] = keyword[lambda] identifier[p] : identifier[p] . identifier[created] )
keyword[return] identifier[children_last_post] keyword[or] identifier[self] . identifier[obj] . identifier[last_post]
|
def last_post(self):
""" Returns the latest post associated with the node or one of its descendants. """
posts = [n.last_post for n in self.children if n.last_post is not None]
children_last_post = max(posts, key=lambda p: p.created) if posts else None
if children_last_post and self.obj.last_post_id:
return max(self.obj.last_post, children_last_post, key=lambda p: p.created) # depends on [control=['if'], data=[]]
return children_last_post or self.obj.last_post
|
def _get_pkg_ds_avail():
'''
Get the package information of the available packages, maintained by dselect.
Note, this will be not very useful, if dselect isn't installed.
:return:
'''
avail = "/var/lib/dpkg/available"
if not salt.utils.path.which('dselect') or not os.path.exists(avail):
return dict()
# Do not update with dselect, just read what is.
ret = dict()
pkg_mrk = "Package:"
pkg_name = "package"
with salt.utils.files.fopen(avail) as fp_:
for pkg_info in salt.utils.stringutils.to_unicode(fp_.read()).split(pkg_mrk):
nfo = dict()
for line in (pkg_mrk + pkg_info).split(os.linesep):
line = line.split(": ", 1)
if len(line) != 2:
continue
key, value = line
if value.strip():
nfo[key.lower()] = value
if nfo.get(pkg_name):
ret[nfo[pkg_name]] = nfo
return ret
|
def function[_get_pkg_ds_avail, parameter[]]:
constant[
Get the package information of the available packages, maintained by dselect.
Note, this will be not very useful, if dselect isn't installed.
:return:
]
variable[avail] assign[=] constant[/var/lib/dpkg/available]
if <ast.BoolOp object at 0x7da18eb55870> begin[:]
return[call[name[dict], parameter[]]]
variable[ret] assign[=] call[name[dict], parameter[]]
variable[pkg_mrk] assign[=] constant[Package:]
variable[pkg_name] assign[=] constant[package]
with call[name[salt].utils.files.fopen, parameter[name[avail]]] begin[:]
for taget[name[pkg_info]] in starred[call[call[name[salt].utils.stringutils.to_unicode, parameter[call[name[fp_].read, parameter[]]]].split, parameter[name[pkg_mrk]]]] begin[:]
variable[nfo] assign[=] call[name[dict], parameter[]]
for taget[name[line]] in starred[call[binary_operation[name[pkg_mrk] + name[pkg_info]].split, parameter[name[os].linesep]]] begin[:]
variable[line] assign[=] call[name[line].split, parameter[constant[: ], constant[1]]]
if compare[call[name[len], parameter[name[line]]] not_equal[!=] constant[2]] begin[:]
continue
<ast.Tuple object at 0x7da1b1c461d0> assign[=] name[line]
if call[name[value].strip, parameter[]] begin[:]
call[name[nfo]][call[name[key].lower, parameter[]]] assign[=] name[value]
if call[name[nfo].get, parameter[name[pkg_name]]] begin[:]
call[name[ret]][call[name[nfo]][name[pkg_name]]] assign[=] name[nfo]
return[name[ret]]
|
keyword[def] identifier[_get_pkg_ds_avail] ():
literal[string]
identifier[avail] = literal[string]
keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ) keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[avail] ):
keyword[return] identifier[dict] ()
identifier[ret] = identifier[dict] ()
identifier[pkg_mrk] = literal[string]
identifier[pkg_name] = literal[string]
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[avail] ) keyword[as] identifier[fp_] :
keyword[for] identifier[pkg_info] keyword[in] identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[fp_] . identifier[read] ()). identifier[split] ( identifier[pkg_mrk] ):
identifier[nfo] = identifier[dict] ()
keyword[for] identifier[line] keyword[in] ( identifier[pkg_mrk] + identifier[pkg_info] ). identifier[split] ( identifier[os] . identifier[linesep] ):
identifier[line] = identifier[line] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[len] ( identifier[line] )!= literal[int] :
keyword[continue]
identifier[key] , identifier[value] = identifier[line]
keyword[if] identifier[value] . identifier[strip] ():
identifier[nfo] [ identifier[key] . identifier[lower] ()]= identifier[value]
keyword[if] identifier[nfo] . identifier[get] ( identifier[pkg_name] ):
identifier[ret] [ identifier[nfo] [ identifier[pkg_name] ]]= identifier[nfo]
keyword[return] identifier[ret]
|
def _get_pkg_ds_avail():
"""
Get the package information of the available packages, maintained by dselect.
Note, this will be not very useful, if dselect isn't installed.
:return:
"""
avail = '/var/lib/dpkg/available'
if not salt.utils.path.which('dselect') or not os.path.exists(avail):
return dict() # depends on [control=['if'], data=[]]
# Do not update with dselect, just read what is.
ret = dict()
pkg_mrk = 'Package:'
pkg_name = 'package'
with salt.utils.files.fopen(avail) as fp_:
for pkg_info in salt.utils.stringutils.to_unicode(fp_.read()).split(pkg_mrk):
nfo = dict()
for line in (pkg_mrk + pkg_info).split(os.linesep):
line = line.split(': ', 1)
if len(line) != 2:
continue # depends on [control=['if'], data=[]]
(key, value) = line
if value.strip():
nfo[key.lower()] = value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
if nfo.get(pkg_name):
ret[nfo[pkg_name]] = nfo # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pkg_info']] # depends on [control=['with'], data=['fp_']]
return ret
|
def _send_and_wait(self, **kwargs):
"""
Send a frame to either the local ZigBee or a remote device and wait
for a pre-defined amount of time for its response.
"""
frame_id = self.next_frame_id
kwargs.update(dict(frame_id=frame_id))
self._send(**kwargs)
timeout = datetime.now() + const.RX_TIMEOUT
while datetime.now() < timeout:
try:
frame = self._rx_frames.pop(frame_id)
raise_if_error(frame)
return frame
except KeyError:
sleep(0.1)
continue
_LOGGER.exception(
"Did not receive response within configured timeout period.")
raise exceptions.ZigBeeResponseTimeout()
|
def function[_send_and_wait, parameter[self]]:
constant[
Send a frame to either the local ZigBee or a remote device and wait
for a pre-defined amount of time for its response.
]
variable[frame_id] assign[=] name[self].next_frame_id
call[name[kwargs].update, parameter[call[name[dict], parameter[]]]]
call[name[self]._send, parameter[]]
variable[timeout] assign[=] binary_operation[call[name[datetime].now, parameter[]] + name[const].RX_TIMEOUT]
while compare[call[name[datetime].now, parameter[]] less[<] name[timeout]] begin[:]
<ast.Try object at 0x7da1b23d21d0>
call[name[_LOGGER].exception, parameter[constant[Did not receive response within configured timeout period.]]]
<ast.Raise object at 0x7da1b23d1510>
|
keyword[def] identifier[_send_and_wait] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[frame_id] = identifier[self] . identifier[next_frame_id]
identifier[kwargs] . identifier[update] ( identifier[dict] ( identifier[frame_id] = identifier[frame_id] ))
identifier[self] . identifier[_send] (** identifier[kwargs] )
identifier[timeout] = identifier[datetime] . identifier[now] ()+ identifier[const] . identifier[RX_TIMEOUT]
keyword[while] identifier[datetime] . identifier[now] ()< identifier[timeout] :
keyword[try] :
identifier[frame] = identifier[self] . identifier[_rx_frames] . identifier[pop] ( identifier[frame_id] )
identifier[raise_if_error] ( identifier[frame] )
keyword[return] identifier[frame]
keyword[except] identifier[KeyError] :
identifier[sleep] ( literal[int] )
keyword[continue]
identifier[_LOGGER] . identifier[exception] (
literal[string] )
keyword[raise] identifier[exceptions] . identifier[ZigBeeResponseTimeout] ()
|
def _send_and_wait(self, **kwargs):
"""
Send a frame to either the local ZigBee or a remote device and wait
for a pre-defined amount of time for its response.
"""
frame_id = self.next_frame_id
kwargs.update(dict(frame_id=frame_id))
self._send(**kwargs)
timeout = datetime.now() + const.RX_TIMEOUT
while datetime.now() < timeout:
try:
frame = self._rx_frames.pop(frame_id)
raise_if_error(frame)
return frame # depends on [control=['try'], data=[]]
except KeyError:
sleep(0.1)
continue # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
_LOGGER.exception('Did not receive response within configured timeout period.')
raise exceptions.ZigBeeResponseTimeout()
|
def update(self, render, force = False):
"""
Update view
@param render: IRender
@param force: force update
"""
if not force:
return;
drawArea = QtGui.QImage(self._width, self._height, render.getImageFormat())
drawArea.fill(self._backgroundColor)
with QtGui.QPainter(drawArea) as qp:
qp.setFont(self._font)
qp.setPen(self._fontColor)
qp.drawText(drawArea.rect(), QtCore.Qt.AlignCenter, self._label)
render.drawImage(drawArea)
|
def function[update, parameter[self, render, force]]:
constant[
Update view
@param render: IRender
@param force: force update
]
if <ast.UnaryOp object at 0x7da18ede4b50> begin[:]
return[None]
variable[drawArea] assign[=] call[name[QtGui].QImage, parameter[name[self]._width, name[self]._height, call[name[render].getImageFormat, parameter[]]]]
call[name[drawArea].fill, parameter[name[self]._backgroundColor]]
with call[name[QtGui].QPainter, parameter[name[drawArea]]] begin[:]
call[name[qp].setFont, parameter[name[self]._font]]
call[name[qp].setPen, parameter[name[self]._fontColor]]
call[name[qp].drawText, parameter[call[name[drawArea].rect, parameter[]], name[QtCore].Qt.AlignCenter, name[self]._label]]
call[name[render].drawImage, parameter[name[drawArea]]]
|
keyword[def] identifier[update] ( identifier[self] , identifier[render] , identifier[force] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[force] :
keyword[return] ;
identifier[drawArea] = identifier[QtGui] . identifier[QImage] ( identifier[self] . identifier[_width] , identifier[self] . identifier[_height] , identifier[render] . identifier[getImageFormat] ())
identifier[drawArea] . identifier[fill] ( identifier[self] . identifier[_backgroundColor] )
keyword[with] identifier[QtGui] . identifier[QPainter] ( identifier[drawArea] ) keyword[as] identifier[qp] :
identifier[qp] . identifier[setFont] ( identifier[self] . identifier[_font] )
identifier[qp] . identifier[setPen] ( identifier[self] . identifier[_fontColor] )
identifier[qp] . identifier[drawText] ( identifier[drawArea] . identifier[rect] (), identifier[QtCore] . identifier[Qt] . identifier[AlignCenter] , identifier[self] . identifier[_label] )
identifier[render] . identifier[drawImage] ( identifier[drawArea] )
|
def update(self, render, force=False):
"""
Update view
@param render: IRender
@param force: force update
"""
if not force:
return # depends on [control=['if'], data=[]]
drawArea = QtGui.QImage(self._width, self._height, render.getImageFormat())
drawArea.fill(self._backgroundColor)
with QtGui.QPainter(drawArea) as qp:
qp.setFont(self._font)
qp.setPen(self._fontColor)
qp.drawText(drawArea.rect(), QtCore.Qt.AlignCenter, self._label) # depends on [control=['with'], data=['qp']]
render.drawImage(drawArea)
|
def stop(args):
"""
%prog stop
Stop EC2 instance.
"""
p = OptionParser(stop.__doc__)
p.add_option("--profile", default="mvrad-datasci-role", help="Profile name")
opts, args = p.parse_args(args)
if len(args) != 0:
sys.exit(not p.print_help())
role(["htang"])
session = boto3.Session(profile_name=opts.profile)
client = session.client('ec2')
s = InstanceSkeleton()
# Make sure the instance id is NOT empty
instance_id = s.instance_id
if instance_id == "":
logging.error("Cannot find instance_id {}".format(instance_id))
sys.exit(1)
block_device_mappings = []
for volume in s.volumes:
block_device_mappings.append(
{
"DeviceName": volume["Device"],
"NoDevice": ""
}
)
new_image_name = "htang-dev-{}-{}".format(timestamp(), int(time.time()))
response = client.create_image(
InstanceId=instance_id,
Name=new_image_name,
BlockDeviceMappings=block_device_mappings
)
print(response, file=sys.stderr)
new_image_id = response["ImageId"]
image_status = ""
while image_status != "available":
logging.debug("Waiting for image to be ready")
time.sleep(10)
response = client.describe_images(ImageIds=[new_image_id])
image_status = response["Images"][0]["State"]
# Delete old image, snapshot and shut down instance
old_image_id = s.image_id
response = client.describe_images(ImageIds=[old_image_id])
old_snapshot_id = response["Images"][0]["BlockDeviceMappings"][0]["Ebs"]["SnapshotId"]
response = client.deregister_image(ImageId=old_image_id)
print(response, file=sys.stderr)
response = client.delete_snapshot(SnapshotId=old_snapshot_id)
print(response, file=sys.stderr)
response = client.terminate_instances(InstanceIds=[instance_id])
print(response, file=sys.stderr)
# Save new image id
s.save_image_id(new_image_id)
s.save_instance_id("", "")
|
def function[stop, parameter[args]]:
constant[
%prog stop
Stop EC2 instance.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[stop].__doc__]]
call[name[p].add_option, parameter[constant[--profile]]]
<ast.Tuple object at 0x7da18f8113f0> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[0]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da18f8133d0>]]
call[name[role], parameter[list[[<ast.Constant object at 0x7da20c6e6050>]]]]
variable[session] assign[=] call[name[boto3].Session, parameter[]]
variable[client] assign[=] call[name[session].client, parameter[constant[ec2]]]
variable[s] assign[=] call[name[InstanceSkeleton], parameter[]]
variable[instance_id] assign[=] name[s].instance_id
if compare[name[instance_id] equal[==] constant[]] begin[:]
call[name[logging].error, parameter[call[constant[Cannot find instance_id {}].format, parameter[name[instance_id]]]]]
call[name[sys].exit, parameter[constant[1]]]
variable[block_device_mappings] assign[=] list[[]]
for taget[name[volume]] in starred[name[s].volumes] begin[:]
call[name[block_device_mappings].append, parameter[dictionary[[<ast.Constant object at 0x7da20c6e61a0>, <ast.Constant object at 0x7da20c6e6b90>], [<ast.Subscript object at 0x7da20c6e5780>, <ast.Constant object at 0x7da20c6e5840>]]]]
variable[new_image_name] assign[=] call[constant[htang-dev-{}-{}].format, parameter[call[name[timestamp], parameter[]], call[name[int], parameter[call[name[time].time, parameter[]]]]]]
variable[response] assign[=] call[name[client].create_image, parameter[]]
call[name[print], parameter[name[response]]]
variable[new_image_id] assign[=] call[name[response]][constant[ImageId]]
variable[image_status] assign[=] constant[]
while compare[name[image_status] not_equal[!=] constant[available]] begin[:]
call[name[logging].debug, parameter[constant[Waiting for image to be ready]]]
call[name[time].sleep, parameter[constant[10]]]
variable[response] assign[=] call[name[client].describe_images, parameter[]]
variable[image_status] assign[=] call[call[call[name[response]][constant[Images]]][constant[0]]][constant[State]]
variable[old_image_id] assign[=] name[s].image_id
variable[response] assign[=] call[name[client].describe_images, parameter[]]
variable[old_snapshot_id] assign[=] call[call[call[call[call[call[name[response]][constant[Images]]][constant[0]]][constant[BlockDeviceMappings]]][constant[0]]][constant[Ebs]]][constant[SnapshotId]]
variable[response] assign[=] call[name[client].deregister_image, parameter[]]
call[name[print], parameter[name[response]]]
variable[response] assign[=] call[name[client].delete_snapshot, parameter[]]
call[name[print], parameter[name[response]]]
variable[response] assign[=] call[name[client].terminate_instances, parameter[]]
call[name[print], parameter[name[response]]]
call[name[s].save_image_id, parameter[name[new_image_id]]]
call[name[s].save_instance_id, parameter[constant[], constant[]]]
|
keyword[def] identifier[stop] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[stop] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[role] ([ literal[string] ])
identifier[session] = identifier[boto3] . identifier[Session] ( identifier[profile_name] = identifier[opts] . identifier[profile] )
identifier[client] = identifier[session] . identifier[client] ( literal[string] )
identifier[s] = identifier[InstanceSkeleton] ()
identifier[instance_id] = identifier[s] . identifier[instance_id]
keyword[if] identifier[instance_id] == literal[string] :
identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[instance_id] ))
identifier[sys] . identifier[exit] ( literal[int] )
identifier[block_device_mappings] =[]
keyword[for] identifier[volume] keyword[in] identifier[s] . identifier[volumes] :
identifier[block_device_mappings] . identifier[append] (
{
literal[string] : identifier[volume] [ literal[string] ],
literal[string] : literal[string]
}
)
identifier[new_image_name] = literal[string] . identifier[format] ( identifier[timestamp] (), identifier[int] ( identifier[time] . identifier[time] ()))
identifier[response] = identifier[client] . identifier[create_image] (
identifier[InstanceId] = identifier[instance_id] ,
identifier[Name] = identifier[new_image_name] ,
identifier[BlockDeviceMappings] = identifier[block_device_mappings]
)
identifier[print] ( identifier[response] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[new_image_id] = identifier[response] [ literal[string] ]
identifier[image_status] = literal[string]
keyword[while] identifier[image_status] != literal[string] :
identifier[logging] . identifier[debug] ( literal[string] )
identifier[time] . identifier[sleep] ( literal[int] )
identifier[response] = identifier[client] . identifier[describe_images] ( identifier[ImageIds] =[ identifier[new_image_id] ])
identifier[image_status] = identifier[response] [ literal[string] ][ literal[int] ][ literal[string] ]
identifier[old_image_id] = identifier[s] . identifier[image_id]
identifier[response] = identifier[client] . identifier[describe_images] ( identifier[ImageIds] =[ identifier[old_image_id] ])
identifier[old_snapshot_id] = identifier[response] [ literal[string] ][ literal[int] ][ literal[string] ][ literal[int] ][ literal[string] ][ literal[string] ]
identifier[response] = identifier[client] . identifier[deregister_image] ( identifier[ImageId] = identifier[old_image_id] )
identifier[print] ( identifier[response] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[response] = identifier[client] . identifier[delete_snapshot] ( identifier[SnapshotId] = identifier[old_snapshot_id] )
identifier[print] ( identifier[response] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[response] = identifier[client] . identifier[terminate_instances] ( identifier[InstanceIds] =[ identifier[instance_id] ])
identifier[print] ( identifier[response] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[s] . identifier[save_image_id] ( identifier[new_image_id] )
identifier[s] . identifier[save_instance_id] ( literal[string] , literal[string] )
|
def stop(args):
"""
%prog stop
Stop EC2 instance.
"""
p = OptionParser(stop.__doc__)
p.add_option('--profile', default='mvrad-datasci-role', help='Profile name')
(opts, args) = p.parse_args(args)
if len(args) != 0:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
role(['htang'])
session = boto3.Session(profile_name=opts.profile)
client = session.client('ec2')
s = InstanceSkeleton()
# Make sure the instance id is NOT empty
instance_id = s.instance_id
if instance_id == '':
logging.error('Cannot find instance_id {}'.format(instance_id))
sys.exit(1) # depends on [control=['if'], data=['instance_id']]
block_device_mappings = []
for volume in s.volumes:
block_device_mappings.append({'DeviceName': volume['Device'], 'NoDevice': ''}) # depends on [control=['for'], data=['volume']]
new_image_name = 'htang-dev-{}-{}'.format(timestamp(), int(time.time()))
response = client.create_image(InstanceId=instance_id, Name=new_image_name, BlockDeviceMappings=block_device_mappings)
print(response, file=sys.stderr)
new_image_id = response['ImageId']
image_status = ''
while image_status != 'available':
logging.debug('Waiting for image to be ready')
time.sleep(10)
response = client.describe_images(ImageIds=[new_image_id])
image_status = response['Images'][0]['State'] # depends on [control=['while'], data=['image_status']]
# Delete old image, snapshot and shut down instance
old_image_id = s.image_id
response = client.describe_images(ImageIds=[old_image_id])
old_snapshot_id = response['Images'][0]['BlockDeviceMappings'][0]['Ebs']['SnapshotId']
response = client.deregister_image(ImageId=old_image_id)
print(response, file=sys.stderr)
response = client.delete_snapshot(SnapshotId=old_snapshot_id)
print(response, file=sys.stderr)
response = client.terminate_instances(InstanceIds=[instance_id])
print(response, file=sys.stderr)
# Save new image id
s.save_image_id(new_image_id)
s.save_instance_id('', '')
|
def convert_to_html(self,
file,
filename=None,
file_content_type=None,
model=None,
**kwargs):
"""
Convert document to HTML.
Converts a document to HTML.
:param file file: The document to convert.
:param str filename: The filename for file.
:param str file_content_type: The content type of file.
:param str model: The analysis model to be used by the service. For the **Element
classification** and **Compare two documents** methods, the default is
`contracts`. For the **Extract tables** method, the default is `tables`. These
defaults apply to the standalone methods as well as to the methods' use in
batch-processing requests.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
if file is None:
raise ValueError('file must be provided')
headers = {}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
sdk_headers = get_sdk_headers('compare-comply', 'V1', 'convert_to_html')
headers.update(sdk_headers)
params = {'version': self.version, 'model': model}
form_data = {}
if not filename and hasattr(file, 'name'):
filename = basename(file.name)
if not filename:
raise ValueError('filename must be provided')
form_data['file'] = (filename, file, file_content_type or
'application/octet-stream')
url = '/v1/html_conversion'
response = self.request(
method='POST',
url=url,
headers=headers,
params=params,
files=form_data,
accept_json=True)
return response
|
def function[convert_to_html, parameter[self, file, filename, file_content_type, model]]:
constant[
Convert document to HTML.
Converts a document to HTML.
:param file file: The document to convert.
:param str filename: The filename for file.
:param str file_content_type: The content type of file.
:param str model: The analysis model to be used by the service. For the **Element
classification** and **Compare two documents** methods, the default is
`contracts`. For the **Extract tables** method, the default is `tables`. These
defaults apply to the standalone methods as well as to the methods' use in
batch-processing requests.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
]
if compare[name[file] is constant[None]] begin[:]
<ast.Raise object at 0x7da2044c2a70>
variable[headers] assign[=] dictionary[[], []]
if compare[constant[headers] in name[kwargs]] begin[:]
call[name[headers].update, parameter[call[name[kwargs].get, parameter[constant[headers]]]]]
variable[sdk_headers] assign[=] call[name[get_sdk_headers], parameter[constant[compare-comply], constant[V1], constant[convert_to_html]]]
call[name[headers].update, parameter[name[sdk_headers]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da204962680>, <ast.Constant object at 0x7da2049612a0>], [<ast.Attribute object at 0x7da204962c50>, <ast.Name object at 0x7da204960a30>]]
variable[form_data] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da2054a4f70> begin[:]
variable[filename] assign[=] call[name[basename], parameter[name[file].name]]
if <ast.UnaryOp object at 0x7da2054a6260> begin[:]
<ast.Raise object at 0x7da2054a66e0>
call[name[form_data]][constant[file]] assign[=] tuple[[<ast.Name object at 0x7da2054a7340>, <ast.Name object at 0x7da2054a78e0>, <ast.BoolOp object at 0x7da2054a4d30>]]
variable[url] assign[=] constant[/v1/html_conversion]
variable[response] assign[=] call[name[self].request, parameter[]]
return[name[response]]
|
keyword[def] identifier[convert_to_html] ( identifier[self] ,
identifier[file] ,
identifier[filename] = keyword[None] ,
identifier[file_content_type] = keyword[None] ,
identifier[model] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
keyword[if] identifier[file] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[headers] ={}
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[headers] . identifier[update] ( identifier[kwargs] . identifier[get] ( literal[string] ))
identifier[sdk_headers] = identifier[get_sdk_headers] ( literal[string] , literal[string] , literal[string] )
identifier[headers] . identifier[update] ( identifier[sdk_headers] )
identifier[params] ={ literal[string] : identifier[self] . identifier[version] , literal[string] : identifier[model] }
identifier[form_data] ={}
keyword[if] keyword[not] identifier[filename] keyword[and] identifier[hasattr] ( identifier[file] , literal[string] ):
identifier[filename] = identifier[basename] ( identifier[file] . identifier[name] )
keyword[if] keyword[not] identifier[filename] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[form_data] [ literal[string] ]=( identifier[filename] , identifier[file] , identifier[file_content_type] keyword[or]
literal[string] )
identifier[url] = literal[string]
identifier[response] = identifier[self] . identifier[request] (
identifier[method] = literal[string] ,
identifier[url] = identifier[url] ,
identifier[headers] = identifier[headers] ,
identifier[params] = identifier[params] ,
identifier[files] = identifier[form_data] ,
identifier[accept_json] = keyword[True] )
keyword[return] identifier[response]
|
def convert_to_html(self, file, filename=None, file_content_type=None, model=None, **kwargs):
"""
Convert document to HTML.
Converts a document to HTML.
:param file file: The document to convert.
:param str filename: The filename for file.
:param str file_content_type: The content type of file.
:param str model: The analysis model to be used by the service. For the **Element
classification** and **Compare two documents** methods, the default is
`contracts`. For the **Extract tables** method, the default is `tables`. These
defaults apply to the standalone methods as well as to the methods' use in
batch-processing requests.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
if file is None:
raise ValueError('file must be provided') # depends on [control=['if'], data=[]]
headers = {}
if 'headers' in kwargs:
headers.update(kwargs.get('headers')) # depends on [control=['if'], data=['kwargs']]
sdk_headers = get_sdk_headers('compare-comply', 'V1', 'convert_to_html')
headers.update(sdk_headers)
params = {'version': self.version, 'model': model}
form_data = {}
if not filename and hasattr(file, 'name'):
filename = basename(file.name) # depends on [control=['if'], data=[]]
if not filename:
raise ValueError('filename must be provided') # depends on [control=['if'], data=[]]
form_data['file'] = (filename, file, file_content_type or 'application/octet-stream')
url = '/v1/html_conversion'
response = self.request(method='POST', url=url, headers=headers, params=params, files=form_data, accept_json=True)
return response
|
def from_unit_cube(self, x):
"""
Used by multinest
:param x: 0 < x < 1
:param lower_bound:
:param upper_bound:
:return:
"""
cosdec_min = np.cos(deg2rad*(90.0 + self.lower_bound.value))
cosdec_max = np.cos(deg2rad*(90.0 + self.upper_bound.value))
v = x * (cosdec_max - cosdec_min)
v += cosdec_min
v = np.clip(v, -1.0, 1.0)
# Now this generates on [0,pi)
dec = np.arccos(v)
# convert to degrees
dec = rad2deg * dec
# now in range [-90,90.0)
dec -= 90.0
return dec
|
def function[from_unit_cube, parameter[self, x]]:
constant[
Used by multinest
:param x: 0 < x < 1
:param lower_bound:
:param upper_bound:
:return:
]
variable[cosdec_min] assign[=] call[name[np].cos, parameter[binary_operation[name[deg2rad] * binary_operation[constant[90.0] + name[self].lower_bound.value]]]]
variable[cosdec_max] assign[=] call[name[np].cos, parameter[binary_operation[name[deg2rad] * binary_operation[constant[90.0] + name[self].upper_bound.value]]]]
variable[v] assign[=] binary_operation[name[x] * binary_operation[name[cosdec_max] - name[cosdec_min]]]
<ast.AugAssign object at 0x7da1b0ebf550>
variable[v] assign[=] call[name[np].clip, parameter[name[v], <ast.UnaryOp object at 0x7da1b0ebdd20>, constant[1.0]]]
variable[dec] assign[=] call[name[np].arccos, parameter[name[v]]]
variable[dec] assign[=] binary_operation[name[rad2deg] * name[dec]]
<ast.AugAssign object at 0x7da1b0ebd900>
return[name[dec]]
|
keyword[def] identifier[from_unit_cube] ( identifier[self] , identifier[x] ):
literal[string]
identifier[cosdec_min] = identifier[np] . identifier[cos] ( identifier[deg2rad] *( literal[int] + identifier[self] . identifier[lower_bound] . identifier[value] ))
identifier[cosdec_max] = identifier[np] . identifier[cos] ( identifier[deg2rad] *( literal[int] + identifier[self] . identifier[upper_bound] . identifier[value] ))
identifier[v] = identifier[x] *( identifier[cosdec_max] - identifier[cosdec_min] )
identifier[v] += identifier[cosdec_min]
identifier[v] = identifier[np] . identifier[clip] ( identifier[v] ,- literal[int] , literal[int] )
identifier[dec] = identifier[np] . identifier[arccos] ( identifier[v] )
identifier[dec] = identifier[rad2deg] * identifier[dec]
identifier[dec] -= literal[int]
keyword[return] identifier[dec]
|
def from_unit_cube(self, x):
"""
Used by multinest
:param x: 0 < x < 1
:param lower_bound:
:param upper_bound:
:return:
"""
cosdec_min = np.cos(deg2rad * (90.0 + self.lower_bound.value))
cosdec_max = np.cos(deg2rad * (90.0 + self.upper_bound.value))
v = x * (cosdec_max - cosdec_min)
v += cosdec_min
v = np.clip(v, -1.0, 1.0)
# Now this generates on [0,pi)
dec = np.arccos(v)
# convert to degrees
dec = rad2deg * dec
# now in range [-90,90.0)
dec -= 90.0
return dec
|
def soaproot(self, node):
"""
Get whether the specified I{node} is a soap encoded root.
This is determined by examining @soapenc:root='1'.
The node is considered to be a root when the attribute
is not specified.
@param node: A node to evaluate.
@type node: L{Element}
@return: True if a soap encoded root.
@rtype: bool
"""
root = node.getAttribute('root', ns=soapenc)
if root is None:
return True
else:
return root.value == '1'
|
def function[soaproot, parameter[self, node]]:
constant[
Get whether the specified I{node} is a soap encoded root.
This is determined by examining @soapenc:root='1'.
The node is considered to be a root when the attribute
is not specified.
@param node: A node to evaluate.
@type node: L{Element}
@return: True if a soap encoded root.
@rtype: bool
]
variable[root] assign[=] call[name[node].getAttribute, parameter[constant[root]]]
if compare[name[root] is constant[None]] begin[:]
return[constant[True]]
|
keyword[def] identifier[soaproot] ( identifier[self] , identifier[node] ):
literal[string]
identifier[root] = identifier[node] . identifier[getAttribute] ( literal[string] , identifier[ns] = identifier[soapenc] )
keyword[if] identifier[root] keyword[is] keyword[None] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] identifier[root] . identifier[value] == literal[string]
|
def soaproot(self, node):
"""
Get whether the specified I{node} is a soap encoded root.
This is determined by examining @soapenc:root='1'.
The node is considered to be a root when the attribute
is not specified.
@param node: A node to evaluate.
@type node: L{Element}
@return: True if a soap encoded root.
@rtype: bool
"""
root = node.getAttribute('root', ns=soapenc)
if root is None:
return True # depends on [control=['if'], data=[]]
else:
return root.value == '1'
|
def _process(self, envelope, session, mode, **kwargs):
""" :meth:`.WMessengerOnionLayerProto.process` implementation
"""
if mode == WMessengerOnionPackerLayerProto.Mode.pack:
return self.pack(envelope, session, **kwargs)
else: # mode == WMessengerOnionPackerLayerProto.Mode.unpack
return self.unpack(envelope, session, **kwargs)
|
def function[_process, parameter[self, envelope, session, mode]]:
constant[ :meth:`.WMessengerOnionLayerProto.process` implementation
]
if compare[name[mode] equal[==] name[WMessengerOnionPackerLayerProto].Mode.pack] begin[:]
return[call[name[self].pack, parameter[name[envelope], name[session]]]]
|
keyword[def] identifier[_process] ( identifier[self] , identifier[envelope] , identifier[session] , identifier[mode] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[mode] == identifier[WMessengerOnionPackerLayerProto] . identifier[Mode] . identifier[pack] :
keyword[return] identifier[self] . identifier[pack] ( identifier[envelope] , identifier[session] ,** identifier[kwargs] )
keyword[else] :
keyword[return] identifier[self] . identifier[unpack] ( identifier[envelope] , identifier[session] ,** identifier[kwargs] )
|
def _process(self, envelope, session, mode, **kwargs):
""" :meth:`.WMessengerOnionLayerProto.process` implementation
"""
if mode == WMessengerOnionPackerLayerProto.Mode.pack:
return self.pack(envelope, session, **kwargs) # depends on [control=['if'], data=[]]
else: # mode == WMessengerOnionPackerLayerProto.Mode.unpack
return self.unpack(envelope, session, **kwargs)
|
def _segment_normalized_cnvkit(cnr_file, work_dir, paired):
"""Segmentation of normalized inputs using CNVkit.
"""
cnvkit_base = os.path.join(utils.safe_makedir(os.path.join(work_dir, "cnvkit")),
dd.get_sample_name(paired.tumor_data))
cnr_file = chromhacks.bed_to_standardonly(cnr_file, paired.tumor_data, headers="chromosome",
include_sex_chroms=True,
out_dir=os.path.dirname(cnvkit_base))
cnr_file = _remove_overlaps(cnr_file, os.path.dirname(cnvkit_base), paired.tumor_data)
seg_file = cnvkit.segment_from_cnr(cnr_file, paired.tumor_data, cnvkit_base)
return cnr_file, seg_file
|
def function[_segment_normalized_cnvkit, parameter[cnr_file, work_dir, paired]]:
constant[Segmentation of normalized inputs using CNVkit.
]
variable[cnvkit_base] assign[=] call[name[os].path.join, parameter[call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[name[work_dir], constant[cnvkit]]]]], call[name[dd].get_sample_name, parameter[name[paired].tumor_data]]]]
variable[cnr_file] assign[=] call[name[chromhacks].bed_to_standardonly, parameter[name[cnr_file], name[paired].tumor_data]]
variable[cnr_file] assign[=] call[name[_remove_overlaps], parameter[name[cnr_file], call[name[os].path.dirname, parameter[name[cnvkit_base]]], name[paired].tumor_data]]
variable[seg_file] assign[=] call[name[cnvkit].segment_from_cnr, parameter[name[cnr_file], name[paired].tumor_data, name[cnvkit_base]]]
return[tuple[[<ast.Name object at 0x7da1b18fb010>, <ast.Name object at 0x7da1b18f89d0>]]]
|
keyword[def] identifier[_segment_normalized_cnvkit] ( identifier[cnr_file] , identifier[work_dir] , identifier[paired] ):
literal[string]
identifier[cnvkit_base] = identifier[os] . identifier[path] . identifier[join] ( identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[work_dir] , literal[string] )),
identifier[dd] . identifier[get_sample_name] ( identifier[paired] . identifier[tumor_data] ))
identifier[cnr_file] = identifier[chromhacks] . identifier[bed_to_standardonly] ( identifier[cnr_file] , identifier[paired] . identifier[tumor_data] , identifier[headers] = literal[string] ,
identifier[include_sex_chroms] = keyword[True] ,
identifier[out_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[cnvkit_base] ))
identifier[cnr_file] = identifier[_remove_overlaps] ( identifier[cnr_file] , identifier[os] . identifier[path] . identifier[dirname] ( identifier[cnvkit_base] ), identifier[paired] . identifier[tumor_data] )
identifier[seg_file] = identifier[cnvkit] . identifier[segment_from_cnr] ( identifier[cnr_file] , identifier[paired] . identifier[tumor_data] , identifier[cnvkit_base] )
keyword[return] identifier[cnr_file] , identifier[seg_file]
|
def _segment_normalized_cnvkit(cnr_file, work_dir, paired):
"""Segmentation of normalized inputs using CNVkit.
"""
cnvkit_base = os.path.join(utils.safe_makedir(os.path.join(work_dir, 'cnvkit')), dd.get_sample_name(paired.tumor_data))
cnr_file = chromhacks.bed_to_standardonly(cnr_file, paired.tumor_data, headers='chromosome', include_sex_chroms=True, out_dir=os.path.dirname(cnvkit_base))
cnr_file = _remove_overlaps(cnr_file, os.path.dirname(cnvkit_base), paired.tumor_data)
seg_file = cnvkit.segment_from_cnr(cnr_file, paired.tumor_data, cnvkit_base)
return (cnr_file, seg_file)
|
def _prompt_for_values(d):
"""Update the descriptive metadata interactively.
Uses values entered by the user. Note that the function keeps recursing
whenever a value is another ``CommentedMap`` or a ``list``. The
function works as passing dictionaries and lists into a function edits
the values in place.
"""
for key, value in d.items():
if isinstance(value, CommentedMap):
_prompt_for_values(value)
elif isinstance(value, list):
for item in value:
_prompt_for_values(item)
else:
typ = type(value)
if isinstance(value, ScalarFloat): # Deal with ruamel.yaml floats.
typ = float
new_value = click.prompt(key, type=typ, default=value)
d[key] = new_value
return d
|
def function[_prompt_for_values, parameter[d]]:
constant[Update the descriptive metadata interactively.
Uses values entered by the user. Note that the function keeps recursing
whenever a value is another ``CommentedMap`` or a ``list``. The
function works as passing dictionaries and lists into a function edits
the values in place.
]
for taget[tuple[[<ast.Name object at 0x7da204566530>, <ast.Name object at 0x7da204566410>]]] in starred[call[name[d].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[value], name[CommentedMap]]] begin[:]
call[name[_prompt_for_values], parameter[name[value]]]
return[name[d]]
|
keyword[def] identifier[_prompt_for_values] ( identifier[d] ):
literal[string]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[d] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[value] , identifier[CommentedMap] ):
identifier[_prompt_for_values] ( identifier[value] )
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[list] ):
keyword[for] identifier[item] keyword[in] identifier[value] :
identifier[_prompt_for_values] ( identifier[item] )
keyword[else] :
identifier[typ] = identifier[type] ( identifier[value] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[ScalarFloat] ):
identifier[typ] = identifier[float]
identifier[new_value] = identifier[click] . identifier[prompt] ( identifier[key] , identifier[type] = identifier[typ] , identifier[default] = identifier[value] )
identifier[d] [ identifier[key] ]= identifier[new_value]
keyword[return] identifier[d]
|
def _prompt_for_values(d):
"""Update the descriptive metadata interactively.
Uses values entered by the user. Note that the function keeps recursing
whenever a value is another ``CommentedMap`` or a ``list``. The
function works as passing dictionaries and lists into a function edits
the values in place.
"""
for (key, value) in d.items():
if isinstance(value, CommentedMap):
_prompt_for_values(value) # depends on [control=['if'], data=[]]
elif isinstance(value, list):
for item in value:
_prompt_for_values(item) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
else:
typ = type(value)
if isinstance(value, ScalarFloat): # Deal with ruamel.yaml floats.
typ = float # depends on [control=['if'], data=[]]
new_value = click.prompt(key, type=typ, default=value)
d[key] = new_value # depends on [control=['for'], data=[]]
return d
|
def html(self) -> str:
"""Return string representation of this.
Used in start tag of HTML representation of the Element node.
"""
if self._owner and self.name in self._owner._special_attr_boolean:
return self.name
else:
value = self.value
if isinstance(value, str):
value = html_.escape(value)
return '{name}="{value}"'.format(name=self.name, value=value)
|
def function[html, parameter[self]]:
constant[Return string representation of this.
Used in start tag of HTML representation of the Element node.
]
if <ast.BoolOp object at 0x7da20c7cac50> begin[:]
return[name[self].name]
|
keyword[def] identifier[html] ( identifier[self] )-> identifier[str] :
literal[string]
keyword[if] identifier[self] . identifier[_owner] keyword[and] identifier[self] . identifier[name] keyword[in] identifier[self] . identifier[_owner] . identifier[_special_attr_boolean] :
keyword[return] identifier[self] . identifier[name]
keyword[else] :
identifier[value] = identifier[self] . identifier[value]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[str] ):
identifier[value] = identifier[html_] . identifier[escape] ( identifier[value] )
keyword[return] literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[name] , identifier[value] = identifier[value] )
|
def html(self) -> str:
"""Return string representation of this.
Used in start tag of HTML representation of the Element node.
"""
if self._owner and self.name in self._owner._special_attr_boolean:
return self.name # depends on [control=['if'], data=[]]
else:
value = self.value
if isinstance(value, str):
value = html_.escape(value) # depends on [control=['if'], data=[]]
return '{name}="{value}"'.format(name=self.name, value=value)
|
def _get_win_folder_from_registry(csidl_name):
"""
This is a fallback technique at best. I'm not sure if using the
registry for this guarantees us the correct answer for all CSIDL_*
names.
"""
import _winreg
shell_folder_name = {
"CSIDL_APPDATA": "AppData",
"CSIDL_COMMON_APPDATA": "Common AppData",
"CSIDL_LOCAL_APPDATA": "Local AppData",
}[csidl_name]
key = _winreg.OpenKey(
_winreg.HKEY_CURRENT_USER,
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders",
)
directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
return directory
|
def function[_get_win_folder_from_registry, parameter[csidl_name]]:
constant[
This is a fallback technique at best. I'm not sure if using the
registry for this guarantees us the correct answer for all CSIDL_*
names.
]
import module[_winreg]
variable[shell_folder_name] assign[=] call[dictionary[[<ast.Constant object at 0x7da20e9b0cd0>, <ast.Constant object at 0x7da20e9b0f40>, <ast.Constant object at 0x7da20e9b0c40>], [<ast.Constant object at 0x7da20e9b2e90>, <ast.Constant object at 0x7da20e9b03a0>, <ast.Constant object at 0x7da20e9b2590>]]][name[csidl_name]]
variable[key] assign[=] call[name[_winreg].OpenKey, parameter[name[_winreg].HKEY_CURRENT_USER, constant[Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders]]]
<ast.Tuple object at 0x7da20e9b3490> assign[=] call[name[_winreg].QueryValueEx, parameter[name[key], name[shell_folder_name]]]
return[name[directory]]
|
keyword[def] identifier[_get_win_folder_from_registry] ( identifier[csidl_name] ):
literal[string]
keyword[import] identifier[_winreg]
identifier[shell_folder_name] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}[ identifier[csidl_name] ]
identifier[key] = identifier[_winreg] . identifier[OpenKey] (
identifier[_winreg] . identifier[HKEY_CURRENT_USER] ,
literal[string] ,
)
identifier[directory] , identifier[_type] = identifier[_winreg] . identifier[QueryValueEx] ( identifier[key] , identifier[shell_folder_name] )
keyword[return] identifier[directory]
|
def _get_win_folder_from_registry(csidl_name):
"""
This is a fallback technique at best. I'm not sure if using the
registry for this guarantees us the correct answer for all CSIDL_*
names.
"""
import _winreg
shell_folder_name = {'CSIDL_APPDATA': 'AppData', 'CSIDL_COMMON_APPDATA': 'Common AppData', 'CSIDL_LOCAL_APPDATA': 'Local AppData'}[csidl_name]
key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 'Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders')
(directory, _type) = _winreg.QueryValueEx(key, shell_folder_name)
return directory
|
def get_mcc(self, ip):
''' Get mcc '''
rec = self.get_all(ip)
return rec and rec.mcc
|
def function[get_mcc, parameter[self, ip]]:
constant[ Get mcc ]
variable[rec] assign[=] call[name[self].get_all, parameter[name[ip]]]
return[<ast.BoolOp object at 0x7da1b0d206d0>]
|
keyword[def] identifier[get_mcc] ( identifier[self] , identifier[ip] ):
literal[string]
identifier[rec] = identifier[self] . identifier[get_all] ( identifier[ip] )
keyword[return] identifier[rec] keyword[and] identifier[rec] . identifier[mcc]
|
def get_mcc(self, ip):
""" Get mcc """
rec = self.get_all(ip)
return rec and rec.mcc
|
def toml(uncertainty):
"""
Converts an uncertainty node into a TOML string
"""
text = uncertainty.text.strip()
if not text.startswith('['): # a bare GSIM name was passed
text = '[%s]' % text
for k, v in uncertainty.attrib.items():
try:
v = ast.literal_eval(v)
except ValueError:
v = repr(v)
text += '\n%s = %s' % (k, v)
return text
|
def function[toml, parameter[uncertainty]]:
constant[
Converts an uncertainty node into a TOML string
]
variable[text] assign[=] call[name[uncertainty].text.strip, parameter[]]
if <ast.UnaryOp object at 0x7da204962a70> begin[:]
variable[text] assign[=] binary_operation[constant[[%s]] <ast.Mod object at 0x7da2590d6920> name[text]]
for taget[tuple[[<ast.Name object at 0x7da18f00de70>, <ast.Name object at 0x7da18f00d600>]]] in starred[call[name[uncertainty].attrib.items, parameter[]]] begin[:]
<ast.Try object at 0x7da18f00ceb0>
<ast.AugAssign object at 0x7da18f00ce50>
return[name[text]]
|
keyword[def] identifier[toml] ( identifier[uncertainty] ):
literal[string]
identifier[text] = identifier[uncertainty] . identifier[text] . identifier[strip] ()
keyword[if] keyword[not] identifier[text] . identifier[startswith] ( literal[string] ):
identifier[text] = literal[string] % identifier[text]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[uncertainty] . identifier[attrib] . identifier[items] ():
keyword[try] :
identifier[v] = identifier[ast] . identifier[literal_eval] ( identifier[v] )
keyword[except] identifier[ValueError] :
identifier[v] = identifier[repr] ( identifier[v] )
identifier[text] += literal[string] %( identifier[k] , identifier[v] )
keyword[return] identifier[text]
|
def toml(uncertainty):
"""
Converts an uncertainty node into a TOML string
"""
text = uncertainty.text.strip()
if not text.startswith('['): # a bare GSIM name was passed
text = '[%s]' % text # depends on [control=['if'], data=[]]
for (k, v) in uncertainty.attrib.items():
try:
v = ast.literal_eval(v) # depends on [control=['try'], data=[]]
except ValueError:
v = repr(v) # depends on [control=['except'], data=[]]
text += '\n%s = %s' % (k, v) # depends on [control=['for'], data=[]]
return text
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.