code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def hash(self, seed=0):
"""
Returns an SArray with a hash of each element. seed can be used
to change the hash function to allow this method to be used for
random number generation.
Parameters
----------
seed : int
Defaults to 0. Can be changed to different values to get
different hash results.
Returns
-------
out : SArray
An integer SArray with a hash value for each element. Identical
elements are hashed to the same value
"""
with cython_context():
return SArray(_proxy=self.__proxy__.hash(seed)) | def function[hash, parameter[self, seed]]:
constant[
Returns an SArray with a hash of each element. seed can be used
to change the hash function to allow this method to be used for
random number generation.
Parameters
----------
seed : int
Defaults to 0. Can be changed to different values to get
different hash results.
Returns
-------
out : SArray
An integer SArray with a hash value for each element. Identical
elements are hashed to the same value
]
with call[name[cython_context], parameter[]] begin[:]
return[call[name[SArray], parameter[]]] | keyword[def] identifier[hash] ( identifier[self] , identifier[seed] = literal[int] ):
literal[string]
keyword[with] identifier[cython_context] ():
keyword[return] identifier[SArray] ( identifier[_proxy] = identifier[self] . identifier[__proxy__] . identifier[hash] ( identifier[seed] )) | def hash(self, seed=0):
"""
Returns an SArray with a hash of each element. seed can be used
to change the hash function to allow this method to be used for
random number generation.
Parameters
----------
seed : int
Defaults to 0. Can be changed to different values to get
different hash results.
Returns
-------
out : SArray
An integer SArray with a hash value for each element. Identical
elements are hashed to the same value
"""
with cython_context():
return SArray(_proxy=self.__proxy__.hash(seed)) # depends on [control=['with'], data=[]] |
def dump_commands(commands, directory=None, sub_dir=None):
"""
Dump SQL commands to .sql files.
:param commands: List of SQL commands
:param directory: Directory to dump commands to
:param sub_dir: Sub directory
:return: Directory failed commands were dumped to
"""
print('\t' + str(len(commands)), 'failed commands')
# Create dump_dir directory
if directory and os.path.isfile(directory):
dump_dir = set_dump_directory(os.path.dirname(directory), sub_dir)
return_dir = dump_dir
elif directory:
dump_dir = set_dump_directory(directory, sub_dir)
return_dir = dump_dir
else:
dump_dir = TemporaryDirectory().name
return_dir = TemporaryDirectory()
# Create list of (path, content) tuples
command_filepath = [(fail, os.path.join(dump_dir, str(count) + '.sql')) for count, fail in enumerate(commands)]
# Dump failed commands to text file in the same directory as the commands
# Utilize's multiprocessing module if it is available
timer = Timer()
if MULTIPROCESS:
pool = Pool(cpu_count())
pool.map(write_text_tup, command_filepath)
pool.close()
print('\tDumped ', len(command_filepath), 'commands\n\t\tTime : {0}'.format(timer.end),
'\n\t\tMethod : (multiprocessing)\n\t\tDirectory : {0}'.format(dump_dir))
else:
for tup in command_filepath:
write_text_tup(tup)
print('\tDumped ', len(command_filepath), 'commands\n\t\tTime : {0}'.format(timer.end),
'\n\t\tMethod : (sequential)\n\t\tDirectory : {0}'.format(dump_dir))
# Return base directory of dumped commands
return return_dir | def function[dump_commands, parameter[commands, directory, sub_dir]]:
constant[
Dump SQL commands to .sql files.
:param commands: List of SQL commands
:param directory: Directory to dump commands to
:param sub_dir: Sub directory
:return: Directory failed commands were dumped to
]
call[name[print], parameter[binary_operation[constant[ ] + call[name[str], parameter[call[name[len], parameter[name[commands]]]]]], constant[failed commands]]]
if <ast.BoolOp object at 0x7da1b0a85750> begin[:]
variable[dump_dir] assign[=] call[name[set_dump_directory], parameter[call[name[os].path.dirname, parameter[name[directory]]], name[sub_dir]]]
variable[return_dir] assign[=] name[dump_dir]
variable[command_filepath] assign[=] <ast.ListComp object at 0x7da1b0a71f60>
variable[timer] assign[=] call[name[Timer], parameter[]]
if name[MULTIPROCESS] begin[:]
variable[pool] assign[=] call[name[Pool], parameter[call[name[cpu_count], parameter[]]]]
call[name[pool].map, parameter[name[write_text_tup], name[command_filepath]]]
call[name[pool].close, parameter[]]
call[name[print], parameter[constant[ Dumped ], call[name[len], parameter[name[command_filepath]]], call[constant[commands
Time : {0}].format, parameter[name[timer].end]], call[constant[
Method : (multiprocessing)
Directory : {0}].format, parameter[name[dump_dir]]]]]
return[name[return_dir]] | keyword[def] identifier[dump_commands] ( identifier[commands] , identifier[directory] = keyword[None] , identifier[sub_dir] = keyword[None] ):
literal[string]
identifier[print] ( literal[string] + identifier[str] ( identifier[len] ( identifier[commands] )), literal[string] )
keyword[if] identifier[directory] keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[directory] ):
identifier[dump_dir] = identifier[set_dump_directory] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[directory] ), identifier[sub_dir] )
identifier[return_dir] = identifier[dump_dir]
keyword[elif] identifier[directory] :
identifier[dump_dir] = identifier[set_dump_directory] ( identifier[directory] , identifier[sub_dir] )
identifier[return_dir] = identifier[dump_dir]
keyword[else] :
identifier[dump_dir] = identifier[TemporaryDirectory] (). identifier[name]
identifier[return_dir] = identifier[TemporaryDirectory] ()
identifier[command_filepath] =[( identifier[fail] , identifier[os] . identifier[path] . identifier[join] ( identifier[dump_dir] , identifier[str] ( identifier[count] )+ literal[string] )) keyword[for] identifier[count] , identifier[fail] keyword[in] identifier[enumerate] ( identifier[commands] )]
identifier[timer] = identifier[Timer] ()
keyword[if] identifier[MULTIPROCESS] :
identifier[pool] = identifier[Pool] ( identifier[cpu_count] ())
identifier[pool] . identifier[map] ( identifier[write_text_tup] , identifier[command_filepath] )
identifier[pool] . identifier[close] ()
identifier[print] ( literal[string] , identifier[len] ( identifier[command_filepath] ), literal[string] . identifier[format] ( identifier[timer] . identifier[end] ),
literal[string] . identifier[format] ( identifier[dump_dir] ))
keyword[else] :
keyword[for] identifier[tup] keyword[in] identifier[command_filepath] :
identifier[write_text_tup] ( identifier[tup] )
identifier[print] ( literal[string] , identifier[len] ( identifier[command_filepath] ), literal[string] . identifier[format] ( identifier[timer] . identifier[end] ),
literal[string] . identifier[format] ( identifier[dump_dir] ))
keyword[return] identifier[return_dir] | def dump_commands(commands, directory=None, sub_dir=None):
"""
Dump SQL commands to .sql files.
:param commands: List of SQL commands
:param directory: Directory to dump commands to
:param sub_dir: Sub directory
:return: Directory failed commands were dumped to
"""
print('\t' + str(len(commands)), 'failed commands')
# Create dump_dir directory
if directory and os.path.isfile(directory):
dump_dir = set_dump_directory(os.path.dirname(directory), sub_dir)
return_dir = dump_dir # depends on [control=['if'], data=[]]
elif directory:
dump_dir = set_dump_directory(directory, sub_dir)
return_dir = dump_dir # depends on [control=['if'], data=[]]
else:
dump_dir = TemporaryDirectory().name
return_dir = TemporaryDirectory()
# Create list of (path, content) tuples
command_filepath = [(fail, os.path.join(dump_dir, str(count) + '.sql')) for (count, fail) in enumerate(commands)]
# Dump failed commands to text file in the same directory as the commands
# Utilize's multiprocessing module if it is available
timer = Timer()
if MULTIPROCESS:
pool = Pool(cpu_count())
pool.map(write_text_tup, command_filepath)
pool.close()
print('\tDumped ', len(command_filepath), 'commands\n\t\tTime : {0}'.format(timer.end), '\n\t\tMethod : (multiprocessing)\n\t\tDirectory : {0}'.format(dump_dir)) # depends on [control=['if'], data=[]]
else:
for tup in command_filepath:
write_text_tup(tup) # depends on [control=['for'], data=['tup']]
print('\tDumped ', len(command_filepath), 'commands\n\t\tTime : {0}'.format(timer.end), '\n\t\tMethod : (sequential)\n\t\tDirectory : {0}'.format(dump_dir))
# Return base directory of dumped commands
return return_dir |
def _close_connection(self, report_id):
"""Sends a delete request for the report JSON hash
Parameters
----------
report_id : str
Hash of the report JSON as retrieved from :func:`~_get_report_hash`
"""
logger.debug(
"Closing connection and sending DELETE request to {}".format(
self.broadcast_address))
try:
r = requests.delete(self.broadcast_address,
json={"run_id": report_id})
if r.status_code != 202:
logger.error(colored_print(
"ERROR: There was a problem sending data to the server"
"with reason: {}".format(r.reason)))
except requests.exceptions.ConnectionError:
logger.error(colored_print(
"ERROR: Could not establish connection with server. The server"
" may be down or there is a problem with your internet "
"connection.", "red_bold"))
sys.exit(1) | def function[_close_connection, parameter[self, report_id]]:
constant[Sends a delete request for the report JSON hash
Parameters
----------
report_id : str
Hash of the report JSON as retrieved from :func:`~_get_report_hash`
]
call[name[logger].debug, parameter[call[constant[Closing connection and sending DELETE request to {}].format, parameter[name[self].broadcast_address]]]]
<ast.Try object at 0x7da1b03e3d30> | keyword[def] identifier[_close_connection] ( identifier[self] , identifier[report_id] ):
literal[string]
identifier[logger] . identifier[debug] (
literal[string] . identifier[format] (
identifier[self] . identifier[broadcast_address] ))
keyword[try] :
identifier[r] = identifier[requests] . identifier[delete] ( identifier[self] . identifier[broadcast_address] ,
identifier[json] ={ literal[string] : identifier[report_id] })
keyword[if] identifier[r] . identifier[status_code] != literal[int] :
identifier[logger] . identifier[error] ( identifier[colored_print] (
literal[string]
literal[string] . identifier[format] ( identifier[r] . identifier[reason] )))
keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] :
identifier[logger] . identifier[error] ( identifier[colored_print] (
literal[string]
literal[string]
literal[string] , literal[string] ))
identifier[sys] . identifier[exit] ( literal[int] ) | def _close_connection(self, report_id):
"""Sends a delete request for the report JSON hash
Parameters
----------
report_id : str
Hash of the report JSON as retrieved from :func:`~_get_report_hash`
"""
logger.debug('Closing connection and sending DELETE request to {}'.format(self.broadcast_address))
try:
r = requests.delete(self.broadcast_address, json={'run_id': report_id})
if r.status_code != 202:
logger.error(colored_print('ERROR: There was a problem sending data to the serverwith reason: {}'.format(r.reason))) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except requests.exceptions.ConnectionError:
logger.error(colored_print('ERROR: Could not establish connection with server. The server may be down or there is a problem with your internet connection.', 'red_bold'))
sys.exit(1) # depends on [control=['except'], data=[]] |
def _setup_stats_status_codes(self, spider_name):
'''
Sets up the status code stats collectors
'''
self.stats_dict[spider_name] = {
'status_codes': {}
}
self.stats_dict[spider_name]['status_codes'] = {}
hostname = self._get_hostname()
# we chose to handle 504's here as well as in the middleware
# in case the middleware is disabled
for status_code in self.settings['STATS_RESPONSE_CODES']:
temp_key = 'stats:crawler:{h}:{n}:{s}'.format(
h=hostname, n=spider_name, s=status_code)
self.stats_dict[spider_name]['status_codes'][status_code] = {}
for item in self.settings['STATS_TIMES']:
try:
time = getattr(StatsCollector, item)
self.stats_dict[spider_name]['status_codes'][status_code][time] = StatsCollector \
.get_rolling_time_window(
redis_conn=self.redis_conn,
key='{k}:{t}'.format(k=temp_key, t=time),
window=time,
cycle_time=self.settings['STATS_CYCLE'])
self.logger.debug("Set up status code {s}, {n} spider,"\
" host {h} Stats Collector '{i}'"\
.format(h=hostname, n=spider_name, s=status_code, i=item))
except AttributeError as e:
self.logger.warning("Unable to find Stats Time '{s}'"\
.format(s=item))
total = StatsCollector.get_hll_counter(redis_conn=self.redis_conn,
key='{k}:lifetime'.format(k=temp_key),
cycle_time=self.settings['STATS_CYCLE'],
roll=False)
self.logger.debug("Set up status code {s}, {n} spider,"\
"host {h} Stats Collector 'lifetime'"\
.format(h=hostname, n=spider_name, s=status_code))
self.stats_dict[spider_name]['status_codes'][status_code]['lifetime'] = total | def function[_setup_stats_status_codes, parameter[self, spider_name]]:
constant[
Sets up the status code stats collectors
]
call[name[self].stats_dict][name[spider_name]] assign[=] dictionary[[<ast.Constant object at 0x7da1b18dd510>], [<ast.Dict object at 0x7da1b18dcbb0>]]
call[call[name[self].stats_dict][name[spider_name]]][constant[status_codes]] assign[=] dictionary[[], []]
variable[hostname] assign[=] call[name[self]._get_hostname, parameter[]]
for taget[name[status_code]] in starred[call[name[self].settings][constant[STATS_RESPONSE_CODES]]] begin[:]
variable[temp_key] assign[=] call[constant[stats:crawler:{h}:{n}:{s}].format, parameter[]]
call[call[call[name[self].stats_dict][name[spider_name]]][constant[status_codes]]][name[status_code]] assign[=] dictionary[[], []]
for taget[name[item]] in starred[call[name[self].settings][constant[STATS_TIMES]]] begin[:]
<ast.Try object at 0x7da1b18ee2f0>
variable[total] assign[=] call[name[StatsCollector].get_hll_counter, parameter[]]
call[name[self].logger.debug, parameter[call[constant[Set up status code {s}, {n} spider,host {h} Stats Collector 'lifetime'].format, parameter[]]]]
call[call[call[call[name[self].stats_dict][name[spider_name]]][constant[status_codes]]][name[status_code]]][constant[lifetime]] assign[=] name[total] | keyword[def] identifier[_setup_stats_status_codes] ( identifier[self] , identifier[spider_name] ):
literal[string]
identifier[self] . identifier[stats_dict] [ identifier[spider_name] ]={
literal[string] :{}
}
identifier[self] . identifier[stats_dict] [ identifier[spider_name] ][ literal[string] ]={}
identifier[hostname] = identifier[self] . identifier[_get_hostname] ()
keyword[for] identifier[status_code] keyword[in] identifier[self] . identifier[settings] [ literal[string] ]:
identifier[temp_key] = literal[string] . identifier[format] (
identifier[h] = identifier[hostname] , identifier[n] = identifier[spider_name] , identifier[s] = identifier[status_code] )
identifier[self] . identifier[stats_dict] [ identifier[spider_name] ][ literal[string] ][ identifier[status_code] ]={}
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[settings] [ literal[string] ]:
keyword[try] :
identifier[time] = identifier[getattr] ( identifier[StatsCollector] , identifier[item] )
identifier[self] . identifier[stats_dict] [ identifier[spider_name] ][ literal[string] ][ identifier[status_code] ][ identifier[time] ]= identifier[StatsCollector] . identifier[get_rolling_time_window] (
identifier[redis_conn] = identifier[self] . identifier[redis_conn] ,
identifier[key] = literal[string] . identifier[format] ( identifier[k] = identifier[temp_key] , identifier[t] = identifier[time] ),
identifier[window] = identifier[time] ,
identifier[cycle_time] = identifier[self] . identifier[settings] [ literal[string] ])
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] literal[string] . identifier[format] ( identifier[h] = identifier[hostname] , identifier[n] = identifier[spider_name] , identifier[s] = identifier[status_code] , identifier[i] = identifier[item] ))
keyword[except] identifier[AttributeError] keyword[as] identifier[e] :
identifier[self] . identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[s] = identifier[item] ))
identifier[total] = identifier[StatsCollector] . identifier[get_hll_counter] ( identifier[redis_conn] = identifier[self] . identifier[redis_conn] ,
identifier[key] = literal[string] . identifier[format] ( identifier[k] = identifier[temp_key] ),
identifier[cycle_time] = identifier[self] . identifier[settings] [ literal[string] ],
identifier[roll] = keyword[False] )
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] literal[string] . identifier[format] ( identifier[h] = identifier[hostname] , identifier[n] = identifier[spider_name] , identifier[s] = identifier[status_code] ))
identifier[self] . identifier[stats_dict] [ identifier[spider_name] ][ literal[string] ][ identifier[status_code] ][ literal[string] ]= identifier[total] | def _setup_stats_status_codes(self, spider_name):
"""
Sets up the status code stats collectors
"""
self.stats_dict[spider_name] = {'status_codes': {}}
self.stats_dict[spider_name]['status_codes'] = {}
hostname = self._get_hostname()
# we chose to handle 504's here as well as in the middleware
# in case the middleware is disabled
for status_code in self.settings['STATS_RESPONSE_CODES']:
temp_key = 'stats:crawler:{h}:{n}:{s}'.format(h=hostname, n=spider_name, s=status_code)
self.stats_dict[spider_name]['status_codes'][status_code] = {}
for item in self.settings['STATS_TIMES']:
try:
time = getattr(StatsCollector, item)
self.stats_dict[spider_name]['status_codes'][status_code][time] = StatsCollector.get_rolling_time_window(redis_conn=self.redis_conn, key='{k}:{t}'.format(k=temp_key, t=time), window=time, cycle_time=self.settings['STATS_CYCLE'])
self.logger.debug("Set up status code {s}, {n} spider, host {h} Stats Collector '{i}'".format(h=hostname, n=spider_name, s=status_code, i=item)) # depends on [control=['try'], data=[]]
except AttributeError as e:
self.logger.warning("Unable to find Stats Time '{s}'".format(s=item)) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['item']]
total = StatsCollector.get_hll_counter(redis_conn=self.redis_conn, key='{k}:lifetime'.format(k=temp_key), cycle_time=self.settings['STATS_CYCLE'], roll=False)
self.logger.debug("Set up status code {s}, {n} spider,host {h} Stats Collector 'lifetime'".format(h=hostname, n=spider_name, s=status_code))
self.stats_dict[spider_name]['status_codes'][status_code]['lifetime'] = total # depends on [control=['for'], data=['status_code']] |
def _speak_as_normal(self, element):
"""
Use the default speak configuration of user agent for element and
descendants.
:param element: The element.
:type element: hatemile.util.html.htmldomelement.HTMLDOMElement
"""
self._reverse_speak_as(element, 'spell-out')
self._reverse_speak_as(element, 'literal-punctuation')
self._reverse_speak_as(element, 'no-punctuation')
self._reverse_speak_as(element, 'digits') | def function[_speak_as_normal, parameter[self, element]]:
constant[
Use the default speak configuration of user agent for element and
descendants.
:param element: The element.
:type element: hatemile.util.html.htmldomelement.HTMLDOMElement
]
call[name[self]._reverse_speak_as, parameter[name[element], constant[spell-out]]]
call[name[self]._reverse_speak_as, parameter[name[element], constant[literal-punctuation]]]
call[name[self]._reverse_speak_as, parameter[name[element], constant[no-punctuation]]]
call[name[self]._reverse_speak_as, parameter[name[element], constant[digits]]] | keyword[def] identifier[_speak_as_normal] ( identifier[self] , identifier[element] ):
literal[string]
identifier[self] . identifier[_reverse_speak_as] ( identifier[element] , literal[string] )
identifier[self] . identifier[_reverse_speak_as] ( identifier[element] , literal[string] )
identifier[self] . identifier[_reverse_speak_as] ( identifier[element] , literal[string] )
identifier[self] . identifier[_reverse_speak_as] ( identifier[element] , literal[string] ) | def _speak_as_normal(self, element):
"""
Use the default speak configuration of user agent for element and
descendants.
:param element: The element.
:type element: hatemile.util.html.htmldomelement.HTMLDOMElement
"""
self._reverse_speak_as(element, 'spell-out')
self._reverse_speak_as(element, 'literal-punctuation')
self._reverse_speak_as(element, 'no-punctuation')
self._reverse_speak_as(element, 'digits') |
def __nt_relpath(path, start=os.curdir):
"""Return a relative version of a path"""
if not path: raise ValueError("no path specified")
start_list = os.path.abspath(start).split(os.sep)
path_list = os.path.abspath(path).split(os.sep)
if start_list[0].lower() != path_list[0].lower():
unc_path, rest = os.path.splitunc(path)
unc_start, rest = os.path.splitunc(start)
if bool(unc_path) ^ bool(unc_start):
raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" \
% (path, start))
else: raise ValueError("path is on drive %s, start on drive %s" \
% (path_list[0], start_list[0]))
# Work out how much of the filepath is shared by start and path.
for i in range(min(len(start_list), len(path_list))):
if start_list[i].lower() != path_list[i].lower():
break
else: i += 1
pass
rel_list = [os.pardir] * (len(start_list)-i) + path_list[i:]
if not rel_list: return os.curdir
return os.path.join(*rel_list) | def function[__nt_relpath, parameter[path, start]]:
constant[Return a relative version of a path]
if <ast.UnaryOp object at 0x7da1b16484c0> begin[:]
<ast.Raise object at 0x7da1b1649e10>
variable[start_list] assign[=] call[call[name[os].path.abspath, parameter[name[start]]].split, parameter[name[os].sep]]
variable[path_list] assign[=] call[call[name[os].path.abspath, parameter[name[path]]].split, parameter[name[os].sep]]
if compare[call[call[name[start_list]][constant[0]].lower, parameter[]] not_equal[!=] call[call[name[path_list]][constant[0]].lower, parameter[]]] begin[:]
<ast.Tuple object at 0x7da1b1649000> assign[=] call[name[os].path.splitunc, parameter[name[path]]]
<ast.Tuple object at 0x7da1b1648fd0> assign[=] call[name[os].path.splitunc, parameter[name[start]]]
if binary_operation[call[name[bool], parameter[name[unc_path]]] <ast.BitXor object at 0x7da2590d6b00> call[name[bool], parameter[name[unc_start]]]] begin[:]
<ast.Raise object at 0x7da1b164a200>
for taget[name[i]] in starred[call[name[range], parameter[call[name[min], parameter[call[name[len], parameter[name[start_list]]], call[name[len], parameter[name[path_list]]]]]]]] begin[:]
if compare[call[call[name[start_list]][name[i]].lower, parameter[]] not_equal[!=] call[call[name[path_list]][name[i]].lower, parameter[]]] begin[:]
break
pass
variable[rel_list] assign[=] binary_operation[binary_operation[list[[<ast.Attribute object at 0x7da20e9b2860>]] * binary_operation[call[name[len], parameter[name[start_list]]] - name[i]]] + call[name[path_list]][<ast.Slice object at 0x7da20e956890>]]
if <ast.UnaryOp object at 0x7da20e956920> begin[:]
return[name[os].curdir]
return[call[name[os].path.join, parameter[<ast.Starred object at 0x7da20e955270>]]] | keyword[def] identifier[__nt_relpath] ( identifier[path] , identifier[start] = identifier[os] . identifier[curdir] ):
literal[string]
keyword[if] keyword[not] identifier[path] : keyword[raise] identifier[ValueError] ( literal[string] )
identifier[start_list] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[start] ). identifier[split] ( identifier[os] . identifier[sep] )
identifier[path_list] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] ). identifier[split] ( identifier[os] . identifier[sep] )
keyword[if] identifier[start_list] [ literal[int] ]. identifier[lower] ()!= identifier[path_list] [ literal[int] ]. identifier[lower] ():
identifier[unc_path] , identifier[rest] = identifier[os] . identifier[path] . identifier[splitunc] ( identifier[path] )
identifier[unc_start] , identifier[rest] = identifier[os] . identifier[path] . identifier[splitunc] ( identifier[start] )
keyword[if] identifier[bool] ( identifier[unc_path] )^ identifier[bool] ( identifier[unc_start] ):
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[path] , identifier[start] ))
keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[path_list] [ literal[int] ], identifier[start_list] [ literal[int] ]))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[min] ( identifier[len] ( identifier[start_list] ), identifier[len] ( identifier[path_list] ))):
keyword[if] identifier[start_list] [ identifier[i] ]. identifier[lower] ()!= identifier[path_list] [ identifier[i] ]. identifier[lower] ():
keyword[break]
keyword[else] : identifier[i] += literal[int]
keyword[pass]
identifier[rel_list] =[ identifier[os] . identifier[pardir] ]*( identifier[len] ( identifier[start_list] )- identifier[i] )+ identifier[path_list] [ identifier[i] :]
keyword[if] keyword[not] identifier[rel_list] : keyword[return] identifier[os] . identifier[curdir]
keyword[return] identifier[os] . identifier[path] . identifier[join] (* identifier[rel_list] ) | def __nt_relpath(path, start=os.curdir):
"""Return a relative version of a path"""
if not path:
raise ValueError('no path specified') # depends on [control=['if'], data=[]]
start_list = os.path.abspath(start).split(os.sep)
path_list = os.path.abspath(path).split(os.sep)
if start_list[0].lower() != path_list[0].lower():
(unc_path, rest) = os.path.splitunc(path)
(unc_start, rest) = os.path.splitunc(start)
if bool(unc_path) ^ bool(unc_start):
raise ValueError('Cannot mix UNC and non-UNC paths (%s and %s)' % (path, start)) # depends on [control=['if'], data=[]]
else:
raise ValueError('path is on drive %s, start on drive %s' % (path_list[0], start_list[0])) # depends on [control=['if'], data=[]]
# Work out how much of the filepath is shared by start and path.
for i in range(min(len(start_list), len(path_list))):
if start_list[i].lower() != path_list[i].lower():
break # depends on [control=['if'], data=[]]
else:
i += 1
pass # depends on [control=['for'], data=['i']]
rel_list = [os.pardir] * (len(start_list) - i) + path_list[i:]
if not rel_list:
return os.curdir # depends on [control=['if'], data=[]]
return os.path.join(*rel_list) |
def missing_node_ids(self):
"""Get a list of nodes not found in OSM data."""
present_node_ids = self.nodes.keys()
for nid in self.node_ids:
if nid not in present_node_ids:
yield nid | def function[missing_node_ids, parameter[self]]:
constant[Get a list of nodes not found in OSM data.]
variable[present_node_ids] assign[=] call[name[self].nodes.keys, parameter[]]
for taget[name[nid]] in starred[name[self].node_ids] begin[:]
if compare[name[nid] <ast.NotIn object at 0x7da2590d7190> name[present_node_ids]] begin[:]
<ast.Yield object at 0x7da1b033e3e0> | keyword[def] identifier[missing_node_ids] ( identifier[self] ):
literal[string]
identifier[present_node_ids] = identifier[self] . identifier[nodes] . identifier[keys] ()
keyword[for] identifier[nid] keyword[in] identifier[self] . identifier[node_ids] :
keyword[if] identifier[nid] keyword[not] keyword[in] identifier[present_node_ids] :
keyword[yield] identifier[nid] | def missing_node_ids(self):
"""Get a list of nodes not found in OSM data."""
present_node_ids = self.nodes.keys()
for nid in self.node_ids:
if nid not in present_node_ids:
yield nid # depends on [control=['if'], data=['nid']] # depends on [control=['for'], data=['nid']] |
def _ip_is_usable(self, current_ip):
"""
Check if the current Tor's IP is usable.
:argument current_ip: current Tor IP
:type current_ip: str
:returns bool
"""
# Consider IP addresses only.
try:
ipaddress.ip_address(current_ip)
except ValueError:
return False
# Never use real IP.
if current_ip == self.real_ip:
return False
# Do dot allow IP reuse.
if not self._ip_is_safe(current_ip):
return False
return True | def function[_ip_is_usable, parameter[self, current_ip]]:
constant[
Check if the current Tor's IP is usable.
:argument current_ip: current Tor IP
:type current_ip: str
:returns bool
]
<ast.Try object at 0x7da1b1040730>
if compare[name[current_ip] equal[==] name[self].real_ip] begin[:]
return[constant[False]]
if <ast.UnaryOp object at 0x7da1b1042050> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[_ip_is_usable] ( identifier[self] , identifier[current_ip] ):
literal[string]
keyword[try] :
identifier[ipaddress] . identifier[ip_address] ( identifier[current_ip] )
keyword[except] identifier[ValueError] :
keyword[return] keyword[False]
keyword[if] identifier[current_ip] == identifier[self] . identifier[real_ip] :
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[self] . identifier[_ip_is_safe] ( identifier[current_ip] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def _ip_is_usable(self, current_ip):
"""
Check if the current Tor's IP is usable.
:argument current_ip: current Tor IP
:type current_ip: str
:returns bool
"""
# Consider IP addresses only.
try:
ipaddress.ip_address(current_ip) # depends on [control=['try'], data=[]]
except ValueError:
return False # depends on [control=['except'], data=[]]
# Never use real IP.
if current_ip == self.real_ip:
return False # depends on [control=['if'], data=[]]
# Do dot allow IP reuse.
if not self._ip_is_safe(current_ip):
return False # depends on [control=['if'], data=[]]
return True |
def distance(self, there):
"""
Calculate the distance from this location to there.
Parameters
----------
there : Location
Returns
-------
distance_in_m : float
"""
return haversine_distance((self.latitude, self.longitude),
(there.latitude, there.longitude)) | def function[distance, parameter[self, there]]:
constant[
Calculate the distance from this location to there.
Parameters
----------
there : Location
Returns
-------
distance_in_m : float
]
return[call[name[haversine_distance], parameter[tuple[[<ast.Attribute object at 0x7da18ede4310>, <ast.Attribute object at 0x7da18ede4370>]], tuple[[<ast.Attribute object at 0x7da18ede5a20>, <ast.Attribute object at 0x7da18ede6f80>]]]]] | keyword[def] identifier[distance] ( identifier[self] , identifier[there] ):
literal[string]
keyword[return] identifier[haversine_distance] (( identifier[self] . identifier[latitude] , identifier[self] . identifier[longitude] ),
( identifier[there] . identifier[latitude] , identifier[there] . identifier[longitude] )) | def distance(self, there):
"""
Calculate the distance from this location to there.
Parameters
----------
there : Location
Returns
-------
distance_in_m : float
"""
return haversine_distance((self.latitude, self.longitude), (there.latitude, there.longitude)) |
def temperature(temp: Number, unit: str = 'C') -> str:
"""
Formats a temperature element into a string with both C and F values
Used for both Temp and Dew
Ex: 34°C (93°F)
"""
unit = unit.upper()
if not (temp and unit in ('C', 'F')):
return ''
if unit == 'C':
converted = temp.value * 1.8 + 32
converted = str(int(round(converted))) + '°F' # type: ignore
elif unit == 'F':
converted = (temp.value - 32) / 1.8
converted = str(int(round(converted))) + '°C' # type: ignore
return f'{temp.value}°{unit} ({converted})' | def function[temperature, parameter[temp, unit]]:
constant[
Formats a temperature element into a string with both C and F values
Used for both Temp and Dew
Ex: 34°C (93°F)
]
variable[unit] assign[=] call[name[unit].upper, parameter[]]
if <ast.UnaryOp object at 0x7da2047e8400> begin[:]
return[constant[]]
if compare[name[unit] equal[==] constant[C]] begin[:]
variable[converted] assign[=] binary_operation[binary_operation[name[temp].value * constant[1.8]] + constant[32]]
variable[converted] assign[=] binary_operation[call[name[str], parameter[call[name[int], parameter[call[name[round], parameter[name[converted]]]]]]] + constant[°F]]
return[<ast.JoinedStr object at 0x7da2047ead40>] | keyword[def] identifier[temperature] ( identifier[temp] : identifier[Number] , identifier[unit] : identifier[str] = literal[string] )-> identifier[str] :
literal[string]
identifier[unit] = identifier[unit] . identifier[upper] ()
keyword[if] keyword[not] ( identifier[temp] keyword[and] identifier[unit] keyword[in] ( literal[string] , literal[string] )):
keyword[return] literal[string]
keyword[if] identifier[unit] == literal[string] :
identifier[converted] = identifier[temp] . identifier[value] * literal[int] + literal[int]
identifier[converted] = identifier[str] ( identifier[int] ( identifier[round] ( identifier[converted] )))+ literal[string]
keyword[elif] identifier[unit] == literal[string] :
identifier[converted] =( identifier[temp] . identifier[value] - literal[int] )/ literal[int]
identifier[converted] = identifier[str] ( identifier[int] ( identifier[round] ( identifier[converted] )))+ literal[string]
keyword[return] literal[string] | def temperature(temp: Number, unit: str='C') -> str:
"""
Formats a temperature element into a string with both C and F values
Used for both Temp and Dew
Ex: 34°C (93°F)
"""
unit = unit.upper()
if not (temp and unit in ('C', 'F')):
return '' # depends on [control=['if'], data=[]]
if unit == 'C':
converted = temp.value * 1.8 + 32
converted = str(int(round(converted))) + '°F' # type: ignore # depends on [control=['if'], data=[]]
elif unit == 'F':
converted = (temp.value - 32) / 1.8
converted = str(int(round(converted))) + '°C' # type: ignore # depends on [control=['if'], data=[]]
return f'{temp.value}°{unit} ({converted})' |
def _area_is_empty(self, screen, write_position):
"""
Return True when the area below the write position is still empty.
(For floats that should not hide content underneath.)
"""
wp = write_position
Transparent = Token.Transparent
for y in range(wp.ypos, wp.ypos + wp.height):
if y in screen.data_buffer:
row = screen.data_buffer[y]
for x in range(wp.xpos, wp.xpos + wp.width):
c = row[x]
if c.char != ' ' or c.token != Transparent:
return False
return True | def function[_area_is_empty, parameter[self, screen, write_position]]:
constant[
Return True when the area below the write position is still empty.
(For floats that should not hide content underneath.)
]
variable[wp] assign[=] name[write_position]
variable[Transparent] assign[=] name[Token].Transparent
for taget[name[y]] in starred[call[name[range], parameter[name[wp].ypos, binary_operation[name[wp].ypos + name[wp].height]]]] begin[:]
if compare[name[y] in name[screen].data_buffer] begin[:]
variable[row] assign[=] call[name[screen].data_buffer][name[y]]
for taget[name[x]] in starred[call[name[range], parameter[name[wp].xpos, binary_operation[name[wp].xpos + name[wp].width]]]] begin[:]
variable[c] assign[=] call[name[row]][name[x]]
if <ast.BoolOp object at 0x7da204344dc0> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[_area_is_empty] ( identifier[self] , identifier[screen] , identifier[write_position] ):
literal[string]
identifier[wp] = identifier[write_position]
identifier[Transparent] = identifier[Token] . identifier[Transparent]
keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[wp] . identifier[ypos] , identifier[wp] . identifier[ypos] + identifier[wp] . identifier[height] ):
keyword[if] identifier[y] keyword[in] identifier[screen] . identifier[data_buffer] :
identifier[row] = identifier[screen] . identifier[data_buffer] [ identifier[y] ]
keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[wp] . identifier[xpos] , identifier[wp] . identifier[xpos] + identifier[wp] . identifier[width] ):
identifier[c] = identifier[row] [ identifier[x] ]
keyword[if] identifier[c] . identifier[char] != literal[string] keyword[or] identifier[c] . identifier[token] != identifier[Transparent] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def _area_is_empty(self, screen, write_position):
"""
Return True when the area below the write position is still empty.
(For floats that should not hide content underneath.)
"""
wp = write_position
Transparent = Token.Transparent
for y in range(wp.ypos, wp.ypos + wp.height):
if y in screen.data_buffer:
row = screen.data_buffer[y]
for x in range(wp.xpos, wp.xpos + wp.width):
c = row[x]
if c.char != ' ' or c.token != Transparent:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=['y']] # depends on [control=['for'], data=['y']]
return True |
def find_primitive(self):
"""
Find a primitive version of the unit cell.
Returns:
A primitive cell in the input cell is searched and returned
as an Structure object. If no primitive cell is found, None is
returned.
"""
lattice, scaled_positions, numbers = spglib.find_primitive(
self._cell, symprec=self._symprec)
species = [self._unique_species[i - 1] for i in numbers]
return Structure(lattice, species, scaled_positions,
to_unit_cell=True).get_reduced_structure() | def function[find_primitive, parameter[self]]:
constant[
Find a primitive version of the unit cell.
Returns:
A primitive cell in the input cell is searched and returned
as an Structure object. If no primitive cell is found, None is
returned.
]
<ast.Tuple object at 0x7da1b1c5b850> assign[=] call[name[spglib].find_primitive, parameter[name[self]._cell]]
variable[species] assign[=] <ast.ListComp object at 0x7da1b1c59b70>
return[call[call[name[Structure], parameter[name[lattice], name[species], name[scaled_positions]]].get_reduced_structure, parameter[]]] | keyword[def] identifier[find_primitive] ( identifier[self] ):
literal[string]
identifier[lattice] , identifier[scaled_positions] , identifier[numbers] = identifier[spglib] . identifier[find_primitive] (
identifier[self] . identifier[_cell] , identifier[symprec] = identifier[self] . identifier[_symprec] )
identifier[species] =[ identifier[self] . identifier[_unique_species] [ identifier[i] - literal[int] ] keyword[for] identifier[i] keyword[in] identifier[numbers] ]
keyword[return] identifier[Structure] ( identifier[lattice] , identifier[species] , identifier[scaled_positions] ,
identifier[to_unit_cell] = keyword[True] ). identifier[get_reduced_structure] () | def find_primitive(self):
"""
Find a primitive version of the unit cell.
Returns:
A primitive cell in the input cell is searched and returned
as an Structure object. If no primitive cell is found, None is
returned.
"""
(lattice, scaled_positions, numbers) = spglib.find_primitive(self._cell, symprec=self._symprec)
species = [self._unique_species[i - 1] for i in numbers]
return Structure(lattice, species, scaled_positions, to_unit_cell=True).get_reduced_structure() |
def create(cidr_block, instance_tenancy=None, vpc_name=None,
enable_dns_support=None, enable_dns_hostnames=None, tags=None,
region=None, key=None, keyid=None, profile=None):
'''
Given a valid CIDR block, create a VPC.
An optional instance_tenancy argument can be provided. If provided, the
valid values are 'default' or 'dedicated'
An optional vpc_name argument can be provided.
Returns {created: true} if the VPC was created and returns
{created: False} if the VPC was not created.
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.create '10.0.0.0/24'
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
vpc = conn.create_vpc(cidr_block, instance_tenancy=instance_tenancy)
if vpc:
log.info('The newly created VPC id is %s', vpc.id)
_maybe_set_name_tag(vpc_name, vpc)
_maybe_set_tags(tags, vpc)
_maybe_set_dns(conn, vpc.id, enable_dns_support, enable_dns_hostnames)
_maybe_name_route_table(conn, vpc.id, vpc_name)
if vpc_name:
_cache_id(vpc_name, vpc.id,
region=region, key=key,
keyid=keyid, profile=profile)
return {'created': True, 'id': vpc.id}
else:
log.warning('VPC was not created')
return {'created': False}
except BotoServerError as e:
return {'created': False, 'error': __utils__['boto.get_error'](e)} | def function[create, parameter[cidr_block, instance_tenancy, vpc_name, enable_dns_support, enable_dns_hostnames, tags, region, key, keyid, profile]]:
constant[
Given a valid CIDR block, create a VPC.
An optional instance_tenancy argument can be provided. If provided, the
valid values are 'default' or 'dedicated'
An optional vpc_name argument can be provided.
Returns {created: true} if the VPC was created and returns
{created: False} if the VPC was not created.
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.create '10.0.0.0/24'
]
<ast.Try object at 0x7da20c6a8c10> | keyword[def] identifier[create] ( identifier[cidr_block] , identifier[instance_tenancy] = keyword[None] , identifier[vpc_name] = keyword[None] ,
identifier[enable_dns_support] = keyword[None] , identifier[enable_dns_hostnames] = keyword[None] , identifier[tags] = keyword[None] ,
identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
keyword[try] :
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
identifier[vpc] = identifier[conn] . identifier[create_vpc] ( identifier[cidr_block] , identifier[instance_tenancy] = identifier[instance_tenancy] )
keyword[if] identifier[vpc] :
identifier[log] . identifier[info] ( literal[string] , identifier[vpc] . identifier[id] )
identifier[_maybe_set_name_tag] ( identifier[vpc_name] , identifier[vpc] )
identifier[_maybe_set_tags] ( identifier[tags] , identifier[vpc] )
identifier[_maybe_set_dns] ( identifier[conn] , identifier[vpc] . identifier[id] , identifier[enable_dns_support] , identifier[enable_dns_hostnames] )
identifier[_maybe_name_route_table] ( identifier[conn] , identifier[vpc] . identifier[id] , identifier[vpc_name] )
keyword[if] identifier[vpc_name] :
identifier[_cache_id] ( identifier[vpc_name] , identifier[vpc] . identifier[id] ,
identifier[region] = identifier[region] , identifier[key] = identifier[key] ,
identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[return] { literal[string] : keyword[True] , literal[string] : identifier[vpc] . identifier[id] }
keyword[else] :
identifier[log] . identifier[warning] ( literal[string] )
keyword[return] { literal[string] : keyword[False] }
keyword[except] identifier[BotoServerError] keyword[as] identifier[e] :
keyword[return] { literal[string] : keyword[False] , literal[string] : identifier[__utils__] [ literal[string] ]( identifier[e] )} | def create(cidr_block, instance_tenancy=None, vpc_name=None, enable_dns_support=None, enable_dns_hostnames=None, tags=None, region=None, key=None, keyid=None, profile=None):
"""
Given a valid CIDR block, create a VPC.
An optional instance_tenancy argument can be provided. If provided, the
valid values are 'default' or 'dedicated'
An optional vpc_name argument can be provided.
Returns {created: true} if the VPC was created and returns
{created: False} if the VPC was not created.
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.create '10.0.0.0/24'
"""
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
vpc = conn.create_vpc(cidr_block, instance_tenancy=instance_tenancy)
if vpc:
log.info('The newly created VPC id is %s', vpc.id)
_maybe_set_name_tag(vpc_name, vpc)
_maybe_set_tags(tags, vpc)
_maybe_set_dns(conn, vpc.id, enable_dns_support, enable_dns_hostnames)
_maybe_name_route_table(conn, vpc.id, vpc_name)
if vpc_name:
_cache_id(vpc_name, vpc.id, region=region, key=key, keyid=keyid, profile=profile) # depends on [control=['if'], data=[]]
return {'created': True, 'id': vpc.id} # depends on [control=['if'], data=[]]
else:
log.warning('VPC was not created')
return {'created': False} # depends on [control=['try'], data=[]]
except BotoServerError as e:
return {'created': False, 'error': __utils__['boto.get_error'](e)} # depends on [control=['except'], data=['e']] |
def _parse_requestline(line):
"""
http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5
>>> Entry._parse_requestline('GET / HTTP/1.0') == ('GET', '/', '1.0')
True
>>> Entry._parse_requestline('post /testurl htTP/1.1') == ('POST', '/testurl', '1.1')
True
>>> Entry._parse_requestline('Im not a RequestLine')
Traceback (most recent call last):
...
ValueError: Not a Request-Line
"""
m = re.match(r'({})\s+(.*)\s+HTTP/(1.[0|1])'.format('|'.join(Entry.METHODS)), line, re.I)
if m:
return m.group(1).upper(), m.group(2), m.group(3)
else:
raise ValueError('Not a Request-Line') | def function[_parse_requestline, parameter[line]]:
constant[
http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5
>>> Entry._parse_requestline('GET / HTTP/1.0') == ('GET', '/', '1.0')
True
>>> Entry._parse_requestline('post /testurl htTP/1.1') == ('POST', '/testurl', '1.1')
True
>>> Entry._parse_requestline('Im not a RequestLine')
Traceback (most recent call last):
...
ValueError: Not a Request-Line
]
variable[m] assign[=] call[name[re].match, parameter[call[constant[({})\s+(.*)\s+HTTP/(1.[0|1])].format, parameter[call[constant[|].join, parameter[name[Entry].METHODS]]]], name[line], name[re].I]]
if name[m] begin[:]
return[tuple[[<ast.Call object at 0x7da18ede7730>, <ast.Call object at 0x7da18ede7070>, <ast.Call object at 0x7da18ede4b80>]]] | keyword[def] identifier[_parse_requestline] ( identifier[line] ):
literal[string]
identifier[m] = identifier[re] . identifier[match] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[Entry] . identifier[METHODS] )), identifier[line] , identifier[re] . identifier[I] )
keyword[if] identifier[m] :
keyword[return] identifier[m] . identifier[group] ( literal[int] ). identifier[upper] (), identifier[m] . identifier[group] ( literal[int] ), identifier[m] . identifier[group] ( literal[int] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def _parse_requestline(line):
"""
http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5
>>> Entry._parse_requestline('GET / HTTP/1.0') == ('GET', '/', '1.0')
True
>>> Entry._parse_requestline('post /testurl htTP/1.1') == ('POST', '/testurl', '1.1')
True
>>> Entry._parse_requestline('Im not a RequestLine')
Traceback (most recent call last):
...
ValueError: Not a Request-Line
"""
m = re.match('({})\\s+(.*)\\s+HTTP/(1.[0|1])'.format('|'.join(Entry.METHODS)), line, re.I)
if m:
return (m.group(1).upper(), m.group(2), m.group(3)) # depends on [control=['if'], data=[]]
else:
raise ValueError('Not a Request-Line') |
def on_error_close(logger):
"""
Decorator for callback methods that implement `IProtocol`.
Any uncaught exception is logged and the connection is closed
forcefully.
Usage::
import logger
logger = logging.getLogger(__name__)
class MyProtocol(Protocol):
@on_error_close(logger.error)
def connectionMade():
...
The argument passed to `on_error_close` will be invoked with a
string message.
The motivation behind this decorator is as follows:
Due to bugs it sometimes happens that exceptions are thrown out out
callback methods in protocols. Twisted ignores them, at best they
are logged. This is always a bug, as errors should be handled in the
callback and not let to continue up the call stack. As such, the
behaviour after this occured is typically not well defined and
unpredictable.
A well made protocol implementation can handle unexpected connection
losses as they may occur at any time in a real world environment.
By closing the connection, there is a certain chance
that we enter a code path that can recover, or at least gracefully
cleanup.
In my experience, this often means that unit-tests fail with a more
useful error message. Without it, I sometimes get the case that a
unit-test (or even the final application) just blocks forever
with no information on what is going wrong.
"""
def make_wrapper(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
d = defer.maybeDeferred(func, self, *args, **kwargs)
def on_error(err):
logger("Unhandled failure in %r:%s" % (func, err. getTraceback()))
if hasattr(self, "transport"):
if hasattr(self.transport, "abortConnection"):
self.transport.abortConnection()
elif hasattr(self.transport, "loseConnection"):
self.transport.loseConnection()
d.addErrback(on_error)
return wrapper
return make_wrapper | def function[on_error_close, parameter[logger]]:
constant[
Decorator for callback methods that implement `IProtocol`.
Any uncaught exception is logged and the connection is closed
forcefully.
Usage::
import logger
logger = logging.getLogger(__name__)
class MyProtocol(Protocol):
@on_error_close(logger.error)
def connectionMade():
...
The argument passed to `on_error_close` will be invoked with a
string message.
The motivation behind this decorator is as follows:
Due to bugs it sometimes happens that exceptions are thrown out out
callback methods in protocols. Twisted ignores them, at best they
are logged. This is always a bug, as errors should be handled in the
callback and not let to continue up the call stack. As such, the
behaviour after this occured is typically not well defined and
unpredictable.
A well made protocol implementation can handle unexpected connection
losses as they may occur at any time in a real world environment.
By closing the connection, there is a certain chance
that we enter a code path that can recover, or at least gracefully
cleanup.
In my experience, this often means that unit-tests fail with a more
useful error message. Without it, I sometimes get the case that a
unit-test (or even the final application) just blocks forever
with no information on what is going wrong.
]
def function[make_wrapper, parameter[func]]:
def function[wrapper, parameter[self]]:
variable[d] assign[=] call[name[defer].maybeDeferred, parameter[name[func], name[self], <ast.Starred object at 0x7da1b143f100>]]
def function[on_error, parameter[err]]:
call[name[logger], parameter[binary_operation[constant[Unhandled failure in %r:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b143fe20>, <ast.Call object at 0x7da1b143dba0>]]]]]
if call[name[hasattr], parameter[name[self], constant[transport]]] begin[:]
if call[name[hasattr], parameter[name[self].transport, constant[abortConnection]]] begin[:]
call[name[self].transport.abortConnection, parameter[]]
call[name[d].addErrback, parameter[name[on_error]]]
return[name[wrapper]]
return[name[make_wrapper]] | keyword[def] identifier[on_error_close] ( identifier[logger] ):
literal[string]
keyword[def] identifier[make_wrapper] ( identifier[func] ):
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
identifier[d] = identifier[defer] . identifier[maybeDeferred] ( identifier[func] , identifier[self] ,* identifier[args] ,** identifier[kwargs] )
keyword[def] identifier[on_error] ( identifier[err] ):
identifier[logger] ( literal[string] %( identifier[func] , identifier[err] . identifier[getTraceback] ()))
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[if] identifier[hasattr] ( identifier[self] . identifier[transport] , literal[string] ):
identifier[self] . identifier[transport] . identifier[abortConnection] ()
keyword[elif] identifier[hasattr] ( identifier[self] . identifier[transport] , literal[string] ):
identifier[self] . identifier[transport] . identifier[loseConnection] ()
identifier[d] . identifier[addErrback] ( identifier[on_error] )
keyword[return] identifier[wrapper]
keyword[return] identifier[make_wrapper] | def on_error_close(logger):
"""
Decorator for callback methods that implement `IProtocol`.
Any uncaught exception is logged and the connection is closed
forcefully.
Usage::
import logger
logger = logging.getLogger(__name__)
class MyProtocol(Protocol):
@on_error_close(logger.error)
def connectionMade():
...
The argument passed to `on_error_close` will be invoked with a
string message.
The motivation behind this decorator is as follows:
Due to bugs it sometimes happens that exceptions are thrown out out
callback methods in protocols. Twisted ignores them, at best they
are logged. This is always a bug, as errors should be handled in the
callback and not let to continue up the call stack. As such, the
behaviour after this occured is typically not well defined and
unpredictable.
A well made protocol implementation can handle unexpected connection
losses as they may occur at any time in a real world environment.
By closing the connection, there is a certain chance
that we enter a code path that can recover, or at least gracefully
cleanup.
In my experience, this often means that unit-tests fail with a more
useful error message. Without it, I sometimes get the case that a
unit-test (or even the final application) just blocks forever
with no information on what is going wrong.
"""
def make_wrapper(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
d = defer.maybeDeferred(func, self, *args, **kwargs)
def on_error(err):
logger('Unhandled failure in %r:%s' % (func, err.getTraceback()))
if hasattr(self, 'transport'):
if hasattr(self.transport, 'abortConnection'):
self.transport.abortConnection() # depends on [control=['if'], data=[]]
elif hasattr(self.transport, 'loseConnection'):
self.transport.loseConnection() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
d.addErrback(on_error)
return wrapper
return make_wrapper |
def update(desc, other=None, **kwds):
"""Update sections in a Project description"""
other = other and _as_dict(other) or {}
for i in other, kwds:
for k, v in i.items():
if isinstance(v, dict):
# Only for dicts, merge instead of overwriting
old_v = desc[k]
for k2, v2 in v.items():
if v2 is None:
old_v.pop(k2, None)
else:
old_v[k2] = v2
else:
set_one(desc, k, v) | def function[update, parameter[desc, other]]:
constant[Update sections in a Project description]
variable[other] assign[=] <ast.BoolOp object at 0x7da1b013dc60>
for taget[name[i]] in starred[tuple[[<ast.Name object at 0x7da1b013c790>, <ast.Name object at 0x7da1b013d570>]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b013cb20>, <ast.Name object at 0x7da1b013d8d0>]]] in starred[call[name[i].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[v], name[dict]]] begin[:]
variable[old_v] assign[=] call[name[desc]][name[k]]
for taget[tuple[[<ast.Name object at 0x7da1b013c220>, <ast.Name object at 0x7da1b013e950>]]] in starred[call[name[v].items, parameter[]]] begin[:]
if compare[name[v2] is constant[None]] begin[:]
call[name[old_v].pop, parameter[name[k2], constant[None]]] | keyword[def] identifier[update] ( identifier[desc] , identifier[other] = keyword[None] ,** identifier[kwds] ):
literal[string]
identifier[other] = identifier[other] keyword[and] identifier[_as_dict] ( identifier[other] ) keyword[or] {}
keyword[for] identifier[i] keyword[in] identifier[other] , identifier[kwds] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[i] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[v] , identifier[dict] ):
identifier[old_v] = identifier[desc] [ identifier[k] ]
keyword[for] identifier[k2] , identifier[v2] keyword[in] identifier[v] . identifier[items] ():
keyword[if] identifier[v2] keyword[is] keyword[None] :
identifier[old_v] . identifier[pop] ( identifier[k2] , keyword[None] )
keyword[else] :
identifier[old_v] [ identifier[k2] ]= identifier[v2]
keyword[else] :
identifier[set_one] ( identifier[desc] , identifier[k] , identifier[v] ) | def update(desc, other=None, **kwds):
"""Update sections in a Project description"""
other = other and _as_dict(other) or {}
for i in (other, kwds):
for (k, v) in i.items():
if isinstance(v, dict):
# Only for dicts, merge instead of overwriting
old_v = desc[k]
for (k2, v2) in v.items():
if v2 is None:
old_v.pop(k2, None) # depends on [control=['if'], data=[]]
else:
old_v[k2] = v2 # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
set_one(desc, k, v) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['i']] |
def _ipv4_text_to_int(self, ip_text):
"""convert ip v4 string to integer."""
if ip_text is None:
return None
assert isinstance(ip_text, str)
return struct.unpack('!I', addrconv.ipv4.text_to_bin(ip_text))[0] | def function[_ipv4_text_to_int, parameter[self, ip_text]]:
constant[convert ip v4 string to integer.]
if compare[name[ip_text] is constant[None]] begin[:]
return[constant[None]]
assert[call[name[isinstance], parameter[name[ip_text], name[str]]]]
return[call[call[name[struct].unpack, parameter[constant[!I], call[name[addrconv].ipv4.text_to_bin, parameter[name[ip_text]]]]]][constant[0]]] | keyword[def] identifier[_ipv4_text_to_int] ( identifier[self] , identifier[ip_text] ):
literal[string]
keyword[if] identifier[ip_text] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[assert] identifier[isinstance] ( identifier[ip_text] , identifier[str] )
keyword[return] identifier[struct] . identifier[unpack] ( literal[string] , identifier[addrconv] . identifier[ipv4] . identifier[text_to_bin] ( identifier[ip_text] ))[ literal[int] ] | def _ipv4_text_to_int(self, ip_text):
"""convert ip v4 string to integer."""
if ip_text is None:
return None # depends on [control=['if'], data=[]]
assert isinstance(ip_text, str)
return struct.unpack('!I', addrconv.ipv4.text_to_bin(ip_text))[0] |
def get_client_identity_from_certificate(certificate):
"""
Given an X.509 certificate, extract and return the client identity.
"""
client_ids = get_common_names_from_certificate(certificate)
if len(client_ids) > 0:
if len(client_ids) > 1:
raise exceptions.PermissionDenied(
"Multiple client identities found."
)
return client_ids[0]
else:
raise exceptions.PermissionDenied(
"The certificate does not define any subject common names. "
"Client identity unavailable."
) | def function[get_client_identity_from_certificate, parameter[certificate]]:
constant[
Given an X.509 certificate, extract and return the client identity.
]
variable[client_ids] assign[=] call[name[get_common_names_from_certificate], parameter[name[certificate]]]
if compare[call[name[len], parameter[name[client_ids]]] greater[>] constant[0]] begin[:]
if compare[call[name[len], parameter[name[client_ids]]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da1b0294430>
return[call[name[client_ids]][constant[0]]] | keyword[def] identifier[get_client_identity_from_certificate] ( identifier[certificate] ):
literal[string]
identifier[client_ids] = identifier[get_common_names_from_certificate] ( identifier[certificate] )
keyword[if] identifier[len] ( identifier[client_ids] )> literal[int] :
keyword[if] identifier[len] ( identifier[client_ids] )> literal[int] :
keyword[raise] identifier[exceptions] . identifier[PermissionDenied] (
literal[string]
)
keyword[return] identifier[client_ids] [ literal[int] ]
keyword[else] :
keyword[raise] identifier[exceptions] . identifier[PermissionDenied] (
literal[string]
literal[string]
) | def get_client_identity_from_certificate(certificate):
"""
Given an X.509 certificate, extract and return the client identity.
"""
client_ids = get_common_names_from_certificate(certificate)
if len(client_ids) > 0:
if len(client_ids) > 1:
raise exceptions.PermissionDenied('Multiple client identities found.') # depends on [control=['if'], data=[]]
return client_ids[0] # depends on [control=['if'], data=[]]
else:
raise exceptions.PermissionDenied('The certificate does not define any subject common names. Client identity unavailable.') |
def crop_image(image, threshold):
"""
Найти непрозрачную область на изображении и вырезать её
:param image: Изображение
:param threshold: Порог прозрачности для обрезания
:return: cropped_image - вырезанное изображение
x, y, width, height - координаты и размер вырезаннго прямоугольника
"""
cropper = CropTransparent(image.width(), image.height(), threshold, str(image.constBits()))
x = cropper.getCroppedOffsetX()
y = cropper.getCroppedOffsetY()
width = cropper.getCroppedWidth()
height = cropper.getCroppedHeight()
cropped_image = image.copy(x, y, width, height)
return cropped_image, x, y, width, height | def function[crop_image, parameter[image, threshold]]:
constant[
Найти непрозрачную область на изображении и вырезать её
:param image: Изображение
:param threshold: Порог прозрачности для обрезания
:return: cropped_image - вырезанное изображение
x, y, width, height - координаты и размер вырезаннго прямоугольника
]
variable[cropper] assign[=] call[name[CropTransparent], parameter[call[name[image].width, parameter[]], call[name[image].height, parameter[]], name[threshold], call[name[str], parameter[call[name[image].constBits, parameter[]]]]]]
variable[x] assign[=] call[name[cropper].getCroppedOffsetX, parameter[]]
variable[y] assign[=] call[name[cropper].getCroppedOffsetY, parameter[]]
variable[width] assign[=] call[name[cropper].getCroppedWidth, parameter[]]
variable[height] assign[=] call[name[cropper].getCroppedHeight, parameter[]]
variable[cropped_image] assign[=] call[name[image].copy, parameter[name[x], name[y], name[width], name[height]]]
return[tuple[[<ast.Name object at 0x7da1b14d1e10>, <ast.Name object at 0x7da1b14d2050>, <ast.Name object at 0x7da1b14d2e60>, <ast.Name object at 0x7da1b14d06d0>, <ast.Name object at 0x7da1b14d1c00>]]] | keyword[def] identifier[crop_image] ( identifier[image] , identifier[threshold] ):
literal[string]
identifier[cropper] = identifier[CropTransparent] ( identifier[image] . identifier[width] (), identifier[image] . identifier[height] (), identifier[threshold] , identifier[str] ( identifier[image] . identifier[constBits] ()))
identifier[x] = identifier[cropper] . identifier[getCroppedOffsetX] ()
identifier[y] = identifier[cropper] . identifier[getCroppedOffsetY] ()
identifier[width] = identifier[cropper] . identifier[getCroppedWidth] ()
identifier[height] = identifier[cropper] . identifier[getCroppedHeight] ()
identifier[cropped_image] = identifier[image] . identifier[copy] ( identifier[x] , identifier[y] , identifier[width] , identifier[height] )
keyword[return] identifier[cropped_image] , identifier[x] , identifier[y] , identifier[width] , identifier[height] | def crop_image(image, threshold):
"""
Найти непрозрачную область на изображении и вырезать её
:param image: Изображение
:param threshold: Порог прозрачности для обрезания
:return: cropped_image - вырезанное изображение
x, y, width, height - координаты и размер вырезаннго прямоугольника
"""
cropper = CropTransparent(image.width(), image.height(), threshold, str(image.constBits()))
x = cropper.getCroppedOffsetX()
y = cropper.getCroppedOffsetY()
width = cropper.getCroppedWidth()
height = cropper.getCroppedHeight()
cropped_image = image.copy(x, y, width, height)
return (cropped_image, x, y, width, height) |
def _check_unmask(name, unmask, unmask_runtime, root=None):
'''
Common code for conditionally removing masks before making changes to a
service's state.
'''
if unmask:
unmask_(name, runtime=False, root=root)
if unmask_runtime:
unmask_(name, runtime=True, root=root) | def function[_check_unmask, parameter[name, unmask, unmask_runtime, root]]:
constant[
Common code for conditionally removing masks before making changes to a
service's state.
]
if name[unmask] begin[:]
call[name[unmask_], parameter[name[name]]]
if name[unmask_runtime] begin[:]
call[name[unmask_], parameter[name[name]]] | keyword[def] identifier[_check_unmask] ( identifier[name] , identifier[unmask] , identifier[unmask_runtime] , identifier[root] = keyword[None] ):
literal[string]
keyword[if] identifier[unmask] :
identifier[unmask_] ( identifier[name] , identifier[runtime] = keyword[False] , identifier[root] = identifier[root] )
keyword[if] identifier[unmask_runtime] :
identifier[unmask_] ( identifier[name] , identifier[runtime] = keyword[True] , identifier[root] = identifier[root] ) | def _check_unmask(name, unmask, unmask_runtime, root=None):
"""
Common code for conditionally removing masks before making changes to a
service's state.
"""
if unmask:
unmask_(name, runtime=False, root=root) # depends on [control=['if'], data=[]]
if unmask_runtime:
unmask_(name, runtime=True, root=root) # depends on [control=['if'], data=[]] |
def base64(self, charset=None):
'''Data encoded as base 64'''
return b64encode(self.bytes()).decode(charset or self.charset) | def function[base64, parameter[self, charset]]:
constant[Data encoded as base 64]
return[call[call[name[b64encode], parameter[call[name[self].bytes, parameter[]]]].decode, parameter[<ast.BoolOp object at 0x7da204564c70>]]] | keyword[def] identifier[base64] ( identifier[self] , identifier[charset] = keyword[None] ):
literal[string]
keyword[return] identifier[b64encode] ( identifier[self] . identifier[bytes] ()). identifier[decode] ( identifier[charset] keyword[or] identifier[self] . identifier[charset] ) | def base64(self, charset=None):
"""Data encoded as base 64"""
return b64encode(self.bytes()).decode(charset or self.charset) |
def node_number(self, *, count_pnode=True) -> int:
"""Return the number of node"""
return (sum(1 for n in self.nodes())
+ (sum(1 for n in self.powernodes()) if count_pnode else 0)) | def function[node_number, parameter[self]]:
constant[Return the number of node]
return[binary_operation[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da207f9b1f0>]] + <ast.IfExp object at 0x7da207f99420>]] | keyword[def] identifier[node_number] ( identifier[self] ,*, identifier[count_pnode] = keyword[True] )-> identifier[int] :
literal[string]
keyword[return] ( identifier[sum] ( literal[int] keyword[for] identifier[n] keyword[in] identifier[self] . identifier[nodes] ())
+( identifier[sum] ( literal[int] keyword[for] identifier[n] keyword[in] identifier[self] . identifier[powernodes] ()) keyword[if] identifier[count_pnode] keyword[else] literal[int] )) | def node_number(self, *, count_pnode=True) -> int:
"""Return the number of node"""
return sum((1 for n in self.nodes())) + (sum((1 for n in self.powernodes())) if count_pnode else 0) |
def optimizeTraitByTrait(self, G, verbose=False, n_times=10, factr=1e3):
""" Optimize trait by trait """
assert self.nullST is not None, 'fit null model beforehand'
RV = {}
self.infoOptST = {}
for p in range(self.P):
trait_id = self.traitID[p]
self.stSet.Y = self.Y[:, p:p+1]
self.stSet.setNull(self.nullST[trait_id])
RV[trait_id] = self.stSet.optimize(G, n_times=n_times, factr=factr, verbose=verbose)
self.infoOptST[trait_id] = self.stSet.getInfoOpt()
return RV | def function[optimizeTraitByTrait, parameter[self, G, verbose, n_times, factr]]:
constant[ Optimize trait by trait ]
assert[compare[name[self].nullST is_not constant[None]]]
variable[RV] assign[=] dictionary[[], []]
name[self].infoOptST assign[=] dictionary[[], []]
for taget[name[p]] in starred[call[name[range], parameter[name[self].P]]] begin[:]
variable[trait_id] assign[=] call[name[self].traitID][name[p]]
name[self].stSet.Y assign[=] call[name[self].Y][tuple[[<ast.Slice object at 0x7da20e74b070>, <ast.Slice object at 0x7da20e74beb0>]]]
call[name[self].stSet.setNull, parameter[call[name[self].nullST][name[trait_id]]]]
call[name[RV]][name[trait_id]] assign[=] call[name[self].stSet.optimize, parameter[name[G]]]
call[name[self].infoOptST][name[trait_id]] assign[=] call[name[self].stSet.getInfoOpt, parameter[]]
return[name[RV]] | keyword[def] identifier[optimizeTraitByTrait] ( identifier[self] , identifier[G] , identifier[verbose] = keyword[False] , identifier[n_times] = literal[int] , identifier[factr] = literal[int] ):
literal[string]
keyword[assert] identifier[self] . identifier[nullST] keyword[is] keyword[not] keyword[None] , literal[string]
identifier[RV] ={}
identifier[self] . identifier[infoOptST] ={}
keyword[for] identifier[p] keyword[in] identifier[range] ( identifier[self] . identifier[P] ):
identifier[trait_id] = identifier[self] . identifier[traitID] [ identifier[p] ]
identifier[self] . identifier[stSet] . identifier[Y] = identifier[self] . identifier[Y] [:, identifier[p] : identifier[p] + literal[int] ]
identifier[self] . identifier[stSet] . identifier[setNull] ( identifier[self] . identifier[nullST] [ identifier[trait_id] ])
identifier[RV] [ identifier[trait_id] ]= identifier[self] . identifier[stSet] . identifier[optimize] ( identifier[G] , identifier[n_times] = identifier[n_times] , identifier[factr] = identifier[factr] , identifier[verbose] = identifier[verbose] )
identifier[self] . identifier[infoOptST] [ identifier[trait_id] ]= identifier[self] . identifier[stSet] . identifier[getInfoOpt] ()
keyword[return] identifier[RV] | def optimizeTraitByTrait(self, G, verbose=False, n_times=10, factr=1000.0):
""" Optimize trait by trait """
assert self.nullST is not None, 'fit null model beforehand'
RV = {}
self.infoOptST = {}
for p in range(self.P):
trait_id = self.traitID[p]
self.stSet.Y = self.Y[:, p:p + 1]
self.stSet.setNull(self.nullST[trait_id])
RV[trait_id] = self.stSet.optimize(G, n_times=n_times, factr=factr, verbose=verbose)
self.infoOptST[trait_id] = self.stSet.getInfoOpt() # depends on [control=['for'], data=['p']]
return RV |
def get_posts_tags(subscribers, object_list, feed, tag_name):
'''Adds a qtags property in every post object in a page.
Use "qtags" instead of "tags" in templates to avoid unnecesary DB hits.'''
tagd = dict()
user_obj = None
tag_obj = None
tags = models.Tag.objects.extra(
select=dict(post_id='{0}.{1}'.format(
*it.imap( connection.ops.quote_name,
('feedjack_post_tags', 'post_id') ) )),
tables=['feedjack_post_tags'],
where=[
'{0}.{1}={2}.{3}'.format(*it.imap( connection.ops.quote_name,
('feedjack_tag', 'id', 'feedjack_post_tags', 'tag_id') )),
'{0}.{1} IN ({2})'.format(
connection.ops.quote_name('feedjack_post_tags'),
connection.ops.quote_name('post_id'),
', '.join([str(post.id) for post in object_list]) ) ] )
for tag in tags:
if tag.post_id not in tagd: tagd[tag.post_id] = list()
tagd[tag.post_id].append(tag)
if tag_name and tag.name == tag_name: tag_obj = tag
subd = dict()
for sub in subscribers: subd[sub.feed.id] = sub
for post in object_list:
if post.id in tagd: post.qtags = tagd[post.id]
else: post.qtags = list()
post.subscriber = subd[post.feed.id]
if feed == post.feed: user_obj = post.subscriber
return user_obj, tag_obj | def function[get_posts_tags, parameter[subscribers, object_list, feed, tag_name]]:
constant[Adds a qtags property in every post object in a page.
Use "qtags" instead of "tags" in templates to avoid unnecesary DB hits.]
variable[tagd] assign[=] call[name[dict], parameter[]]
variable[user_obj] assign[=] constant[None]
variable[tag_obj] assign[=] constant[None]
variable[tags] assign[=] call[name[models].Tag.objects.extra, parameter[]]
for taget[name[tag]] in starred[name[tags]] begin[:]
if compare[name[tag].post_id <ast.NotIn object at 0x7da2590d7190> name[tagd]] begin[:]
call[name[tagd]][name[tag].post_id] assign[=] call[name[list], parameter[]]
call[call[name[tagd]][name[tag].post_id].append, parameter[name[tag]]]
if <ast.BoolOp object at 0x7da18f00cc40> begin[:]
variable[tag_obj] assign[=] name[tag]
variable[subd] assign[=] call[name[dict], parameter[]]
for taget[name[sub]] in starred[name[subscribers]] begin[:]
call[name[subd]][name[sub].feed.id] assign[=] name[sub]
for taget[name[post]] in starred[name[object_list]] begin[:]
if compare[name[post].id in name[tagd]] begin[:]
name[post].qtags assign[=] call[name[tagd]][name[post].id]
name[post].subscriber assign[=] call[name[subd]][name[post].feed.id]
if compare[name[feed] equal[==] name[post].feed] begin[:]
variable[user_obj] assign[=] name[post].subscriber
return[tuple[[<ast.Name object at 0x7da18f00d630>, <ast.Name object at 0x7da18f00e0e0>]]] | keyword[def] identifier[get_posts_tags] ( identifier[subscribers] , identifier[object_list] , identifier[feed] , identifier[tag_name] ):
literal[string]
identifier[tagd] = identifier[dict] ()
identifier[user_obj] = keyword[None]
identifier[tag_obj] = keyword[None]
identifier[tags] = identifier[models] . identifier[Tag] . identifier[objects] . identifier[extra] (
identifier[select] = identifier[dict] ( identifier[post_id] = literal[string] . identifier[format] (
* identifier[it] . identifier[imap] ( identifier[connection] . identifier[ops] . identifier[quote_name] ,
( literal[string] , literal[string] )))),
identifier[tables] =[ literal[string] ],
identifier[where] =[
literal[string] . identifier[format] (* identifier[it] . identifier[imap] ( identifier[connection] . identifier[ops] . identifier[quote_name] ,
( literal[string] , literal[string] , literal[string] , literal[string] ))),
literal[string] . identifier[format] (
identifier[connection] . identifier[ops] . identifier[quote_name] ( literal[string] ),
identifier[connection] . identifier[ops] . identifier[quote_name] ( literal[string] ),
literal[string] . identifier[join] ([ identifier[str] ( identifier[post] . identifier[id] ) keyword[for] identifier[post] keyword[in] identifier[object_list] ]))])
keyword[for] identifier[tag] keyword[in] identifier[tags] :
keyword[if] identifier[tag] . identifier[post_id] keyword[not] keyword[in] identifier[tagd] : identifier[tagd] [ identifier[tag] . identifier[post_id] ]= identifier[list] ()
identifier[tagd] [ identifier[tag] . identifier[post_id] ]. identifier[append] ( identifier[tag] )
keyword[if] identifier[tag_name] keyword[and] identifier[tag] . identifier[name] == identifier[tag_name] : identifier[tag_obj] = identifier[tag]
identifier[subd] = identifier[dict] ()
keyword[for] identifier[sub] keyword[in] identifier[subscribers] : identifier[subd] [ identifier[sub] . identifier[feed] . identifier[id] ]= identifier[sub]
keyword[for] identifier[post] keyword[in] identifier[object_list] :
keyword[if] identifier[post] . identifier[id] keyword[in] identifier[tagd] : identifier[post] . identifier[qtags] = identifier[tagd] [ identifier[post] . identifier[id] ]
keyword[else] : identifier[post] . identifier[qtags] = identifier[list] ()
identifier[post] . identifier[subscriber] = identifier[subd] [ identifier[post] . identifier[feed] . identifier[id] ]
keyword[if] identifier[feed] == identifier[post] . identifier[feed] : identifier[user_obj] = identifier[post] . identifier[subscriber]
keyword[return] identifier[user_obj] , identifier[tag_obj] | def get_posts_tags(subscribers, object_list, feed, tag_name):
"""Adds a qtags property in every post object in a page.
Use "qtags" instead of "tags" in templates to avoid unnecesary DB hits."""
tagd = dict()
user_obj = None
tag_obj = None
tags = models.Tag.objects.extra(select=dict(post_id='{0}.{1}'.format(*it.imap(connection.ops.quote_name, ('feedjack_post_tags', 'post_id')))), tables=['feedjack_post_tags'], where=['{0}.{1}={2}.{3}'.format(*it.imap(connection.ops.quote_name, ('feedjack_tag', 'id', 'feedjack_post_tags', 'tag_id'))), '{0}.{1} IN ({2})'.format(connection.ops.quote_name('feedjack_post_tags'), connection.ops.quote_name('post_id'), ', '.join([str(post.id) for post in object_list]))])
for tag in tags:
if tag.post_id not in tagd:
tagd[tag.post_id] = list() # depends on [control=['if'], data=['tagd']]
tagd[tag.post_id].append(tag)
if tag_name and tag.name == tag_name:
tag_obj = tag # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tag']]
subd = dict()
for sub in subscribers:
subd[sub.feed.id] = sub # depends on [control=['for'], data=['sub']]
for post in object_list:
if post.id in tagd:
post.qtags = tagd[post.id] # depends on [control=['if'], data=['tagd']]
else:
post.qtags = list()
post.subscriber = subd[post.feed.id]
if feed == post.feed:
user_obj = post.subscriber # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['post']]
return (user_obj, tag_obj) |
def to_curve_spline(obj):
'''
to_curve_spline(obj) obj if obj is a curve spline and otherwise attempts to coerce obj into a
curve spline, raising an error if it cannot.
'''
if is_curve_spline(obj): return obj
elif is_tuple(obj) and len(obj) == 2: (crds,opts) = obj
else: (crds,opts) = (obj,{})
if pimms.is_matrix(crds) or is_curve_spline(crds): crds = [crds]
spls = [c for c in crds if is_curve_spline(c)]
opts = dict(opts)
if 'weights' not in opts and len(spls) == len(crds):
if all(c.weights is not None for c in crds):
opts['weights'] = np.concatenate([c.weights for c in crds])
if 'order' not in opts and len(spls) > 0:
opts['order'] = np.min([c.order for c in spls])
if 'smoothing' not in opts and len(spls) > 0:
sm = set([c.smoothing for c in spls])
if len(sm) == 1: opts['smoothing'] = list(sm)[0]
else: opts['smoothing'] = None
crds = [x.crds if is_curve_spline(crds) else np.asarray(x) for x in crds]
crds = [x if x.shape[0] == 2 else x.T for x in crds]
crds = np.hstack(crds)
return curve_spline(crds, **opts) | def function[to_curve_spline, parameter[obj]]:
constant[
to_curve_spline(obj) obj if obj is a curve spline and otherwise attempts to coerce obj into a
curve spline, raising an error if it cannot.
]
if call[name[is_curve_spline], parameter[name[obj]]] begin[:]
return[name[obj]]
if <ast.BoolOp object at 0x7da2045655d0> begin[:]
variable[crds] assign[=] list[[<ast.Name object at 0x7da204566f20>]]
variable[spls] assign[=] <ast.ListComp object at 0x7da2045640a0>
variable[opts] assign[=] call[name[dict], parameter[name[opts]]]
if <ast.BoolOp object at 0x7da2045649d0> begin[:]
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da204566b00>]] begin[:]
call[name[opts]][constant[weights]] assign[=] call[name[np].concatenate, parameter[<ast.ListComp object at 0x7da2045657e0>]]
if <ast.BoolOp object at 0x7da204567760> begin[:]
call[name[opts]][constant[order]] assign[=] call[name[np].min, parameter[<ast.ListComp object at 0x7da204564640>]]
if <ast.BoolOp object at 0x7da204567bb0> begin[:]
variable[sm] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da204566ec0>]]
if compare[call[name[len], parameter[name[sm]]] equal[==] constant[1]] begin[:]
call[name[opts]][constant[smoothing]] assign[=] call[call[name[list], parameter[name[sm]]]][constant[0]]
variable[crds] assign[=] <ast.ListComp object at 0x7da204564130>
variable[crds] assign[=] <ast.ListComp object at 0x7da204565e40>
variable[crds] assign[=] call[name[np].hstack, parameter[name[crds]]]
return[call[name[curve_spline], parameter[name[crds]]]] | keyword[def] identifier[to_curve_spline] ( identifier[obj] ):
literal[string]
keyword[if] identifier[is_curve_spline] ( identifier[obj] ): keyword[return] identifier[obj]
keyword[elif] identifier[is_tuple] ( identifier[obj] ) keyword[and] identifier[len] ( identifier[obj] )== literal[int] :( identifier[crds] , identifier[opts] )= identifier[obj]
keyword[else] :( identifier[crds] , identifier[opts] )=( identifier[obj] ,{})
keyword[if] identifier[pimms] . identifier[is_matrix] ( identifier[crds] ) keyword[or] identifier[is_curve_spline] ( identifier[crds] ): identifier[crds] =[ identifier[crds] ]
identifier[spls] =[ identifier[c] keyword[for] identifier[c] keyword[in] identifier[crds] keyword[if] identifier[is_curve_spline] ( identifier[c] )]
identifier[opts] = identifier[dict] ( identifier[opts] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[opts] keyword[and] identifier[len] ( identifier[spls] )== identifier[len] ( identifier[crds] ):
keyword[if] identifier[all] ( identifier[c] . identifier[weights] keyword[is] keyword[not] keyword[None] keyword[for] identifier[c] keyword[in] identifier[crds] ):
identifier[opts] [ literal[string] ]= identifier[np] . identifier[concatenate] ([ identifier[c] . identifier[weights] keyword[for] identifier[c] keyword[in] identifier[crds] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[opts] keyword[and] identifier[len] ( identifier[spls] )> literal[int] :
identifier[opts] [ literal[string] ]= identifier[np] . identifier[min] ([ identifier[c] . identifier[order] keyword[for] identifier[c] keyword[in] identifier[spls] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[opts] keyword[and] identifier[len] ( identifier[spls] )> literal[int] :
identifier[sm] = identifier[set] ([ identifier[c] . identifier[smoothing] keyword[for] identifier[c] keyword[in] identifier[spls] ])
keyword[if] identifier[len] ( identifier[sm] )== literal[int] : identifier[opts] [ literal[string] ]= identifier[list] ( identifier[sm] )[ literal[int] ]
keyword[else] : identifier[opts] [ literal[string] ]= keyword[None]
identifier[crds] =[ identifier[x] . identifier[crds] keyword[if] identifier[is_curve_spline] ( identifier[crds] ) keyword[else] identifier[np] . identifier[asarray] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[crds] ]
identifier[crds] =[ identifier[x] keyword[if] identifier[x] . identifier[shape] [ literal[int] ]== literal[int] keyword[else] identifier[x] . identifier[T] keyword[for] identifier[x] keyword[in] identifier[crds] ]
identifier[crds] = identifier[np] . identifier[hstack] ( identifier[crds] )
keyword[return] identifier[curve_spline] ( identifier[crds] ,** identifier[opts] ) | def to_curve_spline(obj):
"""
to_curve_spline(obj) obj if obj is a curve spline and otherwise attempts to coerce obj into a
curve spline, raising an error if it cannot.
"""
if is_curve_spline(obj):
return obj # depends on [control=['if'], data=[]]
elif is_tuple(obj) and len(obj) == 2:
(crds, opts) = obj # depends on [control=['if'], data=[]]
else:
(crds, opts) = (obj, {})
if pimms.is_matrix(crds) or is_curve_spline(crds):
crds = [crds] # depends on [control=['if'], data=[]]
spls = [c for c in crds if is_curve_spline(c)]
opts = dict(opts)
if 'weights' not in opts and len(spls) == len(crds):
if all((c.weights is not None for c in crds)):
opts['weights'] = np.concatenate([c.weights for c in crds]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if 'order' not in opts and len(spls) > 0:
opts['order'] = np.min([c.order for c in spls]) # depends on [control=['if'], data=[]]
if 'smoothing' not in opts and len(spls) > 0:
sm = set([c.smoothing for c in spls])
if len(sm) == 1:
opts['smoothing'] = list(sm)[0] # depends on [control=['if'], data=[]]
else:
opts['smoothing'] = None # depends on [control=['if'], data=[]]
crds = [x.crds if is_curve_spline(crds) else np.asarray(x) for x in crds]
crds = [x if x.shape[0] == 2 else x.T for x in crds]
crds = np.hstack(crds)
return curve_spline(crds, **opts) |
def unregister_controllers(self):
"""Destroy controller objects and clear internal registry.
This will be called after each test class.
"""
# TODO(xpconanfan): actually record these errors instead of just
# logging them.
for name, module in self._controller_modules.items():
logging.debug('Destroying %s.', name)
with expects.expect_no_raises(
'Exception occurred destroying %s.' % name):
module.destroy(self._controller_objects[name])
self._controller_objects = collections.OrderedDict()
self._controller_modules = {} | def function[unregister_controllers, parameter[self]]:
constant[Destroy controller objects and clear internal registry.
This will be called after each test class.
]
for taget[tuple[[<ast.Name object at 0x7da1b08ca140>, <ast.Name object at 0x7da1b08cab30>]]] in starred[call[name[self]._controller_modules.items, parameter[]]] begin[:]
call[name[logging].debug, parameter[constant[Destroying %s.], name[name]]]
with call[name[expects].expect_no_raises, parameter[binary_operation[constant[Exception occurred destroying %s.] <ast.Mod object at 0x7da2590d6920> name[name]]]] begin[:]
call[name[module].destroy, parameter[call[name[self]._controller_objects][name[name]]]]
name[self]._controller_objects assign[=] call[name[collections].OrderedDict, parameter[]]
name[self]._controller_modules assign[=] dictionary[[], []] | keyword[def] identifier[unregister_controllers] ( identifier[self] ):
literal[string]
keyword[for] identifier[name] , identifier[module] keyword[in] identifier[self] . identifier[_controller_modules] . identifier[items] ():
identifier[logging] . identifier[debug] ( literal[string] , identifier[name] )
keyword[with] identifier[expects] . identifier[expect_no_raises] (
literal[string] % identifier[name] ):
identifier[module] . identifier[destroy] ( identifier[self] . identifier[_controller_objects] [ identifier[name] ])
identifier[self] . identifier[_controller_objects] = identifier[collections] . identifier[OrderedDict] ()
identifier[self] . identifier[_controller_modules] ={} | def unregister_controllers(self):
"""Destroy controller objects and clear internal registry.
This will be called after each test class.
"""
# TODO(xpconanfan): actually record these errors instead of just
# logging them.
for (name, module) in self._controller_modules.items():
logging.debug('Destroying %s.', name)
with expects.expect_no_raises('Exception occurred destroying %s.' % name):
module.destroy(self._controller_objects[name]) # depends on [control=['with'], data=[]] # depends on [control=['for'], data=[]]
self._controller_objects = collections.OrderedDict()
self._controller_modules = {} |
def aes_pad(s, block_size=32, padding='{'):
""" Adds padding to get the correct block sizes for AES encryption
@s: #str being AES encrypted or decrypted
@block_size: the AES block size
@padding: character to pad with
-> padded #str
..
from vital.security import aes_pad
aes_pad("swing")
# -> 'swing{{{{{{{{{{{{{{{{{{{{{{{{{{{'
..
"""
return s + (block_size - len(s) % block_size) * padding | def function[aes_pad, parameter[s, block_size, padding]]:
constant[ Adds padding to get the correct block sizes for AES encryption
@s: #str being AES encrypted or decrypted
@block_size: the AES block size
@padding: character to pad with
-> padded #str
..
from vital.security import aes_pad
aes_pad("swing")
# -> 'swing{{{{{{{{{{{{{{{{{{{{{{{{{{{'
..
]
return[binary_operation[name[s] + binary_operation[binary_operation[name[block_size] - binary_operation[call[name[len], parameter[name[s]]] <ast.Mod object at 0x7da2590d6920> name[block_size]]] * name[padding]]]] | keyword[def] identifier[aes_pad] ( identifier[s] , identifier[block_size] = literal[int] , identifier[padding] = literal[string] ):
literal[string]
keyword[return] identifier[s] +( identifier[block_size] - identifier[len] ( identifier[s] )% identifier[block_size] )* identifier[padding] | def aes_pad(s, block_size=32, padding='{'):
""" Adds padding to get the correct block sizes for AES encryption
@s: #str being AES encrypted or decrypted
@block_size: the AES block size
@padding: character to pad with
-> padded #str
..
from vital.security import aes_pad
aes_pad("swing")
# -> 'swing{{{{{{{{{{{{{{{{{{{{{{{{{{{'
..
"""
return s + (block_size - len(s) % block_size) * padding |
def wb010(self, value=None):
""" Corresponds to IDD Field `wb010`
Wet-bulb temperature corresponding to 1.0% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `wb010`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError('value {} need to be of type float '
'for field `wb010`'.format(value))
self._wb010 = value | def function[wb010, parameter[self, value]]:
constant[ Corresponds to IDD Field `wb010`
Wet-bulb temperature corresponding to 1.0% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `wb010`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0f91ba0>
name[self]._wb010 assign[=] name[value] | keyword[def] identifier[wb010] ( identifier[self] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[float] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[value] ))
identifier[self] . identifier[_wb010] = identifier[value] | def wb010(self, value=None):
""" Corresponds to IDD Field `wb010`
Wet-bulb temperature corresponding to 1.0% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `wb010`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('value {} need to be of type float for field `wb010`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']]
self._wb010 = value |
def ancestors(self, start, generations=None):
"""
Return the subgraph of all nodes from which the given vertex is
reachable, including that vertex.
If specified, the optional `generations` argument specifies how
many generations to limit to.
"""
visited = self.vertex_set()
visited.add(start)
to_visit = deque([(start, 0)])
while to_visit:
vertex, depth = to_visit.popleft()
if depth == generations:
continue
for parent in self.parents(vertex):
if parent not in visited:
visited.add(parent)
to_visit.append((parent, depth+1))
return self.full_subgraph(visited) | def function[ancestors, parameter[self, start, generations]]:
constant[
Return the subgraph of all nodes from which the given vertex is
reachable, including that vertex.
If specified, the optional `generations` argument specifies how
many generations to limit to.
]
variable[visited] assign[=] call[name[self].vertex_set, parameter[]]
call[name[visited].add, parameter[name[start]]]
variable[to_visit] assign[=] call[name[deque], parameter[list[[<ast.Tuple object at 0x7da207f01cf0>]]]]
while name[to_visit] begin[:]
<ast.Tuple object at 0x7da18bc72b00> assign[=] call[name[to_visit].popleft, parameter[]]
if compare[name[depth] equal[==] name[generations]] begin[:]
continue
for taget[name[parent]] in starred[call[name[self].parents, parameter[name[vertex]]]] begin[:]
if compare[name[parent] <ast.NotIn object at 0x7da2590d7190> name[visited]] begin[:]
call[name[visited].add, parameter[name[parent]]]
call[name[to_visit].append, parameter[tuple[[<ast.Name object at 0x7da18bc71690>, <ast.BinOp object at 0x7da18bc73e80>]]]]
return[call[name[self].full_subgraph, parameter[name[visited]]]] | keyword[def] identifier[ancestors] ( identifier[self] , identifier[start] , identifier[generations] = keyword[None] ):
literal[string]
identifier[visited] = identifier[self] . identifier[vertex_set] ()
identifier[visited] . identifier[add] ( identifier[start] )
identifier[to_visit] = identifier[deque] ([( identifier[start] , literal[int] )])
keyword[while] identifier[to_visit] :
identifier[vertex] , identifier[depth] = identifier[to_visit] . identifier[popleft] ()
keyword[if] identifier[depth] == identifier[generations] :
keyword[continue]
keyword[for] identifier[parent] keyword[in] identifier[self] . identifier[parents] ( identifier[vertex] ):
keyword[if] identifier[parent] keyword[not] keyword[in] identifier[visited] :
identifier[visited] . identifier[add] ( identifier[parent] )
identifier[to_visit] . identifier[append] (( identifier[parent] , identifier[depth] + literal[int] ))
keyword[return] identifier[self] . identifier[full_subgraph] ( identifier[visited] ) | def ancestors(self, start, generations=None):
"""
Return the subgraph of all nodes from which the given vertex is
reachable, including that vertex.
If specified, the optional `generations` argument specifies how
many generations to limit to.
"""
visited = self.vertex_set()
visited.add(start)
to_visit = deque([(start, 0)])
while to_visit:
(vertex, depth) = to_visit.popleft()
if depth == generations:
continue # depends on [control=['if'], data=[]]
for parent in self.parents(vertex):
if parent not in visited:
visited.add(parent)
to_visit.append((parent, depth + 1)) # depends on [control=['if'], data=['parent', 'visited']] # depends on [control=['for'], data=['parent']] # depends on [control=['while'], data=[]]
return self.full_subgraph(visited) |
def plot_i(self, colorbar=True, cb_orientation='vertical',
           cb_label=None, ax=None, show=True, fname=None, **kwargs):
    """
    Plot the dimensionless quantity I of Pedersen and Rasmussen (1990),
    I = -(I2/2)**2 / (I1/3)**3, which lies between 0 and 1.

    Usage
    -----
    x.plot_i([tick_interval, xlabel, ylabel, ax, colorbar, cb_orientation,
              cb_label, show, fname])

    Parameters
    ----------
    tick_interval : list or tuple, optional, default = [30, 30]
        Intervals to use when plotting the x and y ticks. If set to None,
        ticks will not be plotted.
    xlabel : str, optional, default = 'longitude'
        Label for the longitude axis.
    ylabel : str, optional, default = 'latitude'
        Label for the latitude axis.
    ax : matplotlib axes object, optional, default = None
        A single matplotlib axes object where the plot will appear.
    colorbar : bool, optional, default = True
        If True, plot a colorbar.
    cb_orientation : str, optional, default = 'vertical'
        Orientation of the colorbar: either 'vertical' or 'horizontal'.
    cb_label : str, optional, default = '$-(I_2/2)^{2} / (I_1/3)^{3}$'
        Text label for the colorbar.
    show : bool, optional, default = True
        If True, plot the image to the screen.
    fname : str, optional, default = None
        If present, and if axes is not specified, save the image to the
        specified file.
    kwargs : optional
        Keyword arguments forwarded to the SHGrid.plot() and
        plt.imshow() methods.
    """
    label = self._i_label if cb_label is None else cb_label
    if self.i is None:
        # Lazily compute the invariants the first time a plot is asked for.
        self.compute_invar()
    if ax is not None:
        # Caller supplied the axes: draw in place, nothing to return.
        self.i.plot(colorbar=colorbar, cb_orientation=cb_orientation,
                    cb_label=label, ax=ax, **kwargs)
        return
    fig, axes = self.i.plot(colorbar=colorbar,
                            cb_orientation=cb_orientation,
                            cb_label=label, show=False, **kwargs)
    if show:
        fig.show()
    if fname is not None:
        fig.savefig(fname)
    return fig, axes
constant[
Plot the dimensionless quantity I of Pedersen and Rasmussen (1990)
I = -(I2/2)**2 / (I1/3)**3
that is bounded by 0 and 1.
Usage
-----
x.plot_i([tick_interval, xlabel, ylabel, ax, colorbar, cb_orientation,
cb_label, show, fname])
Parameters
----------
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
colorbar : bool, optional, default = True
If True, plot a colorbar.
cb_orientation : str, optional, default = 'vertical'
Orientation of the colorbar: either 'vertical' or 'horizontal'.
cb_label : str, optional, default = '$-(I_2/2)^{2} / (I_1/3)^{3}$'
Text label for the colorbar.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
kwargs : optional
Keyword arguements that will be sent to the SHGrid.plot()
and plt.imshow() methods.
]
if compare[name[cb_label] is constant[None]] begin[:]
variable[cb_label] assign[=] name[self]._i_label
if compare[name[self].i is constant[None]] begin[:]
call[name[self].compute_invar, parameter[]]
if compare[name[ax] is constant[None]] begin[:]
<ast.Tuple object at 0x7da1b079bcd0> assign[=] call[name[self].i.plot, parameter[]]
if name[show] begin[:]
call[name[fig].show, parameter[]]
if compare[name[fname] is_not constant[None]] begin[:]
call[name[fig].savefig, parameter[name[fname]]]
return[tuple[[<ast.Name object at 0x7da20c6aab30>, <ast.Name object at 0x7da20c6ab580>]]] | keyword[def] identifier[plot_i] ( identifier[self] , identifier[colorbar] = keyword[True] , identifier[cb_orientation] = literal[string] ,
identifier[cb_label] = keyword[None] , identifier[ax] = keyword[None] , identifier[show] = keyword[True] , identifier[fname] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[cb_label] keyword[is] keyword[None] :
identifier[cb_label] = identifier[self] . identifier[_i_label]
keyword[if] identifier[self] . identifier[i] keyword[is] keyword[None] :
identifier[self] . identifier[compute_invar] ()
keyword[if] identifier[ax] keyword[is] keyword[None] :
identifier[fig] , identifier[axes] = identifier[self] . identifier[i] . identifier[plot] ( identifier[colorbar] = identifier[colorbar] ,
identifier[cb_orientation] = identifier[cb_orientation] ,
identifier[cb_label] = identifier[cb_label] , identifier[show] = keyword[False] ,** identifier[kwargs] )
keyword[if] identifier[show] :
identifier[fig] . identifier[show] ()
keyword[if] identifier[fname] keyword[is] keyword[not] keyword[None] :
identifier[fig] . identifier[savefig] ( identifier[fname] )
keyword[return] identifier[fig] , identifier[axes]
keyword[else] :
identifier[self] . identifier[i] . identifier[plot] ( identifier[colorbar] = identifier[colorbar] , identifier[cb_orientation] = identifier[cb_orientation] ,
identifier[cb_label] = identifier[cb_label] , identifier[ax] = identifier[ax] ,** identifier[kwargs] ) | def plot_i(self, colorbar=True, cb_orientation='vertical', cb_label=None, ax=None, show=True, fname=None, **kwargs):
"""
Plot the dimensionless quantity I of Pedersen and Rasmussen (1990)
I = -(I2/2)**2 / (I1/3)**3
that is bounded by 0 and 1.
Usage
-----
x.plot_i([tick_interval, xlabel, ylabel, ax, colorbar, cb_orientation,
cb_label, show, fname])
Parameters
----------
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
colorbar : bool, optional, default = True
If True, plot a colorbar.
cb_orientation : str, optional, default = 'vertical'
Orientation of the colorbar: either 'vertical' or 'horizontal'.
cb_label : str, optional, default = '$-(I_2/2)^{2} / (I_1/3)^{3}$'
Text label for the colorbar.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
kwargs : optional
Keyword arguements that will be sent to the SHGrid.plot()
and plt.imshow() methods.
"""
if cb_label is None:
cb_label = self._i_label # depends on [control=['if'], data=['cb_label']]
if self.i is None:
self.compute_invar() # depends on [control=['if'], data=[]]
if ax is None:
(fig, axes) = self.i.plot(colorbar=colorbar, cb_orientation=cb_orientation, cb_label=cb_label, show=False, **kwargs)
if show:
fig.show() # depends on [control=['if'], data=[]]
if fname is not None:
fig.savefig(fname) # depends on [control=['if'], data=['fname']]
return (fig, axes) # depends on [control=['if'], data=[]]
else:
self.i.plot(colorbar=colorbar, cb_orientation=cb_orientation, cb_label=cb_label, ax=ax, **kwargs) |
def get_process_pids(self, process):
    """Return the PIDs of all running processes whose command line
    contains ``process``.

    Scans the ``/proc/[pid]/cmdline`` entries; returns an empty list when
    no process matches (or when /proc is unavailable on this platform).
    """
    pids = []
    cmd_line_glob = "/proc/[0-9]*/cmdline"
    for path in glob.glob(cmd_line_glob):
        try:
            # errors='replace' guards against undecodable bytes in a
            # command line, which previously raised UnicodeDecodeError
            # past the IOError-only handler.
            with open(path, 'r', errors='replace') as f:
                cmd_line = f.read().strip()
        except OSError:
            # Process exited between glob() and open(); skip it.
            continue
        if process in cmd_line:
            # Path layout is /proc/<pid>/cmdline -> component index 2.
            pids.append(path.split("/")[2])
    return pids
constant[Returns PIDs of all processes with process name.
If the process doesn't exist, returns an empty list]
variable[pids] assign[=] list[[]]
variable[cmd_line_glob] assign[=] constant[/proc/[0-9]*/cmdline]
variable[cmd_line_paths] assign[=] call[name[glob].glob, parameter[name[cmd_line_glob]]]
for taget[name[path]] in starred[name[cmd_line_paths]] begin[:]
<ast.Try object at 0x7da18f58d780>
return[name[pids]] | keyword[def] identifier[get_process_pids] ( identifier[self] , identifier[process] ):
literal[string]
identifier[pids] =[]
identifier[cmd_line_glob] = literal[string]
identifier[cmd_line_paths] = identifier[glob] . identifier[glob] ( identifier[cmd_line_glob] )
keyword[for] identifier[path] keyword[in] identifier[cmd_line_paths] :
keyword[try] :
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] :
identifier[cmd_line] = identifier[f] . identifier[read] (). identifier[strip] ()
keyword[if] identifier[process] keyword[in] identifier[cmd_line] :
identifier[pids] . identifier[append] ( identifier[path] . identifier[split] ( literal[string] )[ literal[int] ])
keyword[except] identifier[IOError] keyword[as] identifier[e] :
keyword[continue]
keyword[return] identifier[pids] | def get_process_pids(self, process):
"""Returns PIDs of all processes with process name.
If the process doesn't exist, returns an empty list"""
pids = []
cmd_line_glob = '/proc/[0-9]*/cmdline'
cmd_line_paths = glob.glob(cmd_line_glob)
for path in cmd_line_paths:
try:
with open(path, 'r') as f:
cmd_line = f.read().strip()
if process in cmd_line:
pids.append(path.split('/')[2]) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except IOError as e:
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['path']]
return pids |
def on_press(self, event):
    """On button press, record the press location and button if the
    event occurred inside our axes; ignore presses elsewhere."""
    if event.inaxes != self.ax:
        # Click landed in another axes (or outside any axes entirely).
        return
    # Store as ([x], [y], button): coordinates are kept in mutable
    # lists so later motion handlers can extend them.
    self.press = [event.xdata], [event.ydata], event.button
constant[on but-ton press we will see if the mouse is over us and store data]
if compare[name[event].inaxes not_equal[!=] name[self].ax] begin[:]
return[None]
name[self].press assign[=] tuple[[<ast.List object at 0x7da1b2714a90>, <ast.List object at 0x7da1b2714640>, <ast.Attribute object at 0x7da1b2717fd0>]] | keyword[def] identifier[on_press] ( identifier[self] , identifier[event] ):
literal[string]
keyword[if] identifier[event] . identifier[inaxes] != identifier[self] . identifier[ax] :
keyword[return]
identifier[self] . identifier[press] =[ identifier[event] . identifier[xdata] ],[ identifier[event] . identifier[ydata] ], identifier[event] . identifier[button] | def on_press(self, event):
"""on but-ton press we will see if the mouse is over us and store data"""
if event.inaxes != self.ax:
return # depends on [control=['if'], data=[]] # contains, attrd = self.rect.contains(event)
# if not contains: return
# print('event contains', self.rect.xy)
# x0, y0 = self.rect.xy
self.press = ([event.xdata], [event.ydata], event.button) |
def parse_manifest(path_to_manifest):
    """
    Parse a Toil Germline Pipeline manifest file into samples.

    :param str path_to_manifest: Path to sample manifest file
    :return: List of GermlineSample namedtuples
    :rtype: list[GermlineSample]
    """
    bam_re = r"^(?P<uuid>\S+)\s(?P<url>\S+[bsc][r]?am)"
    fq_re = r"^(?P<uuid>\S+)\s(?P<url>\S+)\s(?P<paired_url>\S+)?\s?(?P<rg_line>@RG\S+)"
    samples = []
    with open(path_to_manifest, 'r') as manifest:
        for raw_line in manifest.readlines():
            entry = raw_line.strip()
            # Comment lines are skipped outright.
            if entry.startswith('#'):
                continue
            match = re.match(bam_re, entry)
            if match:
                uuid = match.group('uuid')
                url = match.group('url')
                paired_url = rg_line = None
                require('.bam' in url.lower(),
                        'Expected .bam extension:\n{}:\t{}'.format(uuid, url))
            else:
                match = re.match(fq_re, entry)
                if match is None:
                    raise ValueError('Could not parse entry in manifest: %s\n%s' % (manifest.name, entry))
                uuid = match.group('uuid')
                url = match.group('url')
                paired_url = match.group('paired_url')
                rg_line = match.group('rg_line')
                require('.fq' in url.lower() or '.fastq' in url.lower(),
                        'Expected .fq extension:\n{}:\t{}'.format(uuid, url))
            # Checks that URL has a scheme
            require(urlparse(url).scheme, 'Invalid URL passed for {}'.format(url))
            samples.append(GermlineSample(uuid, url, paired_url, rg_line))
    return samples
constant[
Parses manifest file for Toil Germline Pipeline
:param str path_to_manifest: Path to sample manifest file
:return: List of GermlineSample namedtuples
:rtype: list[GermlineSample]
]
variable[bam_re] assign[=] constant[^(?P<uuid>\S+)\s(?P<url>\S+[bsc][r]?am)]
variable[fq_re] assign[=] constant[^(?P<uuid>\S+)\s(?P<url>\S+)\s(?P<paired_url>\S+)?\s?(?P<rg_line>@RG\S+)]
variable[samples] assign[=] list[[]]
with call[name[open], parameter[name[path_to_manifest], constant[r]]] begin[:]
for taget[name[line]] in starred[call[name[f].readlines, parameter[]]] begin[:]
variable[line] assign[=] call[name[line].strip, parameter[]]
if call[name[line].startswith, parameter[constant[#]]] begin[:]
continue
variable[bam_match] assign[=] call[name[re].match, parameter[name[bam_re], name[line]]]
variable[fastq_match] assign[=] call[name[re].match, parameter[name[fq_re], name[line]]]
if name[bam_match] begin[:]
variable[uuid] assign[=] call[name[bam_match].group, parameter[constant[uuid]]]
variable[url] assign[=] call[name[bam_match].group, parameter[constant[url]]]
variable[paired_url] assign[=] constant[None]
variable[rg_line] assign[=] constant[None]
call[name[require], parameter[compare[constant[.bam] in call[name[url].lower, parameter[]]], call[constant[Expected .bam extension:
{}: {}].format, parameter[name[uuid], name[url]]]]]
call[name[require], parameter[call[name[urlparse], parameter[name[url]]].scheme, call[constant[Invalid URL passed for {}].format, parameter[name[url]]]]]
call[name[samples].append, parameter[call[name[GermlineSample], parameter[name[uuid], name[url], name[paired_url], name[rg_line]]]]]
return[name[samples]] | keyword[def] identifier[parse_manifest] ( identifier[path_to_manifest] ):
literal[string]
identifier[bam_re] = literal[string]
identifier[fq_re] = literal[string]
identifier[samples] =[]
keyword[with] identifier[open] ( identifier[path_to_manifest] , literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[line] keyword[in] identifier[f] . identifier[readlines] ():
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[continue]
identifier[bam_match] = identifier[re] . identifier[match] ( identifier[bam_re] , identifier[line] )
identifier[fastq_match] = identifier[re] . identifier[match] ( identifier[fq_re] , identifier[line] )
keyword[if] identifier[bam_match] :
identifier[uuid] = identifier[bam_match] . identifier[group] ( literal[string] )
identifier[url] = identifier[bam_match] . identifier[group] ( literal[string] )
identifier[paired_url] = keyword[None]
identifier[rg_line] = keyword[None]
identifier[require] ( literal[string] keyword[in] identifier[url] . identifier[lower] (),
literal[string] . identifier[format] ( identifier[uuid] , identifier[url] ))
keyword[elif] identifier[fastq_match] :
identifier[uuid] = identifier[fastq_match] . identifier[group] ( literal[string] )
identifier[url] = identifier[fastq_match] . identifier[group] ( literal[string] )
identifier[paired_url] = identifier[fastq_match] . identifier[group] ( literal[string] )
identifier[rg_line] = identifier[fastq_match] . identifier[group] ( literal[string] )
identifier[require] ( literal[string] keyword[in] identifier[url] . identifier[lower] () keyword[or] literal[string] keyword[in] identifier[url] . identifier[lower] (),
literal[string] . identifier[format] ( identifier[uuid] , identifier[url] ))
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[f] . identifier[name] , identifier[line] ))
identifier[require] ( identifier[urlparse] ( identifier[url] ). identifier[scheme] , literal[string] . identifier[format] ( identifier[url] ))
identifier[samples] . identifier[append] ( identifier[GermlineSample] ( identifier[uuid] , identifier[url] , identifier[paired_url] , identifier[rg_line] ))
keyword[return] identifier[samples] | def parse_manifest(path_to_manifest):
"""
Parses manifest file for Toil Germline Pipeline
:param str path_to_manifest: Path to sample manifest file
:return: List of GermlineSample namedtuples
:rtype: list[GermlineSample]
"""
bam_re = '^(?P<uuid>\\S+)\\s(?P<url>\\S+[bsc][r]?am)'
fq_re = '^(?P<uuid>\\S+)\\s(?P<url>\\S+)\\s(?P<paired_url>\\S+)?\\s?(?P<rg_line>@RG\\S+)'
samples = []
with open(path_to_manifest, 'r') as f:
for line in f.readlines():
line = line.strip()
if line.startswith('#'):
continue # depends on [control=['if'], data=[]]
bam_match = re.match(bam_re, line)
fastq_match = re.match(fq_re, line)
if bam_match:
uuid = bam_match.group('uuid')
url = bam_match.group('url')
paired_url = None
rg_line = None
require('.bam' in url.lower(), 'Expected .bam extension:\n{}:\t{}'.format(uuid, url)) # depends on [control=['if'], data=[]]
elif fastq_match:
uuid = fastq_match.group('uuid')
url = fastq_match.group('url')
paired_url = fastq_match.group('paired_url')
rg_line = fastq_match.group('rg_line')
require('.fq' in url.lower() or '.fastq' in url.lower(), 'Expected .fq extension:\n{}:\t{}'.format(uuid, url)) # depends on [control=['if'], data=[]]
else:
raise ValueError('Could not parse entry in manifest: %s\n%s' % (f.name, line))
# Checks that URL has a scheme
require(urlparse(url).scheme, 'Invalid URL passed for {}'.format(url))
samples.append(GermlineSample(uuid, url, paired_url, rg_line)) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']]
return samples |
def _eight_byte_real(value):
"""
Convert a number into the GDSII 8 byte real format.
Parameters
----------
value : number
The number to be converted.
Returns
-------
out : string
The GDSII binary string that represents ``value``.
"""
if value == 0:
return b'\x00\x00\x00\x00\x00\x00\x00\x00'
if value < 0:
byte1 = 0x80
value = -value
else:
byte1 = 0x00
fexp = numpy.log2(value) / 4
exponent = int(numpy.ceil(fexp))
if fexp == exponent:
exponent += 1
mantissa = int(value * 16.**(14 - exponent))
byte1 += exponent + 64
byte2 = (mantissa // 281474976710656)
short3 = (mantissa % 281474976710656) // 4294967296
long4 = mantissa % 4294967296
return struct.pack(">HHL", byte1 * 256 + byte2, short3, long4) | def function[_eight_byte_real, parameter[value]]:
constant[
Convert a number into the GDSII 8 byte real format.
Parameters
----------
value : number
The number to be converted.
Returns
-------
out : string
The GDSII binary string that represents ``value``.
]
if compare[name[value] equal[==] constant[0]] begin[:]
return[constant[b'\x00\x00\x00\x00\x00\x00\x00\x00']]
if compare[name[value] less[<] constant[0]] begin[:]
variable[byte1] assign[=] constant[128]
variable[value] assign[=] <ast.UnaryOp object at 0x7da18fe914e0>
variable[fexp] assign[=] binary_operation[call[name[numpy].log2, parameter[name[value]]] / constant[4]]
variable[exponent] assign[=] call[name[int], parameter[call[name[numpy].ceil, parameter[name[fexp]]]]]
if compare[name[fexp] equal[==] name[exponent]] begin[:]
<ast.AugAssign object at 0x7da18fe91cf0>
variable[mantissa] assign[=] call[name[int], parameter[binary_operation[name[value] * binary_operation[constant[16.0] ** binary_operation[constant[14] - name[exponent]]]]]]
<ast.AugAssign object at 0x7da18fe906d0>
variable[byte2] assign[=] binary_operation[name[mantissa] <ast.FloorDiv object at 0x7da2590d6bc0> constant[281474976710656]]
variable[short3] assign[=] binary_operation[binary_operation[name[mantissa] <ast.Mod object at 0x7da2590d6920> constant[281474976710656]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4294967296]]
variable[long4] assign[=] binary_operation[name[mantissa] <ast.Mod object at 0x7da2590d6920> constant[4294967296]]
return[call[name[struct].pack, parameter[constant[>HHL], binary_operation[binary_operation[name[byte1] * constant[256]] + name[byte2]], name[short3], name[long4]]]] | keyword[def] identifier[_eight_byte_real] ( identifier[value] ):
literal[string]
keyword[if] identifier[value] == literal[int] :
keyword[return] literal[string]
keyword[if] identifier[value] < literal[int] :
identifier[byte1] = literal[int]
identifier[value] =- identifier[value]
keyword[else] :
identifier[byte1] = literal[int]
identifier[fexp] = identifier[numpy] . identifier[log2] ( identifier[value] )/ literal[int]
identifier[exponent] = identifier[int] ( identifier[numpy] . identifier[ceil] ( identifier[fexp] ))
keyword[if] identifier[fexp] == identifier[exponent] :
identifier[exponent] += literal[int]
identifier[mantissa] = identifier[int] ( identifier[value] * literal[int] **( literal[int] - identifier[exponent] ))
identifier[byte1] += identifier[exponent] + literal[int]
identifier[byte2] =( identifier[mantissa] // literal[int] )
identifier[short3] =( identifier[mantissa] % literal[int] )// literal[int]
identifier[long4] = identifier[mantissa] % literal[int]
keyword[return] identifier[struct] . identifier[pack] ( literal[string] , identifier[byte1] * literal[int] + identifier[byte2] , identifier[short3] , identifier[long4] ) | def _eight_byte_real(value):
"""
Convert a number into the GDSII 8 byte real format.
Parameters
----------
value : number
The number to be converted.
Returns
-------
out : string
The GDSII binary string that represents ``value``.
"""
if value == 0:
return b'\x00\x00\x00\x00\x00\x00\x00\x00' # depends on [control=['if'], data=[]]
if value < 0:
byte1 = 128
value = -value # depends on [control=['if'], data=['value']]
else:
byte1 = 0
fexp = numpy.log2(value) / 4
exponent = int(numpy.ceil(fexp))
if fexp == exponent:
exponent += 1 # depends on [control=['if'], data=['exponent']]
mantissa = int(value * 16.0 ** (14 - exponent))
byte1 += exponent + 64
byte2 = mantissa // 281474976710656
short3 = mantissa % 281474976710656 // 4294967296
long4 = mantissa % 4294967296
return struct.pack('>HHL', byte1 * 256 + byte2, short3, long4) |
def infer(source, headers=1, limit=100, confidence=0.75, **options):
    """https://github.com/frictionlessdata/tableschema-py#schema

    Infer a table schema descriptor from ``source``.

    :param source: table source (path, list of rows, ...)
    :param headers: row number of the headers (or the deprecated
        positional header list, see below)
    :param int limit: maximum number of rows to inspect
    :param float confidence: required ratio of rows conforming to an
        inferred type before it is accepted
    :returns: inferred schema descriptor (dict)
    """
    # Deprecated arguments order: infer(headers, source). Detect it by a
    # non-empty list of strings in the ``source`` position and swap. The
    # non-empty check avoids misclassifying an empty source list, since
    # ``all()`` over an empty sequence is vacuously True.
    if (isinstance(source, list) and source
            and all(isinstance(item, six.string_types) for item in source)):
        warnings.warn('Correct arguments order infer(source, headers)', UserWarning)
        source, headers = headers, source
    table = Table(source, headers=headers, **options)
    descriptor = table.infer(limit=limit, confidence=confidence)
    return descriptor
constant[https://github.com/frictionlessdata/tableschema-py#schema
]
variable[is_string] assign[=] <ast.Lambda object at 0x7da20c6a81c0>
if <ast.BoolOp object at 0x7da20c6a9f60> begin[:]
call[name[warnings].warn, parameter[constant[Correct arguments order infer(source, headers)], name[UserWarning]]]
<ast.Tuple object at 0x7da20c6a8670> assign[=] tuple[[<ast.Name object at 0x7da20c6a8bb0>, <ast.Name object at 0x7da20c6a9630>]]
variable[table] assign[=] call[name[Table], parameter[name[source]]]
variable[descriptor] assign[=] call[name[table].infer, parameter[]]
return[name[descriptor]] | keyword[def] identifier[infer] ( identifier[source] , identifier[headers] = literal[int] , identifier[limit] = literal[int] , identifier[confidence] = literal[int] ,** identifier[options] ):
literal[string]
identifier[is_string] = keyword[lambda] identifier[value] : identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] )
keyword[if] identifier[isinstance] ( identifier[source] , identifier[list] ) keyword[and] identifier[all] ( identifier[map] ( identifier[is_string] , identifier[source] )):
identifier[warnings] . identifier[warn] ( literal[string] , identifier[UserWarning] )
identifier[source] , identifier[headers] = identifier[headers] , identifier[source]
identifier[table] = identifier[Table] ( identifier[source] , identifier[headers] = identifier[headers] ,** identifier[options] )
identifier[descriptor] = identifier[table] . identifier[infer] ( identifier[limit] = identifier[limit] , identifier[confidence] = identifier[confidence] )
keyword[return] identifier[descriptor] | def infer(source, headers=1, limit=100, confidence=0.75, **options):
"""https://github.com/frictionlessdata/tableschema-py#schema
"""
# Deprecated arguments order
is_string = lambda value: isinstance(value, six.string_types)
if isinstance(source, list) and all(map(is_string, source)):
warnings.warn('Correct arguments order infer(source, headers)', UserWarning)
(source, headers) = (headers, source) # depends on [control=['if'], data=[]]
table = Table(source, headers=headers, **options)
descriptor = table.infer(limit=limit, confidence=confidence)
return descriptor |
def download(cls, filename, input_dir, dl_dir=None):
    """Fetch the remote blob referenced by ``filename`` into a local
    directory and return the path of the downloaded file.

    The download is skipped when the target file already exists. When
    ``dl_dir`` is not given, a directory mirroring the remote
    container/blob layout is created under ``input_dir``.
    """
    file_info = cls.parse_remote(filename)
    if not dl_dir:
        dl_dir = os.path.join(input_dir, file_info.container,
                              os.path.dirname(file_info.blob))
        utils.safe_makedir(dl_dir)
    out_file = os.path.join(dl_dir, os.path.basename(file_info.blob))
    if utils.file_exists(out_file):
        return out_file
    # file_transaction provides an atomic write target so a partial
    # download never appears at the final path.
    with file_transaction({}, out_file) as tx_out_file:
        blob_service = cls.connect(filename)
        blob_service.get_blob_to_path(
            container_name=file_info.container,
            blob_name=file_info.blob,
            file_path=tx_out_file)
    return out_file
constant[Download the resource from the storage.]
variable[file_info] assign[=] call[name[cls].parse_remote, parameter[name[filename]]]
if <ast.UnaryOp object at 0x7da1b1709de0> begin[:]
variable[dl_dir] assign[=] call[name[os].path.join, parameter[name[input_dir], name[file_info].container, call[name[os].path.dirname, parameter[name[file_info].blob]]]]
call[name[utils].safe_makedir, parameter[name[dl_dir]]]
variable[out_file] assign[=] call[name[os].path.join, parameter[name[dl_dir], call[name[os].path.basename, parameter[name[file_info].blob]]]]
if <ast.UnaryOp object at 0x7da1b19ba0e0> begin[:]
with call[name[file_transaction], parameter[dictionary[[], []], name[out_file]]] begin[:]
variable[blob_service] assign[=] call[name[cls].connect, parameter[name[filename]]]
call[name[blob_service].get_blob_to_path, parameter[]]
return[name[out_file]] | keyword[def] identifier[download] ( identifier[cls] , identifier[filename] , identifier[input_dir] , identifier[dl_dir] = keyword[None] ):
literal[string]
identifier[file_info] = identifier[cls] . identifier[parse_remote] ( identifier[filename] )
keyword[if] keyword[not] identifier[dl_dir] :
identifier[dl_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[input_dir] , identifier[file_info] . identifier[container] ,
identifier[os] . identifier[path] . identifier[dirname] ( identifier[file_info] . identifier[blob] ))
identifier[utils] . identifier[safe_makedir] ( identifier[dl_dir] )
identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[dl_dir] , identifier[os] . identifier[path] . identifier[basename] ( identifier[file_info] . identifier[blob] ))
keyword[if] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[out_file] ):
keyword[with] identifier[file_transaction] ({}, identifier[out_file] ) keyword[as] identifier[tx_out_file] :
identifier[blob_service] = identifier[cls] . identifier[connect] ( identifier[filename] )
identifier[blob_service] . identifier[get_blob_to_path] (
identifier[container_name] = identifier[file_info] . identifier[container] ,
identifier[blob_name] = identifier[file_info] . identifier[blob] ,
identifier[file_path] = identifier[tx_out_file] )
keyword[return] identifier[out_file] | def download(cls, filename, input_dir, dl_dir=None):
"""Download the resource from the storage."""
file_info = cls.parse_remote(filename)
if not dl_dir:
dl_dir = os.path.join(input_dir, file_info.container, os.path.dirname(file_info.blob))
utils.safe_makedir(dl_dir) # depends on [control=['if'], data=[]]
out_file = os.path.join(dl_dir, os.path.basename(file_info.blob))
if not utils.file_exists(out_file):
with file_transaction({}, out_file) as tx_out_file:
blob_service = cls.connect(filename)
blob_service.get_blob_to_path(container_name=file_info.container, blob_name=file_info.blob, file_path=tx_out_file) # depends on [control=['with'], data=['tx_out_file']] # depends on [control=['if'], data=[]]
return out_file |
def set_data_length(self, length):
    # type: (int) -> None
    '''
    A method to set the length of data for this El Torito Entry.

    Parameters:
     length - The new length (in bytes) for the El Torito Entry.
    Returns:
     Nothing.
    Raises:
     PyCdlibInternalError - If this entry has not been initialized yet.
    '''
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('El Torito Entry not initialized')
    # El Torito measures data in 512-byte virtual sectors; round up so
    # a partial final sector is still counted.
    self.sector_count = utils.ceiling_div(length, 512)
constant[
A method to set the length of data for this El Torito Entry.
Parameters:
length - The new length for the El Torito Entry.
Returns:
Nothing.
]
if <ast.UnaryOp object at 0x7da1b0f61960> begin[:]
<ast.Raise object at 0x7da1b0f606d0>
name[self].sector_count assign[=] call[name[utils].ceiling_div, parameter[name[length], constant[512]]] | keyword[def] identifier[set_data_length] ( identifier[self] , identifier[length] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_initialized] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] )
identifier[self] . identifier[sector_count] = identifier[utils] . identifier[ceiling_div] ( identifier[length] , literal[int] ) | def set_data_length(self, length):
# type: (int) -> None
'\n A method to set the length of data for this El Torito Entry.\n\n Parameters:\n length - The new length for the El Torito Entry.\n Returns:\n Nothing.\n '
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('El Torito Entry not initialized') # depends on [control=['if'], data=[]]
self.sector_count = utils.ceiling_div(length, 512) |
def solar_longitude( day, orb=const.orb_present, days_per_year = None ):
    """Estimates solar longitude from calendar day.

    Method is using an approximation from :cite:`Berger_1978` section 3
    (lambda = 0 at spring equinox).

    **Function-call arguments** \n

    :param array day: Indicator of time of year.
    :param dict orb: a dictionary with three members (as provided by
                     :class:`~climlab.solar.orbital.OrbitalTable`)

                     * ``'ecc'`` - eccentricity

                        * unit: dimensionless
                        * default value: ``0.017236``

                     * ``'long_peri'`` - longitude of perihelion
                        (precession angle)

                        * unit: degrees
                        * default value: ``281.37``

                     * ``'obliquity'`` - obliquity angle

                        * unit: degrees
                        * default value: ``23.446``

    :param float days_per_year: number of days in a year (optional)
                                (default: 365.2422)
                                Reads the length of the year from
                                :mod:`~climlab.utils.constants` if available.
    :returns: solar longitude ``lambda_long``
              in dimension``( day.size, ecc.size )``
    :rtype: array

    Works for both scalar and vector orbital parameters.
    """
    if days_per_year is None:
        days_per_year = const.days_per_year
    ecc = orb['ecc']
    # Longitude of perihelion is given in degrees; trig below needs radians.
    long_peri_rad = deg2rad( orb['long_peri'])
    # Day 80 is taken as the spring equinox, where lambda = 0 (see docstring);
    # delta_lambda is the mean angular progress since then.
    delta_lambda = (day - 80.) * 2*pi/days_per_year
    beta = sqrt(1 - ecc**2)
    # Mean longitude: series expansion in eccentricity up to ecc**3,
    # per the cited Berger (1978) section 3 approximation.
    lambda_long_m = -2*((ecc/2 + (ecc**3)/8 ) * (1+beta) * sin(-long_peri_rad) -
        (ecc**2)/4 * (1/2 + beta) * sin(-2*long_peri_rad) + (ecc**3)/8 *
        (1/3 + beta) * sin(-3*long_peri_rad)) + delta_lambda
    # True longitude from the mean longitude, again a series in eccentricity
    # up to ecc**3 (same Berger 1978 approximation).
    lambda_long = ( lambda_long_m + (2*ecc - (ecc**3)/4)*sin(lambda_long_m - long_peri_rad) +
        (5/4)*(ecc**2) * sin(2*(lambda_long_m - long_peri_rad)) + (13/12)*(ecc**3)
        * sin(3*(lambda_long_m - long_peri_rad)) )
    return lambda_long
constant[Estimates solar longitude from calendar day.
Method is using an approximation from :cite:`Berger_1978` section 3
(lambda = 0 at spring equinox).
**Function-call arguments**
:param array day: Indicator of time of year.
:param dict orb: a dictionary with three members (as provided by
:class:`~climlab.solar.orbital.OrbitalTable`)
* ``'ecc'`` - eccentricity
* unit: dimensionless
* default value: ``0.017236``
* ``'long_peri'`` - longitude of perihelion
(precession angle)
* unit: degrees
* default value: ``281.37``
* ``'obliquity'`` - obliquity angle
* unit: degrees
* default value: ``23.446``
:param float days_per_year: number of days in a year (optional)
(default: 365.2422)
Reads the length of the year from
:mod:`~climlab.utils.constants` if available.
:returns: solar longitude ``lambda_long``
in dimension``( day.size, ecc.size )``
:rtype: array
Works for both scalar and vector orbital parameters.
]
if compare[name[days_per_year] is constant[None]] begin[:]
variable[days_per_year] assign[=] name[const].days_per_year
variable[ecc] assign[=] call[name[orb]][constant[ecc]]
variable[long_peri_rad] assign[=] call[name[deg2rad], parameter[call[name[orb]][constant[long_peri]]]]
variable[delta_lambda] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[day] - constant[80.0]] * constant[2]] * name[pi]] / name[days_per_year]]
variable[beta] assign[=] call[name[sqrt], parameter[binary_operation[constant[1] - binary_operation[name[ecc] ** constant[2]]]]]
variable[lambda_long_m] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b1209060> * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[ecc] / constant[2]] + binary_operation[binary_operation[name[ecc] ** constant[3]] / constant[8]]] * binary_operation[constant[1] + name[beta]]] * call[name[sin], parameter[<ast.UnaryOp object at 0x7da1b120a350>]]] - binary_operation[binary_operation[binary_operation[binary_operation[name[ecc] ** constant[2]] / constant[4]] * binary_operation[binary_operation[constant[1] / constant[2]] + name[beta]]] * call[name[sin], parameter[binary_operation[<ast.UnaryOp object at 0x7da1b1393670> * name[long_peri_rad]]]]]] + binary_operation[binary_operation[binary_operation[binary_operation[name[ecc] ** constant[3]] / constant[8]] * binary_operation[binary_operation[constant[1] / constant[3]] + name[beta]]] * call[name[sin], parameter[binary_operation[<ast.UnaryOp object at 0x7da1b1393010> * name[long_peri_rad]]]]]]] + name[delta_lambda]]
variable[lambda_long] assign[=] binary_operation[binary_operation[binary_operation[name[lambda_long_m] + binary_operation[binary_operation[binary_operation[constant[2] * name[ecc]] - binary_operation[binary_operation[name[ecc] ** constant[3]] / constant[4]]] * call[name[sin], parameter[binary_operation[name[lambda_long_m] - name[long_peri_rad]]]]]] + binary_operation[binary_operation[binary_operation[constant[5] / constant[4]] * binary_operation[name[ecc] ** constant[2]]] * call[name[sin], parameter[binary_operation[constant[2] * binary_operation[name[lambda_long_m] - name[long_peri_rad]]]]]]] + binary_operation[binary_operation[binary_operation[constant[13] / constant[12]] * binary_operation[name[ecc] ** constant[3]]] * call[name[sin], parameter[binary_operation[constant[3] * binary_operation[name[lambda_long_m] - name[long_peri_rad]]]]]]]
return[name[lambda_long]] | keyword[def] identifier[solar_longitude] ( identifier[day] , identifier[orb] = identifier[const] . identifier[orb_present] , identifier[days_per_year] = keyword[None] ):
literal[string]
keyword[if] identifier[days_per_year] keyword[is] keyword[None] :
identifier[days_per_year] = identifier[const] . identifier[days_per_year]
identifier[ecc] = identifier[orb] [ literal[string] ]
identifier[long_peri_rad] = identifier[deg2rad] ( identifier[orb] [ literal[string] ])
identifier[delta_lambda] =( identifier[day] - literal[int] )* literal[int] * identifier[pi] / identifier[days_per_year]
identifier[beta] = identifier[sqrt] ( literal[int] - identifier[ecc] ** literal[int] )
identifier[lambda_long_m] =- literal[int] *(( identifier[ecc] / literal[int] +( identifier[ecc] ** literal[int] )/ literal[int] )*( literal[int] + identifier[beta] )* identifier[sin] (- identifier[long_peri_rad] )-
( identifier[ecc] ** literal[int] )/ literal[int] *( literal[int] / literal[int] + identifier[beta] )* identifier[sin] (- literal[int] * identifier[long_peri_rad] )+( identifier[ecc] ** literal[int] )/ literal[int] *
( literal[int] / literal[int] + identifier[beta] )* identifier[sin] (- literal[int] * identifier[long_peri_rad] ))+ identifier[delta_lambda]
identifier[lambda_long] =( identifier[lambda_long_m] +( literal[int] * identifier[ecc] -( identifier[ecc] ** literal[int] )/ literal[int] )* identifier[sin] ( identifier[lambda_long_m] - identifier[long_peri_rad] )+
( literal[int] / literal[int] )*( identifier[ecc] ** literal[int] )* identifier[sin] ( literal[int] *( identifier[lambda_long_m] - identifier[long_peri_rad] ))+( literal[int] / literal[int] )*( identifier[ecc] ** literal[int] )
* identifier[sin] ( literal[int] *( identifier[lambda_long_m] - identifier[long_peri_rad] )))
keyword[return] identifier[lambda_long] | def solar_longitude(day, orb=const.orb_present, days_per_year=None):
"""Estimates solar longitude from calendar day.
Method is using an approximation from :cite:`Berger_1978` section 3
(lambda = 0 at spring equinox).
**Function-call arguments**
:param array day: Indicator of time of year.
:param dict orb: a dictionary with three members (as provided by
:class:`~climlab.solar.orbital.OrbitalTable`)
* ``'ecc'`` - eccentricity
* unit: dimensionless
* default value: ``0.017236``
* ``'long_peri'`` - longitude of perihelion
(precession angle)
* unit: degrees
* default value: ``281.37``
* ``'obliquity'`` - obliquity angle
* unit: degrees
* default value: ``23.446``
:param float days_per_year: number of days in a year (optional)
(default: 365.2422)
Reads the length of the year from
:mod:`~climlab.utils.constants` if available.
:returns: solar longitude ``lambda_long``
in dimension``( day.size, ecc.size )``
:rtype: array
Works for both scalar and vector orbital parameters.
"""
if days_per_year is None:
days_per_year = const.days_per_year # depends on [control=['if'], data=['days_per_year']]
ecc = orb['ecc']
long_peri_rad = deg2rad(orb['long_peri'])
delta_lambda = (day - 80.0) * 2 * pi / days_per_year
beta = sqrt(1 - ecc ** 2)
lambda_long_m = -2 * ((ecc / 2 + ecc ** 3 / 8) * (1 + beta) * sin(-long_peri_rad) - ecc ** 2 / 4 * (1 / 2 + beta) * sin(-2 * long_peri_rad) + ecc ** 3 / 8 * (1 / 3 + beta) * sin(-3 * long_peri_rad)) + delta_lambda
lambda_long = lambda_long_m + (2 * ecc - ecc ** 3 / 4) * sin(lambda_long_m - long_peri_rad) + 5 / 4 * ecc ** 2 * sin(2 * (lambda_long_m - long_peri_rad)) + 13 / 12 * ecc ** 3 * sin(3 * (lambda_long_m - long_peri_rad))
return lambda_long |
def ghuser_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    """Link to a GitHub user.

    Returns 2 part tuple containing list of nodes to insert into the
    document and a list of system messages. Both are allowed to be
    empty.

    :param name: The role name used in the document.
    :param rawtext: The entire markup snippet, with role.
    :param text: The text marked with the role.
    :param lineno: The line number where rawtext appears in the input.
    :param inliner: The inliner instance that called us.
    :param options: Directive options for customization.
    :param content: The directive content for customization.
    """
    # Avoid mutable default arguments ({} / []): use None sentinels and
    # create fresh objects per call.  `content` is accepted for the docutils
    # role signature but is not used here.
    if options is None:
        options = {}
    ref = 'https://www.github.com/' + text
    node = nodes.reference(rawtext, text, refuri=ref, **options)
    return [node], []
constant[Link to a GitHub user.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
]
variable[ref] assign[=] binary_operation[constant[https://www.github.com/] + name[text]]
variable[node] assign[=] call[name[nodes].reference, parameter[name[rawtext], name[text]]]
return[tuple[[<ast.List object at 0x7da1b0291270>, <ast.List object at 0x7da1b02930a0>]]] | keyword[def] identifier[ghuser_role] ( identifier[name] , identifier[rawtext] , identifier[text] , identifier[lineno] , identifier[inliner] , identifier[options] ={}, identifier[content] =[]):
literal[string]
identifier[ref] = literal[string] + identifier[text]
identifier[node] = identifier[nodes] . identifier[reference] ( identifier[rawtext] , identifier[text] , identifier[refuri] = identifier[ref] ,** identifier[options] )
keyword[return] [ identifier[node] ],[] | def ghuser_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Link to a GitHub user.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
"""
# app = inliner.document.settings.env.app
#app.info('user link %r' % text)
ref = 'https://www.github.com/' + text
node = nodes.reference(rawtext, text, refuri=ref, **options)
return ([node], []) |
def _fill_request(self, request, rdata):
    """Fill *request* with the fields extracted from a JSON-RPC call body."""
    if not isinstance(rdata, dict):
        raise InvalidRequestError
    # Each JSON-RPC member is pulled out by its dedicated extractor,
    # in the standard jsonrpc/id/method/params order.
    extractors = {
        'jsonrpc': self._get_jsonrpc,
        'id': self._get_id,
        'method': self._get_method,
        'params': self._get_params,
    }
    for field, extract in extractors.items():
        request[field] = extract(rdata)
request['params'] = self._get_params(rdata) | def function[_fill_request, parameter[self, request, rdata]]:
constant[Fills request with data from the jsonrpc call.]
if <ast.UnaryOp object at 0x7da1b0a71e70> begin[:]
<ast.Raise object at 0x7da1b0a701c0>
call[name[request]][constant[jsonrpc]] assign[=] call[name[self]._get_jsonrpc, parameter[name[rdata]]]
call[name[request]][constant[id]] assign[=] call[name[self]._get_id, parameter[name[rdata]]]
call[name[request]][constant[method]] assign[=] call[name[self]._get_method, parameter[name[rdata]]]
call[name[request]][constant[params]] assign[=] call[name[self]._get_params, parameter[name[rdata]]] | keyword[def] identifier[_fill_request] ( identifier[self] , identifier[request] , identifier[rdata] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[rdata] , identifier[dict] ):
keyword[raise] identifier[InvalidRequestError]
identifier[request] [ literal[string] ]= identifier[self] . identifier[_get_jsonrpc] ( identifier[rdata] )
identifier[request] [ literal[string] ]= identifier[self] . identifier[_get_id] ( identifier[rdata] )
identifier[request] [ literal[string] ]= identifier[self] . identifier[_get_method] ( identifier[rdata] )
identifier[request] [ literal[string] ]= identifier[self] . identifier[_get_params] ( identifier[rdata] ) | def _fill_request(self, request, rdata):
"""Fills request with data from the jsonrpc call."""
if not isinstance(rdata, dict):
raise InvalidRequestError # depends on [control=['if'], data=[]]
request['jsonrpc'] = self._get_jsonrpc(rdata)
request['id'] = self._get_id(rdata)
request['method'] = self._get_method(rdata)
request['params'] = self._get_params(rdata) |
def stream_to_packet(data):
    """
    Chop a stream of data into MODBUS packets.

    :param data: stream of data
    :returns: a tuple of the data that is a packet with the remaining
              data, or ``None``
    """
    header_len = 6
    if len(data) < header_len:
        # Not even a full MBAP header yet.
        return None
    # Bytes 4-5 of the MBAP header hold the big-endian length of the
    # remainder (unit id + PDU), so the full packet is that plus the header.
    (body_len,) = struct.unpack(">H", data[4:6])
    total = body_len + header_len
    if len(data) < total:
        return None
    return data[:total], data[total:]
return (data[:pktlen], data[pktlen:]) | def function[stream_to_packet, parameter[data]]:
constant[
Chop a stream of data into MODBUS packets.
:param data: stream of data
:returns: a tuple of the data that is a packet with the remaining
data, or ``None``
]
if compare[call[name[len], parameter[name[data]]] less[<] constant[6]] begin[:]
return[constant[None]]
variable[pktlen] assign[=] binary_operation[call[call[name[struct].unpack, parameter[constant[>H], call[name[data]][<ast.Slice object at 0x7da1b11a0910>]]]][constant[0]] + constant[6]]
if compare[call[name[len], parameter[name[data]]] less[<] name[pktlen]] begin[:]
return[constant[None]]
return[tuple[[<ast.Subscript object at 0x7da1b11a0e20>, <ast.Subscript object at 0x7da1b11a1f60>]]] | keyword[def] identifier[stream_to_packet] ( identifier[data] ):
literal[string]
keyword[if] identifier[len] ( identifier[data] )< literal[int] :
keyword[return] keyword[None]
identifier[pktlen] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[data] [ literal[int] : literal[int] ])[ literal[int] ]+ literal[int]
keyword[if] ( identifier[len] ( identifier[data] )< identifier[pktlen] ):
keyword[return] keyword[None]
keyword[return] ( identifier[data] [: identifier[pktlen] ], identifier[data] [ identifier[pktlen] :]) | def stream_to_packet(data):
"""
Chop a stream of data into MODBUS packets.
:param data: stream of data
:returns: a tuple of the data that is a packet with the remaining
data, or ``None``
"""
if len(data) < 6:
return None # depends on [control=['if'], data=[]]
# unpack the length
pktlen = struct.unpack('>H', data[4:6])[0] + 6
if len(data) < pktlen:
return None # depends on [control=['if'], data=[]]
return (data[:pktlen], data[pktlen:]) |
def ssh(self, vm_name=None, command=None, extra_ssh_args=None):
    '''
    Execute a command via ssh on the vm specified.

    command: The command to execute via ssh.
    extra_ssh_args: Corresponds to '--' option in the vagrant ssh command

    Returns the output of running the command.
    '''
    base = ['ssh', vm_name, '--command', command]
    # Anything after '--' is handed to ssh itself rather than to vagrant.
    tail = [] if extra_ssh_args is None else ['--', extra_ssh_args]
    return self._run_vagrant_command(base + tail)
constant[
Execute a command via ssh on the vm specified.
command: The command to execute via ssh.
extra_ssh_args: Corresponds to '--' option in the vagrant ssh command
Returns the output of running the command.
]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b1a20190>, <ast.Name object at 0x7da1b1a20b50>, <ast.Constant object at 0x7da1b1a21810>, <ast.Name object at 0x7da1b1a23130>]]
if compare[name[extra_ssh_args] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b1a21a50>
return[call[name[self]._run_vagrant_command, parameter[name[cmd]]]] | keyword[def] identifier[ssh] ( identifier[self] , identifier[vm_name] = keyword[None] , identifier[command] = keyword[None] , identifier[extra_ssh_args] = keyword[None] ):
literal[string]
identifier[cmd] =[ literal[string] , identifier[vm_name] , literal[string] , identifier[command] ]
keyword[if] identifier[extra_ssh_args] keyword[is] keyword[not] keyword[None] :
identifier[cmd] +=[ literal[string] , identifier[extra_ssh_args] ]
keyword[return] identifier[self] . identifier[_run_vagrant_command] ( identifier[cmd] ) | def ssh(self, vm_name=None, command=None, extra_ssh_args=None):
"""
Execute a command via ssh on the vm specified.
command: The command to execute via ssh.
extra_ssh_args: Corresponds to '--' option in the vagrant ssh command
Returns the output of running the command.
"""
cmd = ['ssh', vm_name, '--command', command]
if extra_ssh_args is not None:
cmd += ['--', extra_ssh_args] # depends on [control=['if'], data=['extra_ssh_args']]
return self._run_vagrant_command(cmd) |
def write_secret(path, **kwargs):
    '''
    Set secret at the path in vault. The vault policy used must allow this.

    CLI Example:

    .. code-block:: bash

            salt '*' vault.write_secret "secret/my/secret" user="foo" password="bar"
    '''
    log.debug('Writing vault secrets for %s at %s', __grains__['id'], path)
    # Drop Salt-injected dunder kwargs (e.g. __pub_*) before sending the
    # payload; a dict comprehension replaces the wasteful dict([...]) form.
    data = {x: y for x, y in kwargs.items() if not x.startswith('__')}
    try:
        url = 'v1/{0}'.format(path)
        response = __utils__['vault.make_request']('POST', url, json=data)
        if response.status_code == 200:
            # KV v2-style responses echo the written data back.
            return response.json()['data']
        elif response.status_code != 204:
            response.raise_for_status()
        return True
    except Exception as err:  # pylint: disable=broad-except
        # Best-effort execution-module convention: log and report failure
        # rather than raising to the caller.
        log.error('Failed to write secret! %s: %s', type(err).__name__, err)
        return False
constant[
Set secret at the path in vault. The vault policy used must allow this.
CLI Example:
.. code-block:: bash
salt '*' vault.write_secret "secret/my/secret" user="foo" password="bar"
]
call[name[log].debug, parameter[constant[Writing vault secrets for %s at %s], call[name[__grains__]][constant[id]], name[path]]]
variable[data] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da1b1f497b0>]]
<ast.Try object at 0x7da1b1f49450> | keyword[def] identifier[write_secret] ( identifier[path] ,** identifier[kwargs] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] , identifier[__grains__] [ literal[string] ], identifier[path] )
identifier[data] = identifier[dict] ([( identifier[x] , identifier[y] ) keyword[for] identifier[x] , identifier[y] keyword[in] identifier[kwargs] . identifier[items] () keyword[if] keyword[not] identifier[x] . identifier[startswith] ( literal[string] )])
keyword[try] :
identifier[url] = literal[string] . identifier[format] ( identifier[path] )
identifier[response] = identifier[__utils__] [ literal[string] ]( literal[string] , identifier[url] , identifier[json] = identifier[data] )
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
keyword[return] identifier[response] . identifier[json] ()[ literal[string] ]
keyword[elif] identifier[response] . identifier[status_code] != literal[int] :
identifier[response] . identifier[raise_for_status] ()
keyword[return] keyword[True]
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[log] . identifier[error] ( literal[string] , identifier[type] ( identifier[err] ). identifier[__name__] , identifier[err] )
keyword[return] keyword[False] | def write_secret(path, **kwargs):
"""
Set secret at the path in vault. The vault policy used must allow this.
CLI Example:
.. code-block:: bash
salt '*' vault.write_secret "secret/my/secret" user="foo" password="bar"
"""
log.debug('Writing vault secrets for %s at %s', __grains__['id'], path)
data = dict([(x, y) for (x, y) in kwargs.items() if not x.startswith('__')])
try:
url = 'v1/{0}'.format(path)
response = __utils__['vault.make_request']('POST', url, json=data)
if response.status_code == 200:
return response.json()['data'] # depends on [control=['if'], data=[]]
elif response.status_code != 204:
response.raise_for_status() # depends on [control=['if'], data=[]]
return True # depends on [control=['try'], data=[]]
except Exception as err:
log.error('Failed to write secret! %s: %s', type(err).__name__, err)
return False # depends on [control=['except'], data=['err']] |
def copyWithoutMathSubObjects(self):
    """
    return a new MathGlyph containing all data except:

    contours
    components
    anchors
    guidelines

    this is used mainly for internal glyph math.
    """
    copied = MathGlyph(None)
    copied.name = self.name
    copied.width = self.width
    copied.height = self.height
    copied.note = self.note
    # lib may hold nested containers, so copy it deeply.
    copied.lib = deepcopy(dict(self.lib))
    if self.unicodes is not None:
        copied.unicodes = list(self.unicodes)
    return copied
constant[
return a new MathGlyph containing all data except:
contours
components
anchors
guidelines
this is used mainly for internal glyph math.
]
variable[n] assign[=] call[name[MathGlyph], parameter[constant[None]]]
name[n].name assign[=] name[self].name
if compare[name[self].unicodes is_not constant[None]] begin[:]
name[n].unicodes assign[=] call[name[list], parameter[name[self].unicodes]]
name[n].width assign[=] name[self].width
name[n].height assign[=] name[self].height
name[n].note assign[=] name[self].note
name[n].lib assign[=] call[name[deepcopy], parameter[call[name[dict], parameter[name[self].lib]]]]
return[name[n]] | keyword[def] identifier[copyWithoutMathSubObjects] ( identifier[self] ):
literal[string]
identifier[n] = identifier[MathGlyph] ( keyword[None] )
identifier[n] . identifier[name] = identifier[self] . identifier[name]
keyword[if] identifier[self] . identifier[unicodes] keyword[is] keyword[not] keyword[None] :
identifier[n] . identifier[unicodes] = identifier[list] ( identifier[self] . identifier[unicodes] )
identifier[n] . identifier[width] = identifier[self] . identifier[width]
identifier[n] . identifier[height] = identifier[self] . identifier[height]
identifier[n] . identifier[note] = identifier[self] . identifier[note]
identifier[n] . identifier[lib] = identifier[deepcopy] ( identifier[dict] ( identifier[self] . identifier[lib] ))
keyword[return] identifier[n] | def copyWithoutMathSubObjects(self):
"""
return a new MathGlyph containing all data except:
contours
components
anchors
guidelines
this is used mainly for internal glyph math.
"""
n = MathGlyph(None)
n.name = self.name
if self.unicodes is not None:
n.unicodes = list(self.unicodes) # depends on [control=['if'], data=[]]
n.width = self.width
n.height = self.height
n.note = self.note
n.lib = deepcopy(dict(self.lib))
return n |
def process_normal_line(self, line):
    """process a normal line and check whether it is the start of a new block"""
    # A line matching a block-start pattern flushes the accumulated lines
    # and records the new format plus the position where it begins.
    for fmt in re_source_block_formats:
        if fmt.start.match(line):
            self.add_block_lines()
            self.format = fmt
            self.lineno = fileinput.filelineno()
    self.lines.append(line)
constant[process a normal line and check whether it is the start of a new block]
for taget[name[f]] in starred[name[re_source_block_formats]] begin[:]
if call[name[f].start.match, parameter[name[line]]] begin[:]
call[name[self].add_block_lines, parameter[]]
name[self].format assign[=] name[f]
name[self].lineno assign[=] call[name[fileinput].filelineno, parameter[]]
call[name[self].lines.append, parameter[name[line]]] | keyword[def] identifier[process_normal_line] ( identifier[self] , identifier[line] ):
literal[string]
keyword[for] identifier[f] keyword[in] identifier[re_source_block_formats] :
keyword[if] identifier[f] . identifier[start] . identifier[match] ( identifier[line] ):
identifier[self] . identifier[add_block_lines] ()
identifier[self] . identifier[format] = identifier[f]
identifier[self] . identifier[lineno] = identifier[fileinput] . identifier[filelineno] ()
identifier[self] . identifier[lines] . identifier[append] ( identifier[line] ) | def process_normal_line(self, line):
"""process a normal line and check whether it is the start of a new block"""
for f in re_source_block_formats:
if f.start.match(line):
self.add_block_lines()
self.format = f
self.lineno = fileinput.filelineno() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
self.lines.append(line) |
def replace_party(self, url, token, encoding_aes_key, media_id):
    """
    Full (replace-all) sync of departments.

    https://work.weixin.qq.com/api/doc#90000/90135/90982

    :param url: URL (http or https) where the enterprise app receives
        WeChat Work callback requests
    :param token: used to generate the request signature
    :param encoding_aes_key: Base64-encoded AES key used to encrypt the
        message body
    :param media_id: media_id of the uploaded csv file
    :return: parsed JSON response
    """
    callback = {
        'url': url,
        'token': token,
        'encodingaeskey': encoding_aes_key,
    }
    payload = {'media_id': media_id, 'callback': callback}
    return self._post('batch/replaceparty', data=payload)
constant[
全量覆盖部门
https://work.weixin.qq.com/api/doc#90000/90135/90982
:param url: 企业应用接收企业微信推送请求的访问协议和地址,支持http或https协议
:param token: 用于生成签名
:param encoding_aes_key: 用于消息体的加密,是AES密钥的Base64编码
:param media_id: 上传的csv文件的media_id
:return: 返回的 JSON 数据包
]
return[call[name[self]._post, parameter[constant[batch/replaceparty]]]] | keyword[def] identifier[replace_party] ( identifier[self] , identifier[url] , identifier[token] , identifier[encoding_aes_key] , identifier[media_id] ):
literal[string]
keyword[return] identifier[self] . identifier[_post] (
literal[string] ,
identifier[data] ={
literal[string] : identifier[media_id] ,
literal[string] :{
literal[string] : identifier[url] ,
literal[string] : identifier[token] ,
literal[string] : identifier[encoding_aes_key]
}
}
) | def replace_party(self, url, token, encoding_aes_key, media_id):
"""
全量覆盖部门
https://work.weixin.qq.com/api/doc#90000/90135/90982
:param url: 企业应用接收企业微信推送请求的访问协议和地址,支持http或https协议
:param token: 用于生成签名
:param encoding_aes_key: 用于消息体的加密,是AES密钥的Base64编码
:param media_id: 上传的csv文件的media_id
:return: 返回的 JSON 数据包
"""
return self._post('batch/replaceparty', data={'media_id': media_id, 'callback': {'url': url, 'token': token, 'encodingaeskey': encoding_aes_key}}) |
def wait(self, wait_time=0):
    """
    Returns a :class:`~retask.task.Task` object from the queue. Returns ``False`` if it timeouts.

    :arg wait_time: Time in seconds to wait, default is infinite.

    :return: :class:`~retask.task.Task` object from the queue or False if it timeouts.

    .. doctest::

       >>> from retask import Queue
       >>> q = Queue('test')
       >>> q.connect()
       True
       >>> task = q.wait()
       >>> print task.data
       {u'name': u'kushal'}

    .. note::

       This is a blocking call, you can specity wait_time argument for timeout.
    """
    if not self.connected:
        raise ConnectionError('Queue is not connected')
    # Blocking right-pop; returns None/empty on timeout.
    raw = self.rdb.brpop(self._name, wait_time)
    if not raw:
        return False
    task = Task()
    # brpop yields (queue_name, payload); the payload is the serialized task.
    task.__dict__ = json.loads(raw[1])
    return task
constant[
Returns a :class:`~retask.task.Task` object from the queue. Returns ``False`` if it timeouts.
:arg wait_time: Time in seconds to wait, default is infinite.
:return: :class:`~retask.task.Task` object from the queue or False if it timeouts.
.. doctest::
>>> from retask import Queue
>>> q = Queue('test')
>>> q.connect()
True
>>> task = q.wait()
>>> print task.data
{u'name': u'kushal'}
.. note::
This is a blocking call, you can specity wait_time argument for timeout.
]
if <ast.UnaryOp object at 0x7da1b0a329b0> begin[:]
<ast.Raise object at 0x7da1b0a327a0>
variable[data] assign[=] call[name[self].rdb.brpop, parameter[name[self]._name, name[wait_time]]]
if name[data] begin[:]
variable[task] assign[=] call[name[Task], parameter[]]
name[task].__dict__ assign[=] call[name[json].loads, parameter[call[name[data]][constant[1]]]]
return[name[task]] | keyword[def] identifier[wait] ( identifier[self] , identifier[wait_time] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[connected] :
keyword[raise] identifier[ConnectionError] ( literal[string] )
identifier[data] = identifier[self] . identifier[rdb] . identifier[brpop] ( identifier[self] . identifier[_name] , identifier[wait_time] )
keyword[if] identifier[data] :
identifier[task] = identifier[Task] ()
identifier[task] . identifier[__dict__] = identifier[json] . identifier[loads] ( identifier[data] [ literal[int] ])
keyword[return] identifier[task]
keyword[else] :
keyword[return] keyword[False] | def wait(self, wait_time=0):
"""
Returns a :class:`~retask.task.Task` object from the queue. Returns ``False`` if it timeouts.
:arg wait_time: Time in seconds to wait, default is infinite.
:return: :class:`~retask.task.Task` object from the queue or False if it timeouts.
.. doctest::
>>> from retask import Queue
>>> q = Queue('test')
>>> q.connect()
True
>>> task = q.wait()
>>> print task.data
{u'name': u'kushal'}
.. note::
This is a blocking call, you can specity wait_time argument for timeout.
"""
if not self.connected:
raise ConnectionError('Queue is not connected') # depends on [control=['if'], data=[]]
data = self.rdb.brpop(self._name, wait_time)
if data:
task = Task()
task.__dict__ = json.loads(data[1])
return task # depends on [control=['if'], data=[]]
else:
return False |
def get_float(prompt=None):
    """
    Read a line of text from standard input and return the equivalent float
    as precisely as possible; if text does not represent a double, user is
    prompted to retry. If line can't be read, return None.
    """
    while True:
        line = get_string(prompt)
        if line is None:
            return None
        # Only accept an optionally signed decimal number (no exponent form).
        if line and re.search(r"^[+-]?\d*(?:\.\d*)?$", line):
            try:
                return float(line)
            except ValueError:
                pass
        # Temporarily here for backwards compatibility
        if prompt is None:
            print("Retry: ", end="")
constant[
Read a line of text from standard input and return the equivalent float
as precisely as possible; if text does not represent a double, user is
prompted to retry. If line can't be read, return None.
]
while constant[True] begin[:]
variable[s] assign[=] call[name[get_string], parameter[name[prompt]]]
if compare[name[s] is constant[None]] begin[:]
return[constant[None]]
if <ast.BoolOp object at 0x7da2054a7130> begin[:]
<ast.Try object at 0x7da2054a71f0>
if compare[name[prompt] is constant[None]] begin[:]
call[name[print], parameter[constant[Retry: ]]] | keyword[def] identifier[get_float] ( identifier[prompt] = keyword[None] ):
literal[string]
keyword[while] keyword[True] :
identifier[s] = identifier[get_string] ( identifier[prompt] )
keyword[if] identifier[s] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[if] identifier[len] ( identifier[s] )> literal[int] keyword[and] identifier[re] . identifier[search] ( literal[string] , identifier[s] ):
keyword[try] :
keyword[return] identifier[float] ( identifier[s] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[if] identifier[prompt] keyword[is] keyword[None] :
identifier[print] ( literal[string] , identifier[end] = literal[string] ) | def get_float(prompt=None):
"""
Read a line of text from standard input and return the equivalent float
as precisely as possible; if text does not represent a double, user is
prompted to retry. If line can't be read, return None.
"""
while True:
s = get_string(prompt)
if s is None:
return None # depends on [control=['if'], data=[]]
if len(s) > 0 and re.search('^[+-]?\\d*(?:\\.\\d*)?$', s):
try:
return float(s) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Temporarily here for backwards compatibility
if prompt is None:
print('Retry: ', end='') # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
def _filter_candidates(self, tokens, candidates):
"""Discard if long shorter than abbr, or if abbr token(s) are in the long token(s)."""
results = []
for abbr_span, long_span in candidates:
abbr = tokens[abbr_span[0]:abbr_span[1]]
long = tokens[long_span[0]:long_span[1]]
if not all(a in long for a in abbr) and len(''.join(long)) > len(''.join(abbr)):
results.append((abbr_span, long_span))
return results | def function[_filter_candidates, parameter[self, tokens, candidates]]:
constant[Discard if long shorter than abbr, or if abbr token(s) are in the long token(s).]
variable[results] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18fe91660>, <ast.Name object at 0x7da18fe931f0>]]] in starred[name[candidates]] begin[:]
variable[abbr] assign[=] call[name[tokens]][<ast.Slice object at 0x7da18c4cf8e0>]
variable[long] assign[=] call[name[tokens]][<ast.Slice object at 0x7da18c4cc0d0>]
if <ast.BoolOp object at 0x7da18c4cded0> begin[:]
call[name[results].append, parameter[tuple[[<ast.Name object at 0x7da18c4ced40>, <ast.Name object at 0x7da18c4cf940>]]]]
return[name[results]] | keyword[def] identifier[_filter_candidates] ( identifier[self] , identifier[tokens] , identifier[candidates] ):
literal[string]
identifier[results] =[]
keyword[for] identifier[abbr_span] , identifier[long_span] keyword[in] identifier[candidates] :
identifier[abbr] = identifier[tokens] [ identifier[abbr_span] [ literal[int] ]: identifier[abbr_span] [ literal[int] ]]
identifier[long] = identifier[tokens] [ identifier[long_span] [ literal[int] ]: identifier[long_span] [ literal[int] ]]
keyword[if] keyword[not] identifier[all] ( identifier[a] keyword[in] identifier[long] keyword[for] identifier[a] keyword[in] identifier[abbr] ) keyword[and] identifier[len] ( literal[string] . identifier[join] ( identifier[long] ))> identifier[len] ( literal[string] . identifier[join] ( identifier[abbr] )):
identifier[results] . identifier[append] (( identifier[abbr_span] , identifier[long_span] ))
keyword[return] identifier[results] | def _filter_candidates(self, tokens, candidates):
"""Discard if long shorter than abbr, or if abbr token(s) are in the long token(s)."""
results = []
for (abbr_span, long_span) in candidates:
abbr = tokens[abbr_span[0]:abbr_span[1]]
long = tokens[long_span[0]:long_span[1]]
if not all((a in long for a in abbr)) and len(''.join(long)) > len(''.join(abbr)):
results.append((abbr_span, long_span)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return results |
def textpath(self, txt, x, y, width=None, height=1000000, enableRendering=False, **kwargs):
'''
Draws an outlined path of the input text
'''
txt = self.Text(txt, x, y, width, height, **kwargs)
path = txt.path
if draw:
path.draw()
return path | def function[textpath, parameter[self, txt, x, y, width, height, enableRendering]]:
constant[
Draws an outlined path of the input text
]
variable[txt] assign[=] call[name[self].Text, parameter[name[txt], name[x], name[y], name[width], name[height]]]
variable[path] assign[=] name[txt].path
if name[draw] begin[:]
call[name[path].draw, parameter[]]
return[name[path]] | keyword[def] identifier[textpath] ( identifier[self] , identifier[txt] , identifier[x] , identifier[y] , identifier[width] = keyword[None] , identifier[height] = literal[int] , identifier[enableRendering] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[txt] = identifier[self] . identifier[Text] ( identifier[txt] , identifier[x] , identifier[y] , identifier[width] , identifier[height] ,** identifier[kwargs] )
identifier[path] = identifier[txt] . identifier[path]
keyword[if] identifier[draw] :
identifier[path] . identifier[draw] ()
keyword[return] identifier[path] | def textpath(self, txt, x, y, width=None, height=1000000, enableRendering=False, **kwargs):
"""
Draws an outlined path of the input text
"""
txt = self.Text(txt, x, y, width, height, **kwargs)
path = txt.path
if draw:
path.draw() # depends on [control=['if'], data=[]]
return path |
def get_contribution(self, url):
"""Get the details of a particular contribution given it's
url"""
result = self.api_request(url)
# add the contrib id into the metadata
result['id'] = os.path.split(result['url'])[1]
return result | def function[get_contribution, parameter[self, url]]:
constant[Get the details of a particular contribution given it's
url]
variable[result] assign[=] call[name[self].api_request, parameter[name[url]]]
call[name[result]][constant[id]] assign[=] call[call[name[os].path.split, parameter[call[name[result]][constant[url]]]]][constant[1]]
return[name[result]] | keyword[def] identifier[get_contribution] ( identifier[self] , identifier[url] ):
literal[string]
identifier[result] = identifier[self] . identifier[api_request] ( identifier[url] )
identifier[result] [ literal[string] ]= identifier[os] . identifier[path] . identifier[split] ( identifier[result] [ literal[string] ])[ literal[int] ]
keyword[return] identifier[result] | def get_contribution(self, url):
"""Get the details of a particular contribution given it's
url"""
result = self.api_request(url)
# add the contrib id into the metadata
result['id'] = os.path.split(result['url'])[1]
return result |
def decimate_max(self, a, maxpoints, **kwargs):
"""Return data *a* max-decimated on *maxpoints*.
Histograms each column into *maxpoints* bins and calculates
the maximum in each bin as the decimated data, using
:func:`numkit.timeseries.max_histogrammed_function`. The coarse grained
time in the first column contains the centers of the histogram
time.
If *a* contains <= *maxpoints* then *a* is simply returned;
otherwise a new array of the same dimensions but with a
reduced number of *maxpoints* points is returned.
.. Note::
Assumes that the first column is time.
"""
return self._decimate(numkit.timeseries.max_histogrammed_function, a, maxpoints, **kwargs) | def function[decimate_max, parameter[self, a, maxpoints]]:
constant[Return data *a* max-decimated on *maxpoints*.
Histograms each column into *maxpoints* bins and calculates
the maximum in each bin as the decimated data, using
:func:`numkit.timeseries.max_histogrammed_function`. The coarse grained
time in the first column contains the centers of the histogram
time.
If *a* contains <= *maxpoints* then *a* is simply returned;
otherwise a new array of the same dimensions but with a
reduced number of *maxpoints* points is returned.
.. Note::
Assumes that the first column is time.
]
return[call[name[self]._decimate, parameter[name[numkit].timeseries.max_histogrammed_function, name[a], name[maxpoints]]]] | keyword[def] identifier[decimate_max] ( identifier[self] , identifier[a] , identifier[maxpoints] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_decimate] ( identifier[numkit] . identifier[timeseries] . identifier[max_histogrammed_function] , identifier[a] , identifier[maxpoints] ,** identifier[kwargs] ) | def decimate_max(self, a, maxpoints, **kwargs):
"""Return data *a* max-decimated on *maxpoints*.
Histograms each column into *maxpoints* bins and calculates
the maximum in each bin as the decimated data, using
:func:`numkit.timeseries.max_histogrammed_function`. The coarse grained
time in the first column contains the centers of the histogram
time.
If *a* contains <= *maxpoints* then *a* is simply returned;
otherwise a new array of the same dimensions but with a
reduced number of *maxpoints* points is returned.
.. Note::
Assumes that the first column is time.
"""
return self._decimate(numkit.timeseries.max_histogrammed_function, a, maxpoints, **kwargs) |
def manage_payload_services(action, services=None, charm_func=None):
"""Run an action against all services.
An optional charm_func() can be called. It should raise an Exception to
indicate that the function failed. If it was succesfull it should return
None or an optional message.
The signature for charm_func is:
charm_func() -> message: str
charm_func() is executed after any services are stopped, if supplied.
The services object can either be:
- None : no services were passed (an empty dict is returned)
- a list of strings
- A dictionary (optionally OrderedDict) {service_name: {'service': ..}}
- An array of [{'service': service_name, ...}, ...]
:param action: Action to run: pause, resume, start or stop.
:type action: str
:param services: See above
:type services: See above
:param charm_func: function to run for custom charm pausing.
:type charm_func: f()
:returns: Status boolean and list of messages
:rtype: (bool, [])
:raises: RuntimeError
"""
actions = {
'pause': service_pause,
'resume': service_resume,
'start': service_start,
'stop': service_stop}
action = action.lower()
if action not in actions.keys():
raise RuntimeError(
"action: {} must be one of: {}".format(action,
', '.join(actions.keys())))
services = _extract_services_list_helper(services)
messages = []
success = True
if services:
for service in services.keys():
rc = actions[action](service)
if not rc:
success = False
messages.append("{} didn't {} cleanly.".format(service,
action))
if charm_func:
try:
message = charm_func()
if message:
messages.append(message)
except Exception as e:
success = False
messages.append(str(e))
return success, messages | def function[manage_payload_services, parameter[action, services, charm_func]]:
constant[Run an action against all services.
An optional charm_func() can be called. It should raise an Exception to
indicate that the function failed. If it was succesfull it should return
None or an optional message.
The signature for charm_func is:
charm_func() -> message: str
charm_func() is executed after any services are stopped, if supplied.
The services object can either be:
- None : no services were passed (an empty dict is returned)
- a list of strings
- A dictionary (optionally OrderedDict) {service_name: {'service': ..}}
- An array of [{'service': service_name, ...}, ...]
:param action: Action to run: pause, resume, start or stop.
:type action: str
:param services: See above
:type services: See above
:param charm_func: function to run for custom charm pausing.
:type charm_func: f()
:returns: Status boolean and list of messages
:rtype: (bool, [])
:raises: RuntimeError
]
variable[actions] assign[=] dictionary[[<ast.Constant object at 0x7da1b11abf70>, <ast.Constant object at 0x7da18fe92a40>, <ast.Constant object at 0x7da18fe91600>, <ast.Constant object at 0x7da18fe92e90>], [<ast.Name object at 0x7da18fe90820>, <ast.Name object at 0x7da18fe91d80>, <ast.Name object at 0x7da18fe90280>, <ast.Name object at 0x7da18fe93cd0>]]
variable[action] assign[=] call[name[action].lower, parameter[]]
if compare[name[action] <ast.NotIn object at 0x7da2590d7190> call[name[actions].keys, parameter[]]] begin[:]
<ast.Raise object at 0x7da18fe92770>
variable[services] assign[=] call[name[_extract_services_list_helper], parameter[name[services]]]
variable[messages] assign[=] list[[]]
variable[success] assign[=] constant[True]
if name[services] begin[:]
for taget[name[service]] in starred[call[name[services].keys, parameter[]]] begin[:]
variable[rc] assign[=] call[call[name[actions]][name[action]], parameter[name[service]]]
if <ast.UnaryOp object at 0x7da1b12186a0> begin[:]
variable[success] assign[=] constant[False]
call[name[messages].append, parameter[call[constant[{} didn't {} cleanly.].format, parameter[name[service], name[action]]]]]
if name[charm_func] begin[:]
<ast.Try object at 0x7da1b121a4d0>
return[tuple[[<ast.Name object at 0x7da1b121b730>, <ast.Name object at 0x7da1b12184f0>]]] | keyword[def] identifier[manage_payload_services] ( identifier[action] , identifier[services] = keyword[None] , identifier[charm_func] = keyword[None] ):
literal[string]
identifier[actions] ={
literal[string] : identifier[service_pause] ,
literal[string] : identifier[service_resume] ,
literal[string] : identifier[service_start] ,
literal[string] : identifier[service_stop] }
identifier[action] = identifier[action] . identifier[lower] ()
keyword[if] identifier[action] keyword[not] keyword[in] identifier[actions] . identifier[keys] ():
keyword[raise] identifier[RuntimeError] (
literal[string] . identifier[format] ( identifier[action] ,
literal[string] . identifier[join] ( identifier[actions] . identifier[keys] ())))
identifier[services] = identifier[_extract_services_list_helper] ( identifier[services] )
identifier[messages] =[]
identifier[success] = keyword[True]
keyword[if] identifier[services] :
keyword[for] identifier[service] keyword[in] identifier[services] . identifier[keys] ():
identifier[rc] = identifier[actions] [ identifier[action] ]( identifier[service] )
keyword[if] keyword[not] identifier[rc] :
identifier[success] = keyword[False]
identifier[messages] . identifier[append] ( literal[string] . identifier[format] ( identifier[service] ,
identifier[action] ))
keyword[if] identifier[charm_func] :
keyword[try] :
identifier[message] = identifier[charm_func] ()
keyword[if] identifier[message] :
identifier[messages] . identifier[append] ( identifier[message] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[success] = keyword[False]
identifier[messages] . identifier[append] ( identifier[str] ( identifier[e] ))
keyword[return] identifier[success] , identifier[messages] | def manage_payload_services(action, services=None, charm_func=None):
"""Run an action against all services.
An optional charm_func() can be called. It should raise an Exception to
indicate that the function failed. If it was succesfull it should return
None or an optional message.
The signature for charm_func is:
charm_func() -> message: str
charm_func() is executed after any services are stopped, if supplied.
The services object can either be:
- None : no services were passed (an empty dict is returned)
- a list of strings
- A dictionary (optionally OrderedDict) {service_name: {'service': ..}}
- An array of [{'service': service_name, ...}, ...]
:param action: Action to run: pause, resume, start or stop.
:type action: str
:param services: See above
:type services: See above
:param charm_func: function to run for custom charm pausing.
:type charm_func: f()
:returns: Status boolean and list of messages
:rtype: (bool, [])
:raises: RuntimeError
"""
actions = {'pause': service_pause, 'resume': service_resume, 'start': service_start, 'stop': service_stop}
action = action.lower()
if action not in actions.keys():
raise RuntimeError('action: {} must be one of: {}'.format(action, ', '.join(actions.keys()))) # depends on [control=['if'], data=['action']]
services = _extract_services_list_helper(services)
messages = []
success = True
if services:
for service in services.keys():
rc = actions[action](service)
if not rc:
success = False
messages.append("{} didn't {} cleanly.".format(service, action)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['service']] # depends on [control=['if'], data=[]]
if charm_func:
try:
message = charm_func()
if message:
messages.append(message) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
success = False
messages.append(str(e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
return (success, messages) |
def download_data():
"""Download two sets of Enron1 spam/ham e-mails if they are not here
We will use the first as trainset and the second as testset.
Return the path prefix to us to load the data from disk."""
n_datasets = 2
for d in range(1, n_datasets + 1):
if not os.path.isdir('enron%d' % d):
URL = url[d-1]
print("Downloading %d/%d: %s" % (d, n_datasets, URL))
folderzip = 'enron%d.zip' % d
with urlopen(URL) as remotedata:
with open(folderzip, 'wb') as z:
z.write(remotedata.read())
with ZipFile(folderzip) as z:
z.extractall()
os.remove(folderzip) | def function[download_data, parameter[]]:
constant[Download two sets of Enron1 spam/ham e-mails if they are not here
We will use the first as trainset and the second as testset.
Return the path prefix to us to load the data from disk.]
variable[n_datasets] assign[=] constant[2]
for taget[name[d]] in starred[call[name[range], parameter[constant[1], binary_operation[name[n_datasets] + constant[1]]]]] begin[:]
if <ast.UnaryOp object at 0x7da20e954a00> begin[:]
variable[URL] assign[=] call[name[url]][binary_operation[name[d] - constant[1]]]
call[name[print], parameter[binary_operation[constant[Downloading %d/%d: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e954640>, <ast.Name object at 0x7da20e957c10>, <ast.Name object at 0x7da20e955930>]]]]]
variable[folderzip] assign[=] binary_operation[constant[enron%d.zip] <ast.Mod object at 0x7da2590d6920> name[d]]
with call[name[urlopen], parameter[name[URL]]] begin[:]
with call[name[open], parameter[name[folderzip], constant[wb]]] begin[:]
call[name[z].write, parameter[call[name[remotedata].read, parameter[]]]]
with call[name[ZipFile], parameter[name[folderzip]]] begin[:]
call[name[z].extractall, parameter[]]
call[name[os].remove, parameter[name[folderzip]]] | keyword[def] identifier[download_data] ():
literal[string]
identifier[n_datasets] = literal[int]
keyword[for] identifier[d] keyword[in] identifier[range] ( literal[int] , identifier[n_datasets] + literal[int] ):
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( literal[string] % identifier[d] ):
identifier[URL] = identifier[url] [ identifier[d] - literal[int] ]
identifier[print] ( literal[string] %( identifier[d] , identifier[n_datasets] , identifier[URL] ))
identifier[folderzip] = literal[string] % identifier[d]
keyword[with] identifier[urlopen] ( identifier[URL] ) keyword[as] identifier[remotedata] :
keyword[with] identifier[open] ( identifier[folderzip] , literal[string] ) keyword[as] identifier[z] :
identifier[z] . identifier[write] ( identifier[remotedata] . identifier[read] ())
keyword[with] identifier[ZipFile] ( identifier[folderzip] ) keyword[as] identifier[z] :
identifier[z] . identifier[extractall] ()
identifier[os] . identifier[remove] ( identifier[folderzip] ) | def download_data():
"""Download two sets of Enron1 spam/ham e-mails if they are not here
We will use the first as trainset and the second as testset.
Return the path prefix to us to load the data from disk."""
n_datasets = 2
for d in range(1, n_datasets + 1):
if not os.path.isdir('enron%d' % d):
URL = url[d - 1]
print('Downloading %d/%d: %s' % (d, n_datasets, URL))
folderzip = 'enron%d.zip' % d
with urlopen(URL) as remotedata:
with open(folderzip, 'wb') as z:
z.write(remotedata.read()) # depends on [control=['with'], data=['z']] # depends on [control=['with'], data=['remotedata']]
with ZipFile(folderzip) as z:
z.extractall() # depends on [control=['with'], data=['z']]
os.remove(folderzip) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']] |
def model_fn(model_dir):
"""
Load the gluon model. Called once when hosting service starts.
:param: model_dir The directory where model files are stored.
:return: a model (in this case a Gluon network)
"""
symbol = mx.sym.load('%s/model.json' % model_dir)
outputs = mx.symbol.softmax(data=symbol, name='softmax_label')
inputs = mx.sym.var('data')
param_dict = gluon.ParameterDict('model_')
net = gluon.SymbolBlock(outputs, inputs, param_dict)
net.load_params('%s/model.params' % model_dir, ctx=mx.cpu())
return net | def function[model_fn, parameter[model_dir]]:
constant[
Load the gluon model. Called once when hosting service starts.
:param: model_dir The directory where model files are stored.
:return: a model (in this case a Gluon network)
]
variable[symbol] assign[=] call[name[mx].sym.load, parameter[binary_operation[constant[%s/model.json] <ast.Mod object at 0x7da2590d6920> name[model_dir]]]]
variable[outputs] assign[=] call[name[mx].symbol.softmax, parameter[]]
variable[inputs] assign[=] call[name[mx].sym.var, parameter[constant[data]]]
variable[param_dict] assign[=] call[name[gluon].ParameterDict, parameter[constant[model_]]]
variable[net] assign[=] call[name[gluon].SymbolBlock, parameter[name[outputs], name[inputs], name[param_dict]]]
call[name[net].load_params, parameter[binary_operation[constant[%s/model.params] <ast.Mod object at 0x7da2590d6920> name[model_dir]]]]
return[name[net]] | keyword[def] identifier[model_fn] ( identifier[model_dir] ):
literal[string]
identifier[symbol] = identifier[mx] . identifier[sym] . identifier[load] ( literal[string] % identifier[model_dir] )
identifier[outputs] = identifier[mx] . identifier[symbol] . identifier[softmax] ( identifier[data] = identifier[symbol] , identifier[name] = literal[string] )
identifier[inputs] = identifier[mx] . identifier[sym] . identifier[var] ( literal[string] )
identifier[param_dict] = identifier[gluon] . identifier[ParameterDict] ( literal[string] )
identifier[net] = identifier[gluon] . identifier[SymbolBlock] ( identifier[outputs] , identifier[inputs] , identifier[param_dict] )
identifier[net] . identifier[load_params] ( literal[string] % identifier[model_dir] , identifier[ctx] = identifier[mx] . identifier[cpu] ())
keyword[return] identifier[net] | def model_fn(model_dir):
"""
Load the gluon model. Called once when hosting service starts.
:param: model_dir The directory where model files are stored.
:return: a model (in this case a Gluon network)
"""
symbol = mx.sym.load('%s/model.json' % model_dir)
outputs = mx.symbol.softmax(data=symbol, name='softmax_label')
inputs = mx.sym.var('data')
param_dict = gluon.ParameterDict('model_')
net = gluon.SymbolBlock(outputs, inputs, param_dict)
net.load_params('%s/model.params' % model_dir, ctx=mx.cpu())
return net |
def define_udp(self, name, valid_type, valid_components=None, default=None):
"""
Pre-define a user-defined property.
This is the equivalent to the following RDL:
.. code-block:: none
property <name> {
type = <valid_type>;
component = <valid_components>;
default = <default>
};
Parameters
----------
name: str
Property name
valid_components: list
List of :class:`~systemrdl.component.Component` types the UDP can be bound to.
If None, then UDP can be bound to all components.
valid_type: type
Assignment type that this UDP will enforce
default:
Default if a value is not specified when the UDP is bound to a component.
Value must be compatible with ``valid_type``
"""
if valid_components is None:
valid_components = [
comp.Field,
comp.Reg,
comp.Regfile,
comp.Addrmap,
comp.Mem,
comp.Signal,
#TODO constraint,
]
if name in self.env.property_rules.rdl_properties:
raise ValueError("name '%s' conflicts with existing built-in RDL property")
udp = UserProperty(self.env, name, valid_components, [valid_type], default)
self.env.property_rules.user_properties[udp.name] = udp | def function[define_udp, parameter[self, name, valid_type, valid_components, default]]:
constant[
Pre-define a user-defined property.
This is the equivalent to the following RDL:
.. code-block:: none
property <name> {
type = <valid_type>;
component = <valid_components>;
default = <default>
};
Parameters
----------
name: str
Property name
valid_components: list
List of :class:`~systemrdl.component.Component` types the UDP can be bound to.
If None, then UDP can be bound to all components.
valid_type: type
Assignment type that this UDP will enforce
default:
Default if a value is not specified when the UDP is bound to a component.
Value must be compatible with ``valid_type``
]
if compare[name[valid_components] is constant[None]] begin[:]
variable[valid_components] assign[=] list[[<ast.Attribute object at 0x7da1b0da2740>, <ast.Attribute object at 0x7da1b0da12d0>, <ast.Attribute object at 0x7da1b0da0550>, <ast.Attribute object at 0x7da1b0da2ef0>, <ast.Attribute object at 0x7da1b0da2e30>, <ast.Attribute object at 0x7da1b0da0dc0>]]
if compare[name[name] in name[self].env.property_rules.rdl_properties] begin[:]
<ast.Raise object at 0x7da1b0da27d0>
variable[udp] assign[=] call[name[UserProperty], parameter[name[self].env, name[name], name[valid_components], list[[<ast.Name object at 0x7da1b0da1a50>]], name[default]]]
call[name[self].env.property_rules.user_properties][name[udp].name] assign[=] name[udp] | keyword[def] identifier[define_udp] ( identifier[self] , identifier[name] , identifier[valid_type] , identifier[valid_components] = keyword[None] , identifier[default] = keyword[None] ):
literal[string]
keyword[if] identifier[valid_components] keyword[is] keyword[None] :
identifier[valid_components] =[
identifier[comp] . identifier[Field] ,
identifier[comp] . identifier[Reg] ,
identifier[comp] . identifier[Regfile] ,
identifier[comp] . identifier[Addrmap] ,
identifier[comp] . identifier[Mem] ,
identifier[comp] . identifier[Signal] ,
]
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[env] . identifier[property_rules] . identifier[rdl_properties] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[udp] = identifier[UserProperty] ( identifier[self] . identifier[env] , identifier[name] , identifier[valid_components] ,[ identifier[valid_type] ], identifier[default] )
identifier[self] . identifier[env] . identifier[property_rules] . identifier[user_properties] [ identifier[udp] . identifier[name] ]= identifier[udp] | def define_udp(self, name, valid_type, valid_components=None, default=None):
"""
Pre-define a user-defined property.
This is the equivalent to the following RDL:
.. code-block:: none
property <name> {
type = <valid_type>;
component = <valid_components>;
default = <default>
};
Parameters
----------
name: str
Property name
valid_components: list
List of :class:`~systemrdl.component.Component` types the UDP can be bound to.
If None, then UDP can be bound to all components.
valid_type: type
Assignment type that this UDP will enforce
default:
Default if a value is not specified when the UDP is bound to a component.
Value must be compatible with ``valid_type``
"""
if valid_components is None:
#TODO constraint,
valid_components = [comp.Field, comp.Reg, comp.Regfile, comp.Addrmap, comp.Mem, comp.Signal] # depends on [control=['if'], data=['valid_components']]
if name in self.env.property_rules.rdl_properties:
raise ValueError("name '%s' conflicts with existing built-in RDL property") # depends on [control=['if'], data=[]]
udp = UserProperty(self.env, name, valid_components, [valid_type], default)
self.env.property_rules.user_properties[udp.name] = udp |
def write_biom(biomT, output_fp, fmt="hdf5", gzip=False):
"""
Write the BIOM table to a file.
:type biomT: biom.table.Table
:param biomT: A BIOM table containing the per-sample OTU counts and metadata
to be written out to file.
:type output_fp str
:param output_fp: Path to the BIOM-format file that will be written.
:type fmt: str
:param fmt: One of: hdf5, json, tsv. The BIOM version the table will be
output (2.x, 1.0, 'classic').
"""
opener = open
mode = 'w'
if gzip and fmt != "hdf5":
if not output_fp.endswith(".gz"):
output_fp += ".gz"
opener = gzip_open
mode = 'wt'
# HDF5 BIOM files are gzipped by default
if fmt == "hdf5":
opener = h5py.File
with opener(output_fp, mode) as biom_f:
if fmt == "json":
biomT.to_json(biomT.generated_by, direct_io=biom_f)
elif fmt == "tsv":
biom_f.write(biomT.to_tsv())
else:
biomT.to_hdf5(biom_f, biomT.generated_by)
return output_fp | def function[write_biom, parameter[biomT, output_fp, fmt, gzip]]:
constant[
Write the BIOM table to a file.
:type biomT: biom.table.Table
:param biomT: A BIOM table containing the per-sample OTU counts and metadata
to be written out to file.
:type output_fp str
:param output_fp: Path to the BIOM-format file that will be written.
:type fmt: str
:param fmt: One of: hdf5, json, tsv. The BIOM version the table will be
output (2.x, 1.0, 'classic').
]
variable[opener] assign[=] name[open]
variable[mode] assign[=] constant[w]
if <ast.BoolOp object at 0x7da20c991450> begin[:]
if <ast.UnaryOp object at 0x7da18f8100d0> begin[:]
<ast.AugAssign object at 0x7da18f810760>
variable[opener] assign[=] name[gzip_open]
variable[mode] assign[=] constant[wt]
if compare[name[fmt] equal[==] constant[hdf5]] begin[:]
variable[opener] assign[=] name[h5py].File
with call[name[opener], parameter[name[output_fp], name[mode]]] begin[:]
if compare[name[fmt] equal[==] constant[json]] begin[:]
call[name[biomT].to_json, parameter[name[biomT].generated_by]]
return[name[output_fp]] | keyword[def] identifier[write_biom] ( identifier[biomT] , identifier[output_fp] , identifier[fmt] = literal[string] , identifier[gzip] = keyword[False] ):
literal[string]
identifier[opener] = identifier[open]
identifier[mode] = literal[string]
keyword[if] identifier[gzip] keyword[and] identifier[fmt] != literal[string] :
keyword[if] keyword[not] identifier[output_fp] . identifier[endswith] ( literal[string] ):
identifier[output_fp] += literal[string]
identifier[opener] = identifier[gzip_open]
identifier[mode] = literal[string]
keyword[if] identifier[fmt] == literal[string] :
identifier[opener] = identifier[h5py] . identifier[File]
keyword[with] identifier[opener] ( identifier[output_fp] , identifier[mode] ) keyword[as] identifier[biom_f] :
keyword[if] identifier[fmt] == literal[string] :
identifier[biomT] . identifier[to_json] ( identifier[biomT] . identifier[generated_by] , identifier[direct_io] = identifier[biom_f] )
keyword[elif] identifier[fmt] == literal[string] :
identifier[biom_f] . identifier[write] ( identifier[biomT] . identifier[to_tsv] ())
keyword[else] :
identifier[biomT] . identifier[to_hdf5] ( identifier[biom_f] , identifier[biomT] . identifier[generated_by] )
keyword[return] identifier[output_fp] | def write_biom(biomT, output_fp, fmt='hdf5', gzip=False):
"""
Write the BIOM table to a file.
:type biomT: biom.table.Table
:param biomT: A BIOM table containing the per-sample OTU counts and metadata
to be written out to file.
:type output_fp str
:param output_fp: Path to the BIOM-format file that will be written.
:type fmt: str
:param fmt: One of: hdf5, json, tsv. The BIOM version the table will be
output (2.x, 1.0, 'classic').
"""
opener = open
mode = 'w'
if gzip and fmt != 'hdf5':
if not output_fp.endswith('.gz'):
output_fp += '.gz' # depends on [control=['if'], data=[]]
opener = gzip_open
mode = 'wt' # depends on [control=['if'], data=[]]
# HDF5 BIOM files are gzipped by default
if fmt == 'hdf5':
opener = h5py.File # depends on [control=['if'], data=[]]
with opener(output_fp, mode) as biom_f:
if fmt == 'json':
biomT.to_json(biomT.generated_by, direct_io=biom_f) # depends on [control=['if'], data=[]]
elif fmt == 'tsv':
biom_f.write(biomT.to_tsv()) # depends on [control=['if'], data=[]]
else:
biomT.to_hdf5(biom_f, biomT.generated_by) # depends on [control=['with'], data=['biom_f']]
return output_fp |
def _parameterize_obj(obj):
"""Recursively parameterize all strings contained in an object.
Parameterizes all values of a Mapping, all items of a Sequence, an
unicode string, or pass other objects through unmodified.
Byte strings will be interpreted as UTF-8.
Args:
obj: data to parameterize
Return:
A parameterized object to be included in a CloudFormation template.
Mappings are converted to `dict`, Sequences are converted to `list`,
and strings possibly replaced by compositions of function calls.
"""
if isinstance(obj, Mapping):
return dict((key, _parameterize_obj(value))
for key, value in obj.items())
elif isinstance(obj, bytes):
return _parameterize_string(obj.decode('utf8'))
elif isinstance(obj, str):
return _parameterize_string(obj)
elif isinstance(obj, Sequence):
return list(_parameterize_obj(item) for item in obj)
else:
return obj | def function[_parameterize_obj, parameter[obj]]:
constant[Recursively parameterize all strings contained in an object.
Parameterizes all values of a Mapping, all items of a Sequence, an
unicode string, or pass other objects through unmodified.
Byte strings will be interpreted as UTF-8.
Args:
obj: data to parameterize
Return:
A parameterized object to be included in a CloudFormation template.
Mappings are converted to `dict`, Sequences are converted to `list`,
and strings possibly replaced by compositions of function calls.
]
if call[name[isinstance], parameter[name[obj], name[Mapping]]] begin[:]
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b180d780>]]] | keyword[def] identifier[_parameterize_obj] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Mapping] ):
keyword[return] identifier[dict] (( identifier[key] , identifier[_parameterize_obj] ( identifier[value] ))
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[obj] . identifier[items] ())
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[bytes] ):
keyword[return] identifier[_parameterize_string] ( identifier[obj] . identifier[decode] ( literal[string] ))
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[str] ):
keyword[return] identifier[_parameterize_string] ( identifier[obj] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[Sequence] ):
keyword[return] identifier[list] ( identifier[_parameterize_obj] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[obj] )
keyword[else] :
keyword[return] identifier[obj] | def _parameterize_obj(obj):
"""Recursively parameterize all strings contained in an object.
Parameterizes all values of a Mapping, all items of a Sequence, an
unicode string, or pass other objects through unmodified.
Byte strings will be interpreted as UTF-8.
Args:
obj: data to parameterize
Return:
A parameterized object to be included in a CloudFormation template.
Mappings are converted to `dict`, Sequences are converted to `list`,
and strings possibly replaced by compositions of function calls.
"""
if isinstance(obj, Mapping):
return dict(((key, _parameterize_obj(value)) for (key, value) in obj.items())) # depends on [control=['if'], data=[]]
elif isinstance(obj, bytes):
return _parameterize_string(obj.decode('utf8')) # depends on [control=['if'], data=[]]
elif isinstance(obj, str):
return _parameterize_string(obj) # depends on [control=['if'], data=[]]
elif isinstance(obj, Sequence):
return list((_parameterize_obj(item) for item in obj)) # depends on [control=['if'], data=[]]
else:
return obj |
def get_tax_rate_by_id(cls, tax_rate_id, **kwargs):
        """Find TaxRate
        Return single instance of TaxRate by its ID.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_tax_rate_by_id(tax_rate_id, async=True)
        >>> result = thread.get()
        :param async bool
        :param str tax_rate_id: ID of taxRate to return (required)
        :return: TaxRate
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Callers of this wrapper only ever want the deserialized body,
        # never the full (data, status-code, headers) tuple.
        kwargs['_return_http_data_only'] = True
        # The underlying helper returns the request thread when 'async' is
        # truthy and the deserialized TaxRate otherwise; either way the
        # result is handed straight back to the caller.
        return cls._get_tax_rate_by_id_with_http_info(tax_rate_id, **kwargs)
constant[Find TaxRate
Return single instance of TaxRate by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_tax_rate_by_id(tax_rate_id, async=True)
>>> result = thread.get()
:param async bool
:param str tax_rate_id: ID of taxRate to return (required)
:return: TaxRate
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._get_tax_rate_by_id_with_http_info, parameter[name[tax_rate_id]]]] | keyword[def] identifier[get_tax_rate_by_id] ( identifier[cls] , identifier[tax_rate_id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_get_tax_rate_by_id_with_http_info] ( identifier[tax_rate_id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_get_tax_rate_by_id_with_http_info] ( identifier[tax_rate_id] ,** identifier[kwargs] )
keyword[return] identifier[data] | def get_tax_rate_by_id(cls, tax_rate_id, **kwargs):
"""Find TaxRate
Return single instance of TaxRate by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_tax_rate_by_id(tax_rate_id, async=True)
>>> result = thread.get()
:param async bool
:param str tax_rate_id: ID of taxRate to return (required)
:return: TaxRate
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._get_tax_rate_by_id_with_http_info(tax_rate_id, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._get_tax_rate_by_id_with_http_info(tax_rate_id, **kwargs)
return data |
def get_projection(self, axis):
        """ Return the projection of this vector onto the given axis. The
        axis does not need to be normalized. """
        # (axis . self) / |axis|^2 rescales the axis so its length equals
        # the component of this vector along it; dividing by the squared
        # length is what makes normalization of `axis` unnecessary.
        return axis * (axis.dot(self) / axis.dot(axis))
constant[ Return the projection of this vector onto the given axis. The
axis does not need to be normalized. ]
variable[scale] assign[=] binary_operation[call[name[axis].dot, parameter[name[self]]] / call[name[axis].dot, parameter[name[axis]]]]
return[binary_operation[name[axis] * name[scale]]] | keyword[def] identifier[get_projection] ( identifier[self] , identifier[axis] ):
literal[string]
identifier[scale] = identifier[axis] . identifier[dot] ( identifier[self] )/ identifier[axis] . identifier[dot] ( identifier[axis] )
keyword[return] identifier[axis] * identifier[scale] | def get_projection(self, axis):
""" Return the projection of this vector onto the given axis. The
axis does not need to be normalized. """
scale = axis.dot(self) / axis.dot(axis)
return axis * scale |
def GetRowHeaders(self) -> list:
        """
        Call IUIAutomationTablePattern::GetCurrentRowHeaders.
        Return list, a list of `Control` subclasses, one per row header found
        in the table (an empty list when the pattern reports none).
        Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtablepattern-getcurrentrowheaders
        """
        elementArray = self.pattern.GetCurrentRowHeaders()
        if not elementArray:
            # The COM call yielded no element array at all.
            return []
        headers = []
        for index in range(elementArray.Length):
            control = Control.CreateControlFromElement(
                element=elementArray.GetElement(index))
            # CreateControlFromElement may return a falsy result; skip those.
            if control:
                headers.append(control)
        return headers
constant[
Call IUIAutomationTablePattern::GetCurrentRowHeaders.
Return list, a list of `Control` subclasses, representing all the row headers in a table.
Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtablepattern-getcurrentrowheaders
]
variable[eleArray] assign[=] call[name[self].pattern.GetCurrentRowHeaders, parameter[]]
if name[eleArray] begin[:]
variable[controls] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[eleArray].Length]]] begin[:]
variable[ele] assign[=] call[name[eleArray].GetElement, parameter[name[i]]]
variable[con] assign[=] call[name[Control].CreateControlFromElement, parameter[]]
if name[con] begin[:]
call[name[controls].append, parameter[name[con]]]
return[name[controls]]
return[list[[]]] | keyword[def] identifier[GetRowHeaders] ( identifier[self] )-> identifier[list] :
literal[string]
identifier[eleArray] = identifier[self] . identifier[pattern] . identifier[GetCurrentRowHeaders] ()
keyword[if] identifier[eleArray] :
identifier[controls] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[eleArray] . identifier[Length] ):
identifier[ele] = identifier[eleArray] . identifier[GetElement] ( identifier[i] )
identifier[con] = identifier[Control] . identifier[CreateControlFromElement] ( identifier[element] = identifier[ele] )
keyword[if] identifier[con] :
identifier[controls] . identifier[append] ( identifier[con] )
keyword[return] identifier[controls]
keyword[return] [] | def GetRowHeaders(self) -> list:
"""
Call IUIAutomationTablePattern::GetCurrentRowHeaders.
Return list, a list of `Control` subclasses, representing all the row headers in a table.
Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtablepattern-getcurrentrowheaders
"""
eleArray = self.pattern.GetCurrentRowHeaders()
if eleArray:
controls = []
for i in range(eleArray.Length):
ele = eleArray.GetElement(i)
con = Control.CreateControlFromElement(element=ele)
if con:
controls.append(con) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return controls # depends on [control=['if'], data=[]]
return [] |
def dump_to_response(request, app_label=None, exclude=None,
                     filename_prefix=None):
    """Utility function that dumps the given app/model to an HttpResponse.

    :param request: current request; used for error messaging and for
        building the redirect URL when dumping fails.
    :param app_label: a single app/model label or a list of labels to
        dump; any falsy value means "dump everything".
    :param exclude: labels to exclude, forwarded unchanged to
        ``serialize_to_response``.
    :param filename_prefix: optional prefix for the attachment filename.
    :return: an ``HttpResponse`` carrying the serialized dump as an
        attachment, or an ``HttpResponseRedirect`` back to the dump page
        when serialization raises a ``CommandError``.
    """
    # Normalize falsy values to "dump all apps"; the old redundant
    # no-op `exclude = exclude` self-assignment has been dropped.
    app_label = app_label or []
    try:
        # Timestamped filename, e.g. "2024-01-01T12:00:00.json".
        filename = '%s.%s' % (datetime.now().isoformat(),
                              settings.SMUGGLER_FORMAT)
        if filename_prefix:
            filename = '%s_%s' % (filename_prefix, filename)
        if not isinstance(app_label, list):
            app_label = [app_label]
        response = serialize_to_response(app_label, exclude)
        response['Content-Disposition'] = 'attachment; filename=%s' % filename
        return response
    except CommandError as e:
        messages.error(
            request,
            _('An exception occurred while dumping data: %s') % force_text(e))
    # Only reached on error: send the user back to the dump page.
    return HttpResponseRedirect(request.build_absolute_uri().split('dump')[0])
constant[Utility function that dumps the given app/model to an HttpResponse.
]
variable[app_label] assign[=] <ast.BoolOp object at 0x7da18dc9aa70>
variable[exclude] assign[=] name[exclude]
<ast.Try object at 0x7da18dc9ad40>
return[call[name[HttpResponseRedirect], parameter[call[call[call[name[request].build_absolute_uri, parameter[]].split, parameter[constant[dump]]]][constant[0]]]]] | keyword[def] identifier[dump_to_response] ( identifier[request] , identifier[app_label] = keyword[None] , identifier[exclude] = keyword[None] ,
identifier[filename_prefix] = keyword[None] ):
literal[string]
identifier[app_label] = identifier[app_label] keyword[or] []
identifier[exclude] = identifier[exclude]
keyword[try] :
identifier[filename] = literal[string] %( identifier[datetime] . identifier[now] (). identifier[isoformat] (),
identifier[settings] . identifier[SMUGGLER_FORMAT] )
keyword[if] identifier[filename_prefix] :
identifier[filename] = literal[string] %( identifier[filename_prefix] , identifier[filename] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[app_label] , identifier[list] ):
identifier[app_label] =[ identifier[app_label] ]
identifier[response] = identifier[serialize_to_response] ( identifier[app_label] , identifier[exclude] )
identifier[response] [ literal[string] ]= literal[string] % identifier[filename]
keyword[return] identifier[response]
keyword[except] identifier[CommandError] keyword[as] identifier[e] :
identifier[messages] . identifier[error] (
identifier[request] ,
identifier[_] ( literal[string] )% identifier[force_text] ( identifier[e] ))
keyword[return] identifier[HttpResponseRedirect] ( identifier[request] . identifier[build_absolute_uri] (). identifier[split] ( literal[string] )[ literal[int] ]) | def dump_to_response(request, app_label=None, exclude=None, filename_prefix=None):
"""Utility function that dumps the given app/model to an HttpResponse.
"""
app_label = app_label or []
exclude = exclude
try:
filename = '%s.%s' % (datetime.now().isoformat(), settings.SMUGGLER_FORMAT)
if filename_prefix:
filename = '%s_%s' % (filename_prefix, filename) # depends on [control=['if'], data=[]]
if not isinstance(app_label, list):
app_label = [app_label] # depends on [control=['if'], data=[]]
response = serialize_to_response(app_label, exclude)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response # depends on [control=['try'], data=[]]
except CommandError as e:
messages.error(request, _('An exception occurred while dumping data: %s') % force_text(e)) # depends on [control=['except'], data=['e']]
return HttpResponseRedirect(request.build_absolute_uri().split('dump')[0]) |
def blocks(self, blocksize=None, overlap=0, frames=-1, dtype='float64',
               always_2d=False, fill_value=None, out=None):
        """Return a generator for block-wise reading.
        By default, the generator yields blocks of the given
        `blocksize` (using a given `overlap`) until the end of the file
        is reached; `frames` can be used to stop earlier.
        Parameters
        ----------
        blocksize : int
            The number of frames to read per block. Either this or `out`
            must be given.
        overlap : int, optional
            The number of frames to rewind between each block.
        frames : int, optional
            The number of frames to read.
            If ``frames < 0``, the file is read until the end.
        dtype : {'float64', 'float32', 'int32', 'int16'}, optional
            See :meth:`.read`.
        Yields
        ------
        numpy.ndarray or type(out)
            Blocks of audio data.
            If `out` was given, and the requested frames are not an
            integer multiple of the length of `out`, and no
            `fill_value` was given, the last block will be a smaller
            view into `out`.
        Other Parameters
        ----------------
        always_2d, fill_value, out
            See :meth:`.read`.
        fill_value : float, optional
            See :meth:`.read`.
        out : numpy.ndarray or subclass, optional
            If `out` is specified, the data is written into the given
            array instead of creating a new array. In this case, the
            arguments `dtype` and `always_2d` are silently ignored!
        Examples
        --------
        >>> from soundfile import SoundFile
        >>> with SoundFile('stereo_file.wav') as f:
        >>>     for block in f.blocks(blocksize=1024):
        >>>         pass  # do something with 'block'
        """
        import numpy as np
        # Block reading requires the file to be open readable
        # ('r...' or '...+' modes).
        if 'r' not in self.mode and '+' not in self.mode:
            raise RuntimeError("blocks() is not allowed in write-only mode")
        if out is None:
            if blocksize is None:
                raise TypeError("One of {blocksize, out} must be specified")
            # Internal scratch buffer; blocks are yielded as *copies* below
            # so the caller may keep each one past the next iteration.
            out = self._create_empty_array(blocksize, always_2d, dtype)
            copy_out = True
        else:
            if blocksize is not None:
                raise TypeError(
                    "Only one of {blocksize, out} may be specified")
            blocksize = len(out)
            # Caller-supplied buffer: yield views into it without copying.
            copy_out = False
        # Holds the trailing `overlap` frames of the previous block so they
        # can be re-emitted at the start of the next block; None until the
        # first block has been produced.
        overlap_memory = None
        frames = self._check_frames(frames, fill_value)
        while frames > 0:
            if overlap_memory is None:
                # First block: the whole buffer is filled from the file.
                output_offset = 0
            else:
                # Prepend the remembered overlap before reading new frames.
                output_offset = len(overlap_memory)
                out[:output_offset] = overlap_memory
            # Read only as many new frames as fit behind the overlap, and
            # never more than are still requested.
            toread = min(blocksize - output_offset, frames)
            self.read(toread, dtype, always_2d, fill_value, out[output_offset:])
            if overlap:
                # Remember the last `overlap` frames for the next iteration.
                if overlap_memory is None:
                    overlap_memory = np.copy(out[-overlap:])
                else:
                    overlap_memory[:] = out[-overlap:]
            if blocksize > frames + overlap and fill_value is None:
                # Final partial block and no padding requested: yield a
                # trimmed slice instead of the full-size buffer.
                block = out[:frames + overlap]
            else:
                block = out
            yield np.copy(block) if copy_out else block
            frames -= toread
constant[Return a generator for block-wise reading.
By default, the generator yields blocks of the given
`blocksize` (using a given `overlap`) until the end of the file
is reached; `frames` can be used to stop earlier.
Parameters
----------
blocksize : int
The number of frames to read per block. Either this or `out`
must be given.
overlap : int, optional
The number of frames to rewind between each block.
frames : int, optional
The number of frames to read.
If ``frames < 0``, the file is read until the end.
dtype : {'float64', 'float32', 'int32', 'int16'}, optional
See :meth:`.read`.
Yields
------
numpy.ndarray or type(out)
Blocks of audio data.
If `out` was given, and the requested frames are not an
integer multiple of the length of `out`, and no
`fill_value` was given, the last block will be a smaller
view into `out`.
Other Parameters
----------------
always_2d, fill_value, out
See :meth:`.read`.
fill_value : float, optional
See :meth:`.read`.
out : numpy.ndarray or subclass, optional
If `out` is specified, the data is written into the given
array instead of creating a new array. In this case, the
arguments `dtype` and `always_2d` are silently ignored!
Examples
--------
>>> from soundfile import SoundFile
>>> with SoundFile('stereo_file.wav') as f:
>>> for block in f.blocks(blocksize=1024):
>>> pass # do something with 'block'
]
import module[numpy] as alias[np]
if <ast.BoolOp object at 0x7da1b197ebf0> begin[:]
<ast.Raise object at 0x7da1b197f970>
if compare[name[out] is constant[None]] begin[:]
if compare[name[blocksize] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b197c250>
variable[out] assign[=] call[name[self]._create_empty_array, parameter[name[blocksize], name[always_2d], name[dtype]]]
variable[copy_out] assign[=] constant[True]
variable[overlap_memory] assign[=] constant[None]
variable[frames] assign[=] call[name[self]._check_frames, parameter[name[frames], name[fill_value]]]
while compare[name[frames] greater[>] constant[0]] begin[:]
if compare[name[overlap_memory] is constant[None]] begin[:]
variable[output_offset] assign[=] constant[0]
variable[toread] assign[=] call[name[min], parameter[binary_operation[name[blocksize] - name[output_offset]], name[frames]]]
call[name[self].read, parameter[name[toread], name[dtype], name[always_2d], name[fill_value], call[name[out]][<ast.Slice object at 0x7da1b197d240>]]]
if name[overlap] begin[:]
if compare[name[overlap_memory] is constant[None]] begin[:]
variable[overlap_memory] assign[=] call[name[np].copy, parameter[call[name[out]][<ast.Slice object at 0x7da1b197ce50>]]]
if <ast.BoolOp object at 0x7da1b197c130> begin[:]
variable[block] assign[=] call[name[out]][<ast.Slice object at 0x7da1b197c820>]
<ast.Yield object at 0x7da1b197da80>
<ast.AugAssign object at 0x7da1b197ee30> | keyword[def] identifier[blocks] ( identifier[self] , identifier[blocksize] = keyword[None] , identifier[overlap] = literal[int] , identifier[frames] =- literal[int] , identifier[dtype] = literal[string] ,
identifier[always_2d] = keyword[False] , identifier[fill_value] = keyword[None] , identifier[out] = keyword[None] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[mode] keyword[and] literal[string] keyword[not] keyword[in] identifier[self] . identifier[mode] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[if] identifier[out] keyword[is] keyword[None] :
keyword[if] identifier[blocksize] keyword[is] keyword[None] :
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[out] = identifier[self] . identifier[_create_empty_array] ( identifier[blocksize] , identifier[always_2d] , identifier[dtype] )
identifier[copy_out] = keyword[True]
keyword[else] :
keyword[if] identifier[blocksize] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[TypeError] (
literal[string] )
identifier[blocksize] = identifier[len] ( identifier[out] )
identifier[copy_out] = keyword[False]
identifier[overlap_memory] = keyword[None]
identifier[frames] = identifier[self] . identifier[_check_frames] ( identifier[frames] , identifier[fill_value] )
keyword[while] identifier[frames] > literal[int] :
keyword[if] identifier[overlap_memory] keyword[is] keyword[None] :
identifier[output_offset] = literal[int]
keyword[else] :
identifier[output_offset] = identifier[len] ( identifier[overlap_memory] )
identifier[out] [: identifier[output_offset] ]= identifier[overlap_memory]
identifier[toread] = identifier[min] ( identifier[blocksize] - identifier[output_offset] , identifier[frames] )
identifier[self] . identifier[read] ( identifier[toread] , identifier[dtype] , identifier[always_2d] , identifier[fill_value] , identifier[out] [ identifier[output_offset] :])
keyword[if] identifier[overlap] :
keyword[if] identifier[overlap_memory] keyword[is] keyword[None] :
identifier[overlap_memory] = identifier[np] . identifier[copy] ( identifier[out] [- identifier[overlap] :])
keyword[else] :
identifier[overlap_memory] [:]= identifier[out] [- identifier[overlap] :]
keyword[if] identifier[blocksize] > identifier[frames] + identifier[overlap] keyword[and] identifier[fill_value] keyword[is] keyword[None] :
identifier[block] = identifier[out] [: identifier[frames] + identifier[overlap] ]
keyword[else] :
identifier[block] = identifier[out]
keyword[yield] identifier[np] . identifier[copy] ( identifier[block] ) keyword[if] identifier[copy_out] keyword[else] identifier[block]
identifier[frames] -= identifier[toread] | def blocks(self, blocksize=None, overlap=0, frames=-1, dtype='float64', always_2d=False, fill_value=None, out=None):
"""Return a generator for block-wise reading.
By default, the generator yields blocks of the given
`blocksize` (using a given `overlap`) until the end of the file
is reached; `frames` can be used to stop earlier.
Parameters
----------
blocksize : int
The number of frames to read per block. Either this or `out`
must be given.
overlap : int, optional
The number of frames to rewind between each block.
frames : int, optional
The number of frames to read.
If ``frames < 0``, the file is read until the end.
dtype : {'float64', 'float32', 'int32', 'int16'}, optional
See :meth:`.read`.
Yields
------
numpy.ndarray or type(out)
Blocks of audio data.
If `out` was given, and the requested frames are not an
integer multiple of the length of `out`, and no
`fill_value` was given, the last block will be a smaller
view into `out`.
Other Parameters
----------------
always_2d, fill_value, out
See :meth:`.read`.
fill_value : float, optional
See :meth:`.read`.
out : numpy.ndarray or subclass, optional
If `out` is specified, the data is written into the given
array instead of creating a new array. In this case, the
arguments `dtype` and `always_2d` are silently ignored!
Examples
--------
>>> from soundfile import SoundFile
>>> with SoundFile('stereo_file.wav') as f:
>>> for block in f.blocks(blocksize=1024):
>>> pass # do something with 'block'
"""
import numpy as np
if 'r' not in self.mode and '+' not in self.mode:
raise RuntimeError('blocks() is not allowed in write-only mode') # depends on [control=['if'], data=[]]
if out is None:
if blocksize is None:
raise TypeError('One of {blocksize, out} must be specified') # depends on [control=['if'], data=[]]
out = self._create_empty_array(blocksize, always_2d, dtype)
copy_out = True # depends on [control=['if'], data=['out']]
else:
if blocksize is not None:
raise TypeError('Only one of {blocksize, out} may be specified') # depends on [control=['if'], data=[]]
blocksize = len(out)
copy_out = False
overlap_memory = None
frames = self._check_frames(frames, fill_value)
while frames > 0:
if overlap_memory is None:
output_offset = 0 # depends on [control=['if'], data=[]]
else:
output_offset = len(overlap_memory)
out[:output_offset] = overlap_memory
toread = min(blocksize - output_offset, frames)
self.read(toread, dtype, always_2d, fill_value, out[output_offset:])
if overlap:
if overlap_memory is None:
overlap_memory = np.copy(out[-overlap:]) # depends on [control=['if'], data=['overlap_memory']]
else:
overlap_memory[:] = out[-overlap:] # depends on [control=['if'], data=[]]
if blocksize > frames + overlap and fill_value is None:
block = out[:frames + overlap] # depends on [control=['if'], data=[]]
else:
block = out
yield (np.copy(block) if copy_out else block)
frames -= toread # depends on [control=['while'], data=['frames']] |
def getWindowByPID(self, pid, order=0):
        """ Returns a handle for the first window that matches the provided PID

        ``order`` skips that many matching windows before one is returned
        (0 returns the first match); ``None`` is returned when ``pid`` is
        not positive or no visible window belongs to that process.
        """
        if pid <= 0:
            return None
        # Prototype for the EnumWindows callback: BOOL (*)(HWND, LPARAM).
        # NOTE(review): the HWND is declared as POINTER(c_int) and the
        # LPARAM slot as py_object — confirm this matches the ctypes
        # marshalling expected by the EnumWindows call below.
        EnumWindowsProc = ctypes.WINFUNCTYPE(
            ctypes.c_bool,
            ctypes.POINTER(ctypes.c_int),
            ctypes.py_object)
        def callback(hwnd, context):
            # Only visible windows are considered.
            if ctypes.windll.user32.IsWindowVisible(hwnd):
                # Shadows the outer `pid` on purpose: this one holds the id
                # of the process owning the enumerated window.
                pid = ctypes.c_ulong()
                ctypes.windll.user32.GetWindowThreadProcessId(hwnd, ctypes.byref(pid))
                if context["pid"] == int(pid.value) and not context["handle"]:
                    if context["order"] > 0:
                        # Not the requested occurrence yet; skip it.
                        context["order"] -= 1
                    else:
                        context["handle"] = hwnd
            # Always continue enumerating; the result is kept in `context`.
            return True
        # Mutable state shared with the callback through the LPARAM slot.
        data = {"pid": pid, "handle": None, "order": order}
        ctypes.windll.user32.EnumWindows(EnumWindowsProc(callback), ctypes.py_object(data))
        return data["handle"]
return data["handle"] | def function[getWindowByPID, parameter[self, pid, order]]:
constant[ Returns a handle for the first window that matches the provided PID ]
if compare[name[pid] less_or_equal[<=] constant[0]] begin[:]
return[constant[None]]
variable[EnumWindowsProc] assign[=] call[name[ctypes].WINFUNCTYPE, parameter[name[ctypes].c_bool, call[name[ctypes].POINTER, parameter[name[ctypes].c_int]], name[ctypes].py_object]]
def function[callback, parameter[hwnd, context]]:
if call[name[ctypes].windll.user32.IsWindowVisible, parameter[name[hwnd]]] begin[:]
variable[pid] assign[=] call[name[ctypes].c_ulong, parameter[]]
call[name[ctypes].windll.user32.GetWindowThreadProcessId, parameter[name[hwnd], call[name[ctypes].byref, parameter[name[pid]]]]]
if <ast.BoolOp object at 0x7da18c4ce410> begin[:]
if compare[call[name[context]][constant[order]] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18c4cfb80>
return[constant[True]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18dc998a0>, <ast.Constant object at 0x7da18dc98fd0>, <ast.Constant object at 0x7da18dc99d50>], [<ast.Name object at 0x7da18dc9a0e0>, <ast.Constant object at 0x7da18dc9b0d0>, <ast.Name object at 0x7da18dc9af80>]]
call[name[ctypes].windll.user32.EnumWindows, parameter[call[name[EnumWindowsProc], parameter[name[callback]]], call[name[ctypes].py_object, parameter[name[data]]]]]
return[call[name[data]][constant[handle]]] | keyword[def] identifier[getWindowByPID] ( identifier[self] , identifier[pid] , identifier[order] = literal[int] ):
literal[string]
keyword[if] identifier[pid] <= literal[int] :
keyword[return] keyword[None]
identifier[EnumWindowsProc] = identifier[ctypes] . identifier[WINFUNCTYPE] (
identifier[ctypes] . identifier[c_bool] ,
identifier[ctypes] . identifier[POINTER] ( identifier[ctypes] . identifier[c_int] ),
identifier[ctypes] . identifier[py_object] )
keyword[def] identifier[callback] ( identifier[hwnd] , identifier[context] ):
keyword[if] identifier[ctypes] . identifier[windll] . identifier[user32] . identifier[IsWindowVisible] ( identifier[hwnd] ):
identifier[pid] = identifier[ctypes] . identifier[c_ulong] ()
identifier[ctypes] . identifier[windll] . identifier[user32] . identifier[GetWindowThreadProcessId] ( identifier[hwnd] , identifier[ctypes] . identifier[byref] ( identifier[pid] ))
keyword[if] identifier[context] [ literal[string] ]== identifier[int] ( identifier[pid] . identifier[value] ) keyword[and] keyword[not] identifier[context] [ literal[string] ]:
keyword[if] identifier[context] [ literal[string] ]> literal[int] :
identifier[context] [ literal[string] ]-= literal[int]
keyword[else] :
identifier[context] [ literal[string] ]= identifier[hwnd]
keyword[return] keyword[True]
identifier[data] ={ literal[string] : identifier[pid] , literal[string] : keyword[None] , literal[string] : identifier[order] }
identifier[ctypes] . identifier[windll] . identifier[user32] . identifier[EnumWindows] ( identifier[EnumWindowsProc] ( identifier[callback] ), identifier[ctypes] . identifier[py_object] ( identifier[data] ))
keyword[return] identifier[data] [ literal[string] ] | def getWindowByPID(self, pid, order=0):
""" Returns a handle for the first window that matches the provided PID """
if pid <= 0:
return None # depends on [control=['if'], data=[]]
EnumWindowsProc = ctypes.WINFUNCTYPE(ctypes.c_bool, ctypes.POINTER(ctypes.c_int), ctypes.py_object)
def callback(hwnd, context):
if ctypes.windll.user32.IsWindowVisible(hwnd):
pid = ctypes.c_ulong()
ctypes.windll.user32.GetWindowThreadProcessId(hwnd, ctypes.byref(pid))
if context['pid'] == int(pid.value) and (not context['handle']):
if context['order'] > 0:
context['order'] -= 1 # depends on [control=['if'], data=[]]
else:
context['handle'] = hwnd # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return True
data = {'pid': pid, 'handle': None, 'order': order}
ctypes.windll.user32.EnumWindows(EnumWindowsProc(callback), ctypes.py_object(data))
return data['handle'] |
def createPoll(self, poll, thread_id=None):
        """
        Creates poll in a group thread
        :param poll: Poll to create
        :param thread_id: User/Group ID to create poll in. See :ref:`intro_threads`
        :type poll: models.Poll
        :raises: FBchatException if request failed
        """
        # Only the thread id is needed here; the thread type returned by
        # _getThread is irrelevant for this endpoint (was an unused local).
        thread_id, _ = self._getThread(thread_id, None)
        # We're using ordered dicts, because the Facebook endpoint that parses
        # the POST parameters is badly implemented, and deals with ordering
        # the options wrongly. This also means we had to change
        # `client._payload_default` to an ordered dict, since that's being
        # copied in between this point and the `requests` call
        #
        # If you can find a way to fix this for the endpoint, or if you find
        # another endpoint, please do suggest it ;)
        data = OrderedDict([("question_text", poll.title), ("target_id", thread_id)])
        for i, option in enumerate(poll.options):
            # Options travel as parallel indexed arrays; the pre-selected
            # flag is sent over the wire as the string "0" or "1".
            data["option_text_array[{}]".format(i)] = option.text
            data["option_is_selected_array[{}]".format(i)] = str(int(option.vote))
        # fix_request=True makes _post raise on a failed request, so the
        # response does not need to be inspected here (the old dead `j`
        # assignment has been removed).
        self._post(self.req_url.CREATE_POLL, data, fix_request=True, as_json=True)
constant[
Creates poll in a group thread
:param poll: Poll to create
:param thread_id: User/Group ID to create poll in. See :ref:`intro_threads`
:type poll: models.Poll
:raises: FBchatException if request failed
]
<ast.Tuple object at 0x7da1b18ed720> assign[=] call[name[self]._getThread, parameter[name[thread_id], constant[None]]]
variable[data] assign[=] call[name[OrderedDict], parameter[list[[<ast.Tuple object at 0x7da1b18ee020>, <ast.Tuple object at 0x7da1b18efe50>]]]]
for taget[tuple[[<ast.Name object at 0x7da1b18efe20>, <ast.Name object at 0x7da1b18ee080>]]] in starred[call[name[enumerate], parameter[name[poll].options]]] begin[:]
call[name[data]][call[constant[option_text_array[{}]].format, parameter[name[i]]]] assign[=] name[option].text
call[name[data]][call[constant[option_is_selected_array[{}]].format, parameter[name[i]]]] assign[=] call[name[str], parameter[call[name[int], parameter[name[option].vote]]]]
variable[j] assign[=] call[name[self]._post, parameter[name[self].req_url.CREATE_POLL, name[data]]] | keyword[def] identifier[createPoll] ( identifier[self] , identifier[poll] , identifier[thread_id] = keyword[None] ):
literal[string]
identifier[thread_id] , identifier[thread_type] = identifier[self] . identifier[_getThread] ( identifier[thread_id] , keyword[None] )
identifier[data] = identifier[OrderedDict] ([( literal[string] , identifier[poll] . identifier[title] ),( literal[string] , identifier[thread_id] )])
keyword[for] identifier[i] , identifier[option] keyword[in] identifier[enumerate] ( identifier[poll] . identifier[options] ):
identifier[data] [ literal[string] . identifier[format] ( identifier[i] )]= identifier[option] . identifier[text]
identifier[data] [ literal[string] . identifier[format] ( identifier[i] )]= identifier[str] ( identifier[int] ( identifier[option] . identifier[vote] ))
identifier[j] = identifier[self] . identifier[_post] ( identifier[self] . identifier[req_url] . identifier[CREATE_POLL] , identifier[data] , identifier[fix_request] = keyword[True] , identifier[as_json] = keyword[True] ) | def createPoll(self, poll, thread_id=None):
"""
Creates poll in a group thread
:param poll: Poll to create
:param thread_id: User/Group ID to create poll in. See :ref:`intro_threads`
:type poll: models.Poll
:raises: FBchatException if request failed
"""
(thread_id, thread_type) = self._getThread(thread_id, None)
# We're using ordered dicts, because the Facebook endpoint that parses the POST
# parameters is badly implemented, and deals with ordering the options wrongly.
# This also means we had to change `client._payload_default` to an ordered dict,
# since that's being copied in between this point and the `requests` call
#
# If you can find a way to fix this for the endpoint, or if you find another
# endpoint, please do suggest it ;)
data = OrderedDict([('question_text', poll.title), ('target_id', thread_id)])
for (i, option) in enumerate(poll.options):
data['option_text_array[{}]'.format(i)] = option.text
data['option_is_selected_array[{}]'.format(i)] = str(int(option.vote)) # depends on [control=['for'], data=[]]
j = self._post(self.req_url.CREATE_POLL, data, fix_request=True, as_json=True) |
def _add_message_info_multiple(self, msg_info):
        """ add a message info multiple to self._msg_info_multiple_dict """
        # Each key maps to a list of value runs; a "continued" message
        # extends the most recent run, anything else starts a new run.
        bucket = self._msg_info_multiple_dict
        if msg_info.key not in bucket:
            # First value ever seen for this key: open the initial run.
            bucket[msg_info.key] = [[msg_info.value]]
        elif msg_info.is_continued:
            bucket[msg_info.key][-1].append(msg_info.value)
        else:
            bucket[msg_info.key].append([msg_info.value])
constant[ add a message info multiple to self._msg_info_multiple_dict ]
if compare[name[msg_info].key in name[self]._msg_info_multiple_dict] begin[:]
if name[msg_info].is_continued begin[:]
call[call[call[name[self]._msg_info_multiple_dict][name[msg_info].key]][<ast.UnaryOp object at 0x7da20c6e4e50>].append, parameter[name[msg_info].value]] | keyword[def] identifier[_add_message_info_multiple] ( identifier[self] , identifier[msg_info] ):
literal[string]
keyword[if] identifier[msg_info] . identifier[key] keyword[in] identifier[self] . identifier[_msg_info_multiple_dict] :
keyword[if] identifier[msg_info] . identifier[is_continued] :
identifier[self] . identifier[_msg_info_multiple_dict] [ identifier[msg_info] . identifier[key] ][- literal[int] ]. identifier[append] ( identifier[msg_info] . identifier[value] )
keyword[else] :
identifier[self] . identifier[_msg_info_multiple_dict] [ identifier[msg_info] . identifier[key] ]. identifier[append] ([ identifier[msg_info] . identifier[value] ])
keyword[else] :
identifier[self] . identifier[_msg_info_multiple_dict] [ identifier[msg_info] . identifier[key] ]=[[ identifier[msg_info] . identifier[value] ]] | def _add_message_info_multiple(self, msg_info):
""" add a message info multiple to self._msg_info_multiple_dict """
if msg_info.key in self._msg_info_multiple_dict:
if msg_info.is_continued:
self._msg_info_multiple_dict[msg_info.key][-1].append(msg_info.value) # depends on [control=['if'], data=[]]
else:
self._msg_info_multiple_dict[msg_info.key].append([msg_info.value]) # depends on [control=['if'], data=[]]
else:
self._msg_info_multiple_dict[msg_info.key] = [[msg_info.value]] |
def modify(self, **params):
    """Update this account's settings on the remote service.

    See https://developers.coinbase.com/api#modify-an-account

    Args:
        **params: account fields to change, passed straight through to
            the API client.

    Returns:
        dict: the updated account data returned by the server.
    """
    updated = self.api_client.update_account(self.id, **params)
    # Refresh the local object so it mirrors the server-side state.
    self.update(updated)
    return updated
constant[https://developers.coinbase.com/api#modify-an-account]
variable[data] assign[=] call[name[self].api_client.update_account, parameter[name[self].id]]
call[name[self].update, parameter[name[data]]]
return[name[data]] | keyword[def] identifier[modify] ( identifier[self] ,** identifier[params] ):
literal[string]
identifier[data] = identifier[self] . identifier[api_client] . identifier[update_account] ( identifier[self] . identifier[id] ,** identifier[params] )
identifier[self] . identifier[update] ( identifier[data] )
keyword[return] identifier[data] | def modify(self, **params):
"""https://developers.coinbase.com/api#modify-an-account"""
data = self.api_client.update_account(self.id, **params)
self.update(data)
return data |
def update_item(self, table_name, key, attribute_updates,
                expected=None, return_values=None,
                object_hook=None):
    """
    Edits an existing item's attributes.  You can perform a conditional
    update (insert a new attribute name-value pair if it doesn't exist,
    or replace an existing name-value pair if it has certain expected
    attribute values).

    :type table_name: str
    :param table_name: The name of the table.

    :type key: dict
    :param key: A Python version of the Key data structure
        defined by DynamoDB which identifies the item to be updated.

    :type attribute_updates: dict
    :param attribute_updates: A Python version of the AttributeUpdates
        data structure defined by DynamoDB.

    :type expected: dict
    :param expected: A Python version of the Expected
        data structure defined by DynamoDB.

    :type return_values: str
    :param return_values: Controls the return of attribute name-value
        pairs before they were changed.  Possible values are: None or
        'ALL_OLD'.  If 'ALL_OLD' is specified and the item is
        overwritten, the content of the old item is returned.
    """
    payload = {
        'TableName': table_name,
        'Key': key,
        'AttributeUpdates': attribute_updates,
    }
    # Optional members are only included when the caller supplied them.
    if expected:
        payload['Expected'] = expected
    if return_values:
        payload['ReturnValues'] = return_values
    return self.make_request('UpdateItem', json.dumps(payload),
                             object_hook=object_hook)
constant[
Edits an existing item's attributes. You can perform a conditional
update (insert a new attribute name-value pair if it doesn't exist,
or replace an existing name-value pair if it has certain expected
attribute values).
:type table_name: str
:param table_name: The name of the table.
:type key: dict
:param key: A Python version of the Key data structure
defined by DynamoDB which identifies the item to be updated.
:type attribute_updates: dict
:param attribute_updates: A Python version of the AttributeUpdates
data structure defined by DynamoDB.
:type expected: dict
:param expected: A Python version of the Expected
data structure defined by DynamoDB.
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before then were changed. Possible
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b26148e0>, <ast.Constant object at 0x7da1b2615ae0>, <ast.Constant object at 0x7da1b2615090>], [<ast.Name object at 0x7da1b26174f0>, <ast.Name object at 0x7da1b2614e20>, <ast.Name object at 0x7da1b2617910>]]
if name[expected] begin[:]
call[name[data]][constant[Expected]] assign[=] name[expected]
if name[return_values] begin[:]
call[name[data]][constant[ReturnValues]] assign[=] name[return_values]
variable[json_input] assign[=] call[name[json].dumps, parameter[name[data]]]
return[call[name[self].make_request, parameter[constant[UpdateItem], name[json_input]]]] | keyword[def] identifier[update_item] ( identifier[self] , identifier[table_name] , identifier[key] , identifier[attribute_updates] ,
identifier[expected] = keyword[None] , identifier[return_values] = keyword[None] ,
identifier[object_hook] = keyword[None] ):
literal[string]
identifier[data] ={ literal[string] : identifier[table_name] ,
literal[string] : identifier[key] ,
literal[string] : identifier[attribute_updates] }
keyword[if] identifier[expected] :
identifier[data] [ literal[string] ]= identifier[expected]
keyword[if] identifier[return_values] :
identifier[data] [ literal[string] ]= identifier[return_values]
identifier[json_input] = identifier[json] . identifier[dumps] ( identifier[data] )
keyword[return] identifier[self] . identifier[make_request] ( literal[string] , identifier[json_input] ,
identifier[object_hook] = identifier[object_hook] ) | def update_item(self, table_name, key, attribute_updates, expected=None, return_values=None, object_hook=None):
"""
Edits an existing item's attributes. You can perform a conditional
update (insert a new attribute name-value pair if it doesn't exist,
or replace an existing name-value pair if it has certain expected
attribute values).
:type table_name: str
:param table_name: The name of the table.
:type key: dict
:param key: A Python version of the Key data structure
defined by DynamoDB which identifies the item to be updated.
:type attribute_updates: dict
:param attribute_updates: A Python version of the AttributeUpdates
data structure defined by DynamoDB.
:type expected: dict
:param expected: A Python version of the Expected
data structure defined by DynamoDB.
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before then were changed. Possible
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
"""
data = {'TableName': table_name, 'Key': key, 'AttributeUpdates': attribute_updates}
if expected:
data['Expected'] = expected # depends on [control=['if'], data=[]]
if return_values:
data['ReturnValues'] = return_values # depends on [control=['if'], data=[]]
json_input = json.dumps(data)
return self.make_request('UpdateItem', json_input, object_hook=object_hook) |
def get_quantiles(acquisition_par, fmin, m, s):
    """
    Quantiles of the Gaussian distribution useful to determine the
    acquisition function values.

    :param acquisition_par: parameter of the acquisition function
    :param fmin: current minimum.
    :param m: vector of means.
    :param s: vector of standard deviations.
    :return: tuple ``(phi, Phi, u)`` with the Gaussian pdf, the Gaussian
        cdf, and the standardized improvement
        ``u = (fmin - m - acquisition_par) / s``.
    """
    eps = 1e-10
    # Clamp (near-)zero standard deviations before dividing.  For arrays
    # this is deliberately an in-place update, matching the side effect
    # callers may rely on.
    if isinstance(s, np.ndarray):
        s[s < eps] = eps
    elif s < eps:
        s = eps
    u = (fmin - m - acquisition_par) / s
    phi = np.exp(-u ** 2 / 2.0) / np.sqrt(2 * np.pi)
    Phi = erfc(-u / np.sqrt(2)) / 2.0
    return (phi, Phi, u)
constant[
Quantiles of the Gaussian distribution useful to determine the acquisition function values
:param acquisition_par: parameter of the acquisition function
:param fmin: current minimum.
:param m: vector of means.
:param s: vector of standard deviations.
]
if call[name[isinstance], parameter[name[s], name[np].ndarray]] begin[:]
call[name[s]][compare[name[s] less[<] constant[1e-10]]] assign[=] constant[1e-10]
variable[u] assign[=] binary_operation[binary_operation[binary_operation[name[fmin] - name[m]] - name[acquisition_par]] / name[s]]
variable[phi] assign[=] binary_operation[call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da18bc71600> * binary_operation[name[u] ** constant[2]]]]] / call[name[np].sqrt, parameter[binary_operation[constant[2] * name[np].pi]]]]
variable[Phi] assign[=] binary_operation[constant[0.5] * call[name[erfc], parameter[binary_operation[<ast.UnaryOp object at 0x7da1b2344f40> / call[name[np].sqrt, parameter[constant[2]]]]]]]
return[tuple[[<ast.Name object at 0x7da1b2344a90>, <ast.Name object at 0x7da1b2344e20>, <ast.Name object at 0x7da1b23476d0>]]] | keyword[def] identifier[get_quantiles] ( identifier[acquisition_par] , identifier[fmin] , identifier[m] , identifier[s] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[s] , identifier[np] . identifier[ndarray] ):
identifier[s] [ identifier[s] < literal[int] ]= literal[int]
keyword[elif] identifier[s] < literal[int] :
identifier[s] = literal[int]
identifier[u] =( identifier[fmin] - identifier[m] - identifier[acquisition_par] )/ identifier[s]
identifier[phi] = identifier[np] . identifier[exp] (- literal[int] * identifier[u] ** literal[int] )/ identifier[np] . identifier[sqrt] ( literal[int] * identifier[np] . identifier[pi] )
identifier[Phi] = literal[int] * identifier[erfc] (- identifier[u] / identifier[np] . identifier[sqrt] ( literal[int] ))
keyword[return] ( identifier[phi] , identifier[Phi] , identifier[u] ) | def get_quantiles(acquisition_par, fmin, m, s):
"""
Quantiles of the Gaussian distribution useful to determine the acquisition function values
:param acquisition_par: parameter of the acquisition function
:param fmin: current minimum.
:param m: vector of means.
:param s: vector of standard deviations.
"""
if isinstance(s, np.ndarray):
s[s < 1e-10] = 1e-10 # depends on [control=['if'], data=[]]
elif s < 1e-10:
s = 1e-10 # depends on [control=['if'], data=['s']]
u = (fmin - m - acquisition_par) / s
phi = np.exp(-0.5 * u ** 2) / np.sqrt(2 * np.pi)
Phi = 0.5 * erfc(-u / np.sqrt(2))
return (phi, Phi, u) |
def find_ostype(ost_id=None, ost_name=None, ost_arch=None):
    """
    find the OS type (ost) according ost id (prioritary) or ost name
    :param ost_id: the OS type id
    :param ost_name: the OS type name
    :param ost_arch: the OS type architecture
    :return: found OS type or None if not found
    :raise exceptions.ArianeCallParametersError: when neither an id nor a
        complete (name, architecture) pair is provided
    """
    LOGGER.debug("OSTypeService.find_ostype")
    # At least one selector is required: an id or a (name, arch) pair.
    if (ost_id is None or not ost_id) and (ost_name is None or not ost_name) and (ost_arch is None or not ost_arch):
        raise exceptions.ArianeCallParametersError('id and (name, architecture)')

    # id takes precedence: when both selector kinds are given, keep only id.
    if (ost_id is not None and ost_id) and ((ost_name is not None and ost_name) or
                                            (ost_arch is not None and ost_arch)):
        # Logger.warn is deprecated; use Logger.warning (consistent with the
        # warning call further below in this function).
        LOGGER.warning('OSTypeService.find_ostype - Both id and (name, arc) are defined. Will give you search on id.')
        ost_name = None
        ost_arch = None

    # name and architecture are only meaningful together.
    if ((ost_name is not None and ost_name) and (ost_arch is None or not ost_arch)) or\
            ((ost_arch is not None and ost_arch) and (ost_name is None or not ost_name)):
        raise exceptions.ArianeCallParametersError('(name, architecture)')

    params = None
    if ost_id is not None and ost_id:
        params = {'id': ost_id}
    elif ost_name is not None and ost_name and ost_arch is not None and ost_arch:
        params = {'name': ost_name, 'arc': ost_arch}

    ret = None
    if params is not None:
        args = {'http_operation': 'GET', 'operation_path': 'get', 'parameters': params}
        response = OSTypeService.requester.call(args)
        if response.rc == 0:
            ret = OSType.json_2_ostype(response.response_content)
        elif response.rc != 404:
            # rc 404 simply means "not found" and yields None; any other
            # non-zero code is an actual problem worth logging.
            err_msg = 'OSTypeService.find_ostype - Problem while finding OS Type (id:' + str(ost_id) + \
                      ', name:' + str(ost_name) + '). ' + \
                      'Reason: ' + str(response.response_content) + '-' + str(response.error_message) + \
                      " (" + str(response.rc) + ")"
            LOGGER.warning(
                err_msg
            )
    return ret
constant[
find the OS type (ost) according ost id (prioritary) or ost name
:param ost_id: the OS type id
:param ost_name: the OS type name
:param ost_arch: the OS type architecture
:return: found OS type or None if not found
]
call[name[LOGGER].debug, parameter[constant[OSTypeService.find_ostype]]]
if <ast.BoolOp object at 0x7da1b14e41f0> begin[:]
<ast.Raise object at 0x7da1b14e67a0>
if <ast.BoolOp object at 0x7da1b14e76d0> begin[:]
call[name[LOGGER].warn, parameter[constant[OSTypeService.find_ostype - Both id and (name, arc) are defined. Will give you search on id.]]]
variable[ost_name] assign[=] constant[None]
variable[ost_arch] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b135d660> begin[:]
<ast.Raise object at 0x7da1b135d0c0>
variable[params] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b135cfa0> begin[:]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b135ca60>], [<ast.Name object at 0x7da1b135d030>]]
variable[ret] assign[=] constant[None]
if compare[name[params] is_not constant[None]] begin[:]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b135e6b0>, <ast.Constant object at 0x7da1b135ea10>, <ast.Constant object at 0x7da1b135e5c0>], [<ast.Constant object at 0x7da1b135e410>, <ast.Constant object at 0x7da1b135e4a0>, <ast.Name object at 0x7da1b135e500>]]
variable[response] assign[=] call[name[OSTypeService].requester.call, parameter[name[args]]]
if compare[name[response].rc equal[==] constant[0]] begin[:]
variable[ret] assign[=] call[name[OSType].json_2_ostype, parameter[name[response].response_content]]
return[name[ret]] | keyword[def] identifier[find_ostype] ( identifier[ost_id] = keyword[None] , identifier[ost_name] = keyword[None] , identifier[ost_arch] = keyword[None] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[if] ( identifier[ost_id] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ost_id] ) keyword[and] ( identifier[ost_name] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ost_name] ) keyword[and] ( identifier[ost_arch] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ost_arch] ):
keyword[raise] identifier[exceptions] . identifier[ArianeCallParametersError] ( literal[string] )
keyword[if] ( identifier[ost_id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ost_id] ) keyword[and] (( identifier[ost_name] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ost_name] ) keyword[or]
( identifier[ost_arch] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ost_arch] )):
identifier[LOGGER] . identifier[warn] ( literal[string] )
identifier[ost_name] = keyword[None]
identifier[ost_arch] = keyword[None]
keyword[if] (( identifier[ost_name] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ost_name] ) keyword[and] ( identifier[ost_arch] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ost_arch] )) keyword[or] (( identifier[ost_arch] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ost_arch] ) keyword[and] ( identifier[ost_name] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ost_name] )):
keyword[raise] identifier[exceptions] . identifier[ArianeCallParametersError] ( literal[string] )
identifier[params] = keyword[None]
keyword[if] identifier[ost_id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ost_id] :
identifier[params] ={ literal[string] : identifier[ost_id] }
keyword[elif] identifier[ost_name] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ost_name] keyword[and] identifier[ost_arch] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ost_arch] :
identifier[params] ={ literal[string] : identifier[ost_name] , literal[string] : identifier[ost_arch] }
identifier[ret] = keyword[None]
keyword[if] identifier[params] keyword[is] keyword[not] keyword[None] :
identifier[args] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[params] }
identifier[response] = identifier[OSTypeService] . identifier[requester] . identifier[call] ( identifier[args] )
keyword[if] identifier[response] . identifier[rc] == literal[int] :
identifier[ret] = identifier[OSType] . identifier[json_2_ostype] ( identifier[response] . identifier[response_content] )
keyword[elif] identifier[response] . identifier[rc] != literal[int] :
identifier[err_msg] = literal[string] + identifier[str] ( identifier[ost_id] )+ literal[string] + identifier[str] ( identifier[ost_name] )+ literal[string] + literal[string] + identifier[str] ( identifier[response] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[response] . identifier[error_message] )+ literal[string] + identifier[str] ( identifier[response] . identifier[rc] )+ literal[string]
identifier[LOGGER] . identifier[warning] (
identifier[err_msg]
)
keyword[return] identifier[ret] | def find_ostype(ost_id=None, ost_name=None, ost_arch=None):
"""
find the OS type (ost) according ost id (prioritary) or ost name
:param ost_id: the OS type id
:param ost_name: the OS type name
:param ost_arch: the OS type architecture
:return: found OS type or None if not found
"""
LOGGER.debug('OSTypeService.find_ostype')
if (ost_id is None or not ost_id) and (ost_name is None or not ost_name) and (ost_arch is None or not ost_arch):
raise exceptions.ArianeCallParametersError('id and (name, architecture)') # depends on [control=['if'], data=[]]
if (ost_id is not None and ost_id) and (ost_name is not None and ost_name or (ost_arch is not None and ost_arch)):
LOGGER.warn('OSTypeService.find_ostype - Both id and (name, arc) are defined. Will give you search on id.')
ost_name = None
ost_arch = None # depends on [control=['if'], data=[]]
if (ost_name is not None and ost_name) and (ost_arch is None or not ost_arch) or ((ost_arch is not None and ost_arch) and (ost_name is None or not ost_name)):
raise exceptions.ArianeCallParametersError('(name, architecture)') # depends on [control=['if'], data=[]]
params = None
if ost_id is not None and ost_id:
params = {'id': ost_id} # depends on [control=['if'], data=[]]
elif ost_name is not None and ost_name and (ost_arch is not None) and ost_arch:
params = {'name': ost_name, 'arc': ost_arch} # depends on [control=['if'], data=[]]
ret = None
if params is not None:
args = {'http_operation': 'GET', 'operation_path': 'get', 'parameters': params}
response = OSTypeService.requester.call(args)
if response.rc == 0:
ret = OSType.json_2_ostype(response.response_content) # depends on [control=['if'], data=[]]
elif response.rc != 404:
err_msg = 'OSTypeService.find_ostype - Problem while finding OS Type (id:' + str(ost_id) + ', name:' + str(ost_name) + '). ' + 'Reason: ' + str(response.response_content) + '-' + str(response.error_message) + ' (' + str(response.rc) + ')'
LOGGER.warning(err_msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['params']]
return ret |
def generate_labels_from_classifications(classifications, timestamps):
    """
    Generate continuous labelled segments out of classified small windows.

    Consecutive windows that received the same classification are merged
    into one label spanning from the start of the first window to the end
    of the last one.  The window length is inferred from the spacing of
    the first two timestamps, so at least two windows are required to
    produce labels.

    :param classifications: per-window class labels
    :param timestamps: per-window start times (assumed uniformly spaced)
    :return: list of AudacityLabel segments (empty for empty input)
    """
    # Guard: no classified windows means no labels (the original code
    # raised IndexError here when computing the window length).
    if not classifications:
        return []
    window_length = timestamps[1] - timestamps[0]
    labels = []
    # groupby collapses runs of identical classifications; zip pairs each
    # classification with its window start time.
    for label_class, group in itertools.groupby(
            zip(classifications, timestamps), key=lambda pair: pair[0]):
        items = list(group)
        start_time = items[0][1]
        end_time = items[-1][1] + window_length
        labels.append(AudacityLabel(start_time, end_time, label_class))
    return labels
constant[
This is to generate continuous segments out of classified small windows
:param classifications:
:param timestamps:
:return:
]
variable[window_length] assign[=] binary_operation[call[name[timestamps]][constant[1]] - call[name[timestamps]][constant[0]]]
variable[combo_list] assign[=] <ast.ListComp object at 0x7da2041d92a0>
variable[labels] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2041d9330>, <ast.Name object at 0x7da2041d8a90>]]] in starred[call[name[itertools].groupby, parameter[name[combo_list], <ast.Lambda object at 0x7da2041dbdc0>]]] begin[:]
variable[items] assign[=] call[name[list], parameter[name[g]]]
variable[start_time] assign[=] call[call[name[items]][constant[0]]][constant[1]]
variable[end_time] assign[=] binary_operation[call[call[name[items]][<ast.UnaryOp object at 0x7da2041dbf40>]][constant[1]] + name[window_length]]
variable[label_class] assign[=] call[call[name[items]][constant[0]]][constant[0]]
call[name[labels].append, parameter[call[name[AudacityLabel], parameter[name[start_time], name[end_time], name[label_class]]]]]
return[name[labels]] | keyword[def] identifier[generate_labels_from_classifications] ( identifier[classifications] , identifier[timestamps] ):
literal[string]
identifier[window_length] = identifier[timestamps] [ literal[int] ]- identifier[timestamps] [ literal[int] ]
identifier[combo_list] =[( identifier[classifications] [ identifier[k] ], identifier[timestamps] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[classifications] ))]
identifier[labels] =[]
keyword[for] identifier[k] , identifier[g] keyword[in] identifier[itertools] . identifier[groupby] ( identifier[combo_list] , keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]):
identifier[items] = identifier[list] ( identifier[g] )
identifier[start_time] = identifier[items] [ literal[int] ][ literal[int] ]
identifier[end_time] = identifier[items] [- literal[int] ][ literal[int] ]+ identifier[window_length]
identifier[label_class] = identifier[items] [ literal[int] ][ literal[int] ]
identifier[labels] . identifier[append] ( identifier[AudacityLabel] ( identifier[start_time] , identifier[end_time] , identifier[label_class] ))
keyword[return] identifier[labels] | def generate_labels_from_classifications(classifications, timestamps):
"""
This is to generate continuous segments out of classified small windows
:param classifications:
:param timestamps:
:return:
"""
window_length = timestamps[1] - timestamps[0]
combo_list = [(classifications[k], timestamps[k]) for k in range(0, len(classifications))]
labels = []
for (k, g) in itertools.groupby(combo_list, lambda x: x[0]):
items = list(g)
start_time = items[0][1]
end_time = items[-1][1] + window_length
label_class = items[0][0]
labels.append(AudacityLabel(start_time, end_time, label_class)) # depends on [control=['for'], data=[]]
return labels |
def read_samples(self, sr=None, offset=0, duration=None):
    """
    Read the samples of the utterance.

    Args:
        sr (int): If None uses the sampling rate given by the track,
                  otherwise resamples to the given sampling rate.
        offset (float): Offset in seconds to read samples from.
        duration (float): If not ``None`` read only this
                          number of seconds in maximum.

    Returns:
        np.ndarray: A numpy array containing the samples
        as a floating point (numpy.float32) time series.
    """
    # How much of the utterance remains after the requested offset
    # (None when the utterance itself has no known duration).
    remaining = self.duration
    if remaining is not None and offset > 0:
        remaining = remaining - offset
    # Cap by the caller-requested duration, if any.
    if duration is not None:
        remaining = duration if remaining is None else min(duration, remaining)
    # The utterance is a window into the track, so shift by its start.
    return self.track.read_samples(sr=sr,
                                   offset=self.start + offset,
                                   duration=remaining)
constant[
Read the samples of the utterance.
Args:
sr (int): If None uses the sampling rate given by the track,
otherwise resamples to the given sampling rate.
offset (float): Offset in seconds to read samples from.
duration (float): If not ``None`` read only this
number of seconds in maximum.
Returns:
np.ndarray: A numpy array containing the samples
as a floating point (numpy.float32) time series.
]
variable[read_duration] assign[=] name[self].duration
if <ast.BoolOp object at 0x7da1b0b41120> begin[:]
<ast.AugAssign object at 0x7da1b0ed3790>
if compare[name[duration] is_not constant[None]] begin[:]
if compare[name[read_duration] is constant[None]] begin[:]
variable[read_duration] assign[=] name[duration]
return[call[name[self].track.read_samples, parameter[]]] | keyword[def] identifier[read_samples] ( identifier[self] , identifier[sr] = keyword[None] , identifier[offset] = literal[int] , identifier[duration] = keyword[None] ):
literal[string]
identifier[read_duration] = identifier[self] . identifier[duration]
keyword[if] identifier[offset] > literal[int] keyword[and] identifier[read_duration] keyword[is] keyword[not] keyword[None] :
identifier[read_duration] -= identifier[offset]
keyword[if] identifier[duration] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[read_duration] keyword[is] keyword[None] :
identifier[read_duration] = identifier[duration]
keyword[else] :
identifier[read_duration] = identifier[min] ( identifier[duration] , identifier[read_duration] )
keyword[return] identifier[self] . identifier[track] . identifier[read_samples] (
identifier[sr] = identifier[sr] ,
identifier[offset] = identifier[self] . identifier[start] + identifier[offset] ,
identifier[duration] = identifier[read_duration]
) | def read_samples(self, sr=None, offset=0, duration=None):
"""
Read the samples of the utterance.
Args:
sr (int): If None uses the sampling rate given by the track,
otherwise resamples to the given sampling rate.
offset (float): Offset in seconds to read samples from.
duration (float): If not ``None`` read only this
number of seconds in maximum.
Returns:
np.ndarray: A numpy array containing the samples
as a floating point (numpy.float32) time series.
"""
read_duration = self.duration
if offset > 0 and read_duration is not None:
read_duration -= offset # depends on [control=['if'], data=[]]
if duration is not None:
if read_duration is None:
read_duration = duration # depends on [control=['if'], data=['read_duration']]
else:
read_duration = min(duration, read_duration) # depends on [control=['if'], data=['duration']]
return self.track.read_samples(sr=sr, offset=self.start + offset, duration=read_duration) |
def set_heat_pump_feature(self, device_label, feature):
    """ Set heatpump mode

    Args:
        feature: 'QUIET', 'ECONAVI', or 'POWERFUL'
    """
    endpoint = urls.set_heatpump_feature(self._giid, device_label, feature)
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'Cookie': 'vid={}'.format(self._vid),
    }
    response = None
    try:
        response = requests.put(endpoint, headers=headers)
    except requests.exceptions.RequestException as ex:
        # Wrap transport-level failures in the library's own error type.
        raise RequestError(ex)
    _validate_response(response)
    return json.loads(response.text)
constant[ Set heatpump mode
Args:
feature: 'QUIET', 'ECONAVI', or 'POWERFUL'
]
variable[response] assign[=] constant[None]
<ast.Try object at 0x7da1b1020a00>
call[name[_validate_response], parameter[name[response]]]
return[call[name[json].loads, parameter[name[response].text]]] | keyword[def] identifier[set_heat_pump_feature] ( identifier[self] , identifier[device_label] , identifier[feature] ):
literal[string]
identifier[response] = keyword[None]
keyword[try] :
identifier[response] = identifier[requests] . identifier[put] (
identifier[urls] . identifier[set_heatpump_feature] ( identifier[self] . identifier[_giid] , identifier[device_label] , identifier[feature] ),
identifier[headers] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] . identifier[format] ( identifier[self] . identifier[_vid] )})
keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[ex] :
keyword[raise] identifier[RequestError] ( identifier[ex] )
identifier[_validate_response] ( identifier[response] )
keyword[return] identifier[json] . identifier[loads] ( identifier[response] . identifier[text] ) | def set_heat_pump_feature(self, device_label, feature):
""" Set heatpump mode
Args:
feature: 'QUIET', 'ECONAVI', or 'POWERFUL'
"""
response = None
try:
response = requests.put(urls.set_heatpump_feature(self._giid, device_label, feature), headers={'Accept': 'application/json', 'Content-Type': 'application/json', 'Cookie': 'vid={}'.format(self._vid)}) # depends on [control=['try'], data=[]]
except requests.exceptions.RequestException as ex:
raise RequestError(ex) # depends on [control=['except'], data=['ex']]
_validate_response(response)
return json.loads(response.text) |
def query_file(self, file_sha, verbose=False):
    """Query the VirusTotal Service for a file hash.

    Args:
        file_sha (str): The file sha1 (40 hex chars) or sha256
            (64 hex chars) hash.
        verbose (bool): Passed through to the internal query method.

    Returns:
        dict: the query result, or a ``{'malformed': True}`` marker when
        the hash has an unexpected length.
    """
    # A sha1 digest is 40 characters long, a sha256 digest is 64.
    if len(file_sha) in (40, 64):
        # Delegate to the shared internal query method.
        return self._query('file', file_sha, verbose)
    print('File sha looks malformed: {:s}'.format(file_sha))
    return {'file_sha': file_sha, 'malformed': True}
constant[Query the VirusTotal Service
Args:
file_sha (str): The file sha1 or sha256 hash
url (str): The domain/url to be queried (default=None)
]
if compare[call[name[len], parameter[name[file_sha]]] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da204566f50>, <ast.Constant object at 0x7da204567010>]]] begin[:]
call[name[print], parameter[call[constant[File sha looks malformed: {:s}].format, parameter[name[file_sha]]]]]
return[dictionary[[<ast.Constant object at 0x7da204567610>, <ast.Constant object at 0x7da2045640d0>], [<ast.Name object at 0x7da204565960>, <ast.Constant object at 0x7da2045658d0>]]]
return[call[name[self]._query, parameter[constant[file], name[file_sha], name[verbose]]]] | keyword[def] identifier[query_file] ( identifier[self] , identifier[file_sha] , identifier[verbose] = keyword[False] ):
literal[string]
keyword[if] identifier[len] ( identifier[file_sha] ) keyword[not] keyword[in] [ literal[int] , literal[int] ]:
identifier[print] ( literal[string] . identifier[format] ( identifier[file_sha] ))
keyword[return] { literal[string] : identifier[file_sha] , literal[string] : keyword[True] }
keyword[return] identifier[self] . identifier[_query] ( literal[string] , identifier[file_sha] , identifier[verbose] ) | def query_file(self, file_sha, verbose=False):
"""Query the VirusTotal Service
Args:
file_sha (str): The file sha1 or sha256 hash
url (str): The domain/url to be queried (default=None)
"""
# Sanity check sha hash input
if len(file_sha) not in [64, 40]: # sha256 and sha1 lengths
print('File sha looks malformed: {:s}'.format(file_sha))
return {'file_sha': file_sha, 'malformed': True} # depends on [control=['if'], data=[]]
# Call and return the internal query method
return self._query('file', file_sha, verbose) |
def median_high(data):
    """Return the high median of data.

    When the number of data points is odd, the middle value is returned.
    When it is even, the larger of the two middle values is returned.
    """
    ordered = sorted(data)
    if not ordered:
        raise StatisticsError("no median for empty data")
    # For even lengths len//2 indexes the upper of the two middle values.
    return ordered[len(ordered) // 2]
constant[Return the high median of data.
When the number of data points is odd, the middle value is returned.
When it is even, the larger of the two middle values is returned.
]
variable[data] assign[=] call[name[sorted], parameter[name[data]]]
variable[n] assign[=] call[name[len], parameter[name[data]]]
if compare[name[n] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b0a22320>
return[call[name[data]][binary_operation[name[n] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]]] | keyword[def] identifier[median_high] ( identifier[data] ):
literal[string]
identifier[data] = identifier[sorted] ( identifier[data] )
identifier[n] = identifier[len] ( identifier[data] )
keyword[if] identifier[n] == literal[int] :
keyword[raise] identifier[StatisticsError] ( literal[string] )
keyword[return] identifier[data] [ identifier[n] // literal[int] ] | def median_high(data):
"""Return the high median of data.
When the number of data points is odd, the middle value is returned.
When it is even, the larger of the two middle values is returned.
"""
data = sorted(data)
n = len(data)
if n == 0:
raise StatisticsError('no median for empty data') # depends on [control=['if'], data=[]]
return data[n // 2] |
def _parse_typedef(line, _rawtypedef):
    """Parse a typedef line.
    The typedef is organized as a succesion of ``key:value`` pairs
    that are extracted into the same dictionnary until a new
    header is encountered
    Arguments:
        line (str): the line containing a typedef statement
    """
    if "[Typedef]" not in line:
        # Accumulate the pair into the most recently opened typedef.
        key, value = line.split(':', 1)
        _rawtypedef[-1][key.strip()].append(value.strip())
    else:
        # A new [Typedef] header opens a fresh accumulator dict.
        _rawtypedef.append(collections.defaultdict(list))
constant[Parse a typedef line.
The typedef is organized as a succesion of ``key:value`` pairs
that are extracted into the same dictionnary until a new
header is encountered
Arguments:
line (str): the line containing a typedef statement
]
if compare[constant[[Typedef]] in name[line]] begin[:]
call[name[_rawtypedef].append, parameter[call[name[collections].defaultdict, parameter[name[list]]]]] | keyword[def] identifier[_parse_typedef] ( identifier[line] , identifier[_rawtypedef] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[_rawtypedef] . identifier[append] ( identifier[collections] . identifier[defaultdict] ( identifier[list] ))
keyword[else] :
identifier[key] , identifier[value] = identifier[line] . identifier[split] ( literal[string] , literal[int] )
identifier[_rawtypedef] [- literal[int] ][ identifier[key] . identifier[strip] ()]. identifier[append] ( identifier[value] . identifier[strip] ()) | def _parse_typedef(line, _rawtypedef):
"""Parse a typedef line.
The typedef is organized as a succesion of ``key:value`` pairs
that are extracted into the same dictionnary until a new
header is encountered
Arguments:
line (str): the line containing a typedef statement
"""
if '[Typedef]' in line:
_rawtypedef.append(collections.defaultdict(list)) # depends on [control=['if'], data=[]]
else:
(key, value) = line.split(':', 1)
_rawtypedef[-1][key.strip()].append(value.strip()) |
def _encode_ndef_uri_type(self, data):
    """
    Implement NDEF URI Identifier Code.
    This is a small hack to replace some well known prefixes (such as http://)
    with a one byte code. If the prefix is not known, 0x00 is used.
    """
    code_byte = 0x0
    for known_code, known_prefix in uri_identifiers:
        head = data[:len(known_prefix)].decode('latin-1').lower()
        if head == known_prefix:
            # Found a well-known prefix: record its code and strip it.
            code_byte = known_code
            data = data[len(known_prefix):]
            break
    return yubico_util.chr_byte(code_byte) + data
constant[
Implement NDEF URI Identifier Code.
This is a small hack to replace some well known prefixes (such as http://)
with a one byte code. If the prefix is not known, 0x00 is used.
]
variable[t] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b08454b0>, <ast.Name object at 0x7da1b0847ac0>]]] in starred[name[uri_identifiers]] begin[:]
if compare[call[call[call[name[data]][<ast.Slice object at 0x7da1b0847a60>].decode, parameter[constant[latin-1]]].lower, parameter[]] equal[==] name[prefix]] begin[:]
variable[t] assign[=] name[code]
variable[data] assign[=] call[name[data]][<ast.Slice object at 0x7da1b0846290>]
break
variable[data] assign[=] binary_operation[call[name[yubico_util].chr_byte, parameter[name[t]]] + name[data]]
return[name[data]] | keyword[def] identifier[_encode_ndef_uri_type] ( identifier[self] , identifier[data] ):
literal[string]
identifier[t] = literal[int]
keyword[for] ( identifier[code] , identifier[prefix] ) keyword[in] identifier[uri_identifiers] :
keyword[if] identifier[data] [: identifier[len] ( identifier[prefix] )]. identifier[decode] ( literal[string] ). identifier[lower] ()== identifier[prefix] :
identifier[t] = identifier[code]
identifier[data] = identifier[data] [ identifier[len] ( identifier[prefix] ):]
keyword[break]
identifier[data] = identifier[yubico_util] . identifier[chr_byte] ( identifier[t] )+ identifier[data]
keyword[return] identifier[data] | def _encode_ndef_uri_type(self, data):
"""
Implement NDEF URI Identifier Code.
This is a small hack to replace some well known prefixes (such as http://)
with a one byte code. If the prefix is not known, 0x00 is used.
"""
t = 0
for (code, prefix) in uri_identifiers:
if data[:len(prefix)].decode('latin-1').lower() == prefix:
t = code
data = data[len(prefix):]
break # depends on [control=['if'], data=['prefix']] # depends on [control=['for'], data=[]]
data = yubico_util.chr_byte(t) + data
return data |
def __wrap_accepted_val(self, value):
    """Wrap accepted value in the list if yet not wrapped.
    """
    # Lists pass through unchanged; tuples become lists; anything
    # else is wrapped in a single-element list.
    if isinstance(value, list):
        return value
    if isinstance(value, tuple):
        return list(value)
    return [value]
constant[Wrap accepted value in the list if yet not wrapped.
]
if call[name[isinstance], parameter[name[value], name[tuple]]] begin[:]
variable[value] assign[=] call[name[list], parameter[name[value]]]
return[name[value]] | keyword[def] identifier[__wrap_accepted_val] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[tuple] ):
identifier[value] = identifier[list] ( identifier[value] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[value] , identifier[list] ):
identifier[value] =[ identifier[value] ]
keyword[return] identifier[value] | def __wrap_accepted_val(self, value):
"""Wrap accepted value in the list if yet not wrapped.
"""
if isinstance(value, tuple):
value = list(value) # depends on [control=['if'], data=[]]
elif not isinstance(value, list):
value = [value] # depends on [control=['if'], data=[]]
return value |
def mean_temporal_distance(self):
    """
    Get mean temporal distance (in seconds) to the target.
    Returns
    -------
    mean_temporal_distance : float
    """
    # Mean value of the profile = total area under it divided by the
    # width of the departure-time window.
    window_width = self.end_time_dep - self.start_time_dep
    area = sum(block.area() for block in self._profile_blocks)
    return area / window_width
constant[
Get mean temporal distance (in seconds) to the target.
Returns
-------
mean_temporal_distance : float
]
variable[total_width] assign[=] binary_operation[name[self].end_time_dep - name[self].start_time_dep]
variable[total_area] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b012d330>]]
return[binary_operation[name[total_area] / name[total_width]]] | keyword[def] identifier[mean_temporal_distance] ( identifier[self] ):
literal[string]
identifier[total_width] = identifier[self] . identifier[end_time_dep] - identifier[self] . identifier[start_time_dep]
identifier[total_area] = identifier[sum] ([ identifier[block] . identifier[area] () keyword[for] identifier[block] keyword[in] identifier[self] . identifier[_profile_blocks] ])
keyword[return] identifier[total_area] / identifier[total_width] | def mean_temporal_distance(self):
"""
Get mean temporal distance (in seconds) to the target.
Returns
-------
mean_temporal_distance : float
"""
total_width = self.end_time_dep - self.start_time_dep
total_area = sum([block.area() for block in self._profile_blocks])
return total_area / total_width |
def requirements(debug=True, with_examples=True, with_pgi=None):
    """
    Build requirements based on flags
    :param debug: Print the chosen options and the resulting list
    :param with_pgi: Use 'pgi' instead of 'gi' - False on CPython, True elsewhere
    :param with_examples:
    :return:
    """
    reqs = list(BASE_REQUIREMENTS)
    # Default: only Jython needs the pure-python 'pgi' binding.
    with_pgi = is_jython if with_pgi is None else with_pgi
    if debug:
        print("setup options: ")
        print("with_pgi: ", "yes" if with_pgi else "no")
        print("with_examples: ", "yes" if with_examples else "no")
    if not with_pgi:
        reqs.append(PYGOBJECT)
    else:
        reqs.append("pgi")
        if debug:
            print("warning, as of April 2019 typography does not work with pgi")
    if with_examples:
        reqs.extend(EXAMPLE_REQUIREMENTS)
    if debug:
        print("")
        print("")
        for entry in reqs:
            print(entry)
    return reqs
constant[
Build requirements based on flags
:param with_pgi: Use 'pgi' instead of 'gi' - False on CPython, True elsewhere
:param with_examples:
:return:
]
variable[reqs] assign[=] call[name[list], parameter[name[BASE_REQUIREMENTS]]]
if compare[name[with_pgi] is constant[None]] begin[:]
variable[with_pgi] assign[=] name[is_jython]
if name[debug] begin[:]
call[name[print], parameter[constant[setup options: ]]]
call[name[print], parameter[constant[with_pgi: ], <ast.IfExp object at 0x7da204623250>]]
call[name[print], parameter[constant[with_examples: ], <ast.IfExp object at 0x7da204621ea0>]]
if name[with_pgi] begin[:]
call[name[reqs].append, parameter[constant[pgi]]]
if name[debug] begin[:]
call[name[print], parameter[constant[warning, as of April 2019 typography does not work with pgi]]]
if name[with_examples] begin[:]
call[name[reqs].extend, parameter[name[EXAMPLE_REQUIREMENTS]]]
if name[debug] begin[:]
call[name[print], parameter[constant[]]]
call[name[print], parameter[constant[]]]
for taget[name[req]] in starred[name[reqs]] begin[:]
call[name[print], parameter[name[req]]]
return[name[reqs]] | keyword[def] identifier[requirements] ( identifier[debug] = keyword[True] , identifier[with_examples] = keyword[True] , identifier[with_pgi] = keyword[None] ):
literal[string]
identifier[reqs] = identifier[list] ( identifier[BASE_REQUIREMENTS] )
keyword[if] identifier[with_pgi] keyword[is] keyword[None] :
identifier[with_pgi] = identifier[is_jython]
keyword[if] identifier[debug] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] , literal[string] keyword[if] identifier[with_pgi] keyword[else] literal[string] )
identifier[print] ( literal[string] , literal[string] keyword[if] identifier[with_examples] keyword[else] literal[string] )
keyword[if] identifier[with_pgi] :
identifier[reqs] . identifier[append] ( literal[string] )
keyword[if] identifier[debug] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[reqs] . identifier[append] ( identifier[PYGOBJECT] )
keyword[if] identifier[with_examples] :
identifier[reqs] . identifier[extend] ( identifier[EXAMPLE_REQUIREMENTS] )
keyword[if] identifier[debug] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[for] identifier[req] keyword[in] identifier[reqs] :
identifier[print] ( identifier[req] )
keyword[return] identifier[reqs] | def requirements(debug=True, with_examples=True, with_pgi=None):
"""
Build requirements based on flags
:param with_pgi: Use 'pgi' instead of 'gi' - False on CPython, True elsewhere
:param with_examples:
:return:
"""
reqs = list(BASE_REQUIREMENTS)
if with_pgi is None:
with_pgi = is_jython # depends on [control=['if'], data=['with_pgi']]
if debug:
print('setup options: ')
print('with_pgi: ', 'yes' if with_pgi else 'no')
print('with_examples: ', 'yes' if with_examples else 'no') # depends on [control=['if'], data=[]]
if with_pgi:
reqs.append('pgi')
if debug:
print('warning, as of April 2019 typography does not work with pgi') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
reqs.append(PYGOBJECT)
if with_examples:
reqs.extend(EXAMPLE_REQUIREMENTS) # depends on [control=['if'], data=[]]
if debug:
print('')
print('')
for req in reqs:
print(req) # depends on [control=['for'], data=['req']] # depends on [control=['if'], data=[]]
return reqs |
def share(self, accounts):
"""
Create a share
"""
if not isinstance(accounts, (list, tuple)):
msg = "Video.share expects an iterable argument"
raise exceptions.PyBrightcoveError(msg)
raise exceptions.PyBrightcoveError("Not yet implemented") | def function[share, parameter[self, accounts]]:
constant[
Create a share
]
if <ast.UnaryOp object at 0x7da207f036d0> begin[:]
variable[msg] assign[=] constant[Video.share expects an iterable argument]
<ast.Raise object at 0x7da207f022f0>
<ast.Raise object at 0x7da207f01ff0> | keyword[def] identifier[share] ( identifier[self] , identifier[accounts] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[accounts] ,( identifier[list] , identifier[tuple] )):
identifier[msg] = literal[string]
keyword[raise] identifier[exceptions] . identifier[PyBrightcoveError] ( identifier[msg] )
keyword[raise] identifier[exceptions] . identifier[PyBrightcoveError] ( literal[string] ) | def share(self, accounts):
"""
Create a share
"""
if not isinstance(accounts, (list, tuple)):
msg = 'Video.share expects an iterable argument'
raise exceptions.PyBrightcoveError(msg) # depends on [control=['if'], data=[]]
raise exceptions.PyBrightcoveError('Not yet implemented') |
def on_key_up(self):
    """ Process key up event by updating buffer and release key. """
    key = self.last_pressed
    if key is None:
        return
    # Mark the key released, fold it into the text buffer, publish
    # the new buffer, and forget the pressed key.
    self.set_key_state(key, 0)
    self.buffer = key.update_buffer(self.buffer)
    self.text_consumer(self.buffer)
    self.last_pressed = None
constant[ Process key up event by updating buffer and release key. ]
if compare[name[self].last_pressed is_not constant[None]] begin[:]
call[name[self].set_key_state, parameter[name[self].last_pressed, constant[0]]]
name[self].buffer assign[=] call[name[self].last_pressed.update_buffer, parameter[name[self].buffer]]
call[name[self].text_consumer, parameter[name[self].buffer]]
name[self].last_pressed assign[=] constant[None] | keyword[def] identifier[on_key_up] ( identifier[self] ):
literal[string]
keyword[if] ( identifier[self] . identifier[last_pressed] keyword[is] keyword[not] keyword[None] ):
identifier[self] . identifier[set_key_state] ( identifier[self] . identifier[last_pressed] , literal[int] )
identifier[self] . identifier[buffer] = identifier[self] . identifier[last_pressed] . identifier[update_buffer] ( identifier[self] . identifier[buffer] )
identifier[self] . identifier[text_consumer] ( identifier[self] . identifier[buffer] )
identifier[self] . identifier[last_pressed] = keyword[None] | def on_key_up(self):
""" Process key up event by updating buffer and release key. """
if self.last_pressed is not None:
self.set_key_state(self.last_pressed, 0)
self.buffer = self.last_pressed.update_buffer(self.buffer)
self.text_consumer(self.buffer)
self.last_pressed = None # depends on [control=['if'], data=[]] |
def parse_theme(self, xml):
    """ Parses a theme from XML returned by Kuler.
    Gets the theme's id, label and swatches.
    All of the swatches are converted to RGB.
    If we have a full description for a theme id in cache,
    parse that to get tags associated with the theme.
    """
    kt = KulerTheme()
    # The author name is taken from the second child of <author>
    # (presumably a <label> element) -- its first text node holds the name.
    kt.author = xml.getElementsByTagName("author")[0]
    kt.author = kt.author.childNodes[1].childNodes[0].nodeValue
    kt.id = int(self.parse_tag(xml, "id"))
    kt.label = self.parse_tag(xml, "label")
    # One color mode applies to every swatch in the theme.
    mode = self.parse_tag(xml, "mode")
    for swatch in xml.getElementsByTagName("swatch"):
        # Channel values c1..c4; their meaning depends on the mode
        # (c4 is only consumed by the CMYK branch).
        c1 = float(self.parse_tag(swatch, "c1"))
        c2 = float(self.parse_tag(swatch, "c2"))
        c3 = float(self.parse_tag(swatch, "c3"))
        c4 = float(self.parse_tag(swatch, "c4"))
        # Normalize each swatch to an RGB tuple before appending.
        if mode == "rgb":
            kt.append((c1,c2,c3))
        if mode == "cmyk":
            kt.append(cmyk_to_rgb(c1,c2,c3,c4))
        if mode == "hsv":
            kt.append(colorsys.hsv_to_rgb(c1,c2,c3))
        if mode == "hex":
            # NOTE(review): c1 was already passed through float() above,
            # yet hex_to_rgb() receives it here -- float() on a hex string
            # like "ffaa00" would raise ValueError before reaching this
            # branch. Confirm whether hex-mode themes actually occur.
            kt.append(hex_to_rgb(c1))
        if mode == "lab":
            kt.append(lab_to_rgb(c1,c2,c3))
    # If we have the full theme in cache,
    # parse tags from it.
    if self._cache.exists(self.id_string + str(kt.id)):
        xml = self._cache.read(self.id_string + str(kt.id))
        xml = minidom.parseString(xml)
        for tags in xml.getElementsByTagName("tag"):
            # Each <tag> label is a space-separated list of tag words.
            tags = self.parse_tag(tags, "label")
            tags = tags.split(" ")
            kt.tags.extend(tags)
    return kt
constant[ Parses a theme from XML returned by Kuler.
Gets the theme's id, label and swatches.
All of the swatches are converted to RGB.
If we have a full description for a theme id in cache,
parse that to get tags associated with the theme.
]
variable[kt] assign[=] call[name[KulerTheme], parameter[]]
name[kt].author assign[=] call[call[name[xml].getElementsByTagName, parameter[constant[author]]]][constant[0]]
name[kt].author assign[=] call[call[name[kt].author.childNodes][constant[1]].childNodes][constant[0]].nodeValue
name[kt].id assign[=] call[name[int], parameter[call[name[self].parse_tag, parameter[name[xml], constant[id]]]]]
name[kt].label assign[=] call[name[self].parse_tag, parameter[name[xml], constant[label]]]
variable[mode] assign[=] call[name[self].parse_tag, parameter[name[xml], constant[mode]]]
for taget[name[swatch]] in starred[call[name[xml].getElementsByTagName, parameter[constant[swatch]]]] begin[:]
variable[c1] assign[=] call[name[float], parameter[call[name[self].parse_tag, parameter[name[swatch], constant[c1]]]]]
variable[c2] assign[=] call[name[float], parameter[call[name[self].parse_tag, parameter[name[swatch], constant[c2]]]]]
variable[c3] assign[=] call[name[float], parameter[call[name[self].parse_tag, parameter[name[swatch], constant[c3]]]]]
variable[c4] assign[=] call[name[float], parameter[call[name[self].parse_tag, parameter[name[swatch], constant[c4]]]]]
if compare[name[mode] equal[==] constant[rgb]] begin[:]
call[name[kt].append, parameter[tuple[[<ast.Name object at 0x7da20c6c47c0>, <ast.Name object at 0x7da20c6c6ce0>, <ast.Name object at 0x7da20c6c7370>]]]]
if compare[name[mode] equal[==] constant[cmyk]] begin[:]
call[name[kt].append, parameter[call[name[cmyk_to_rgb], parameter[name[c1], name[c2], name[c3], name[c4]]]]]
if compare[name[mode] equal[==] constant[hsv]] begin[:]
call[name[kt].append, parameter[call[name[colorsys].hsv_to_rgb, parameter[name[c1], name[c2], name[c3]]]]]
if compare[name[mode] equal[==] constant[hex]] begin[:]
call[name[kt].append, parameter[call[name[hex_to_rgb], parameter[name[c1]]]]]
if compare[name[mode] equal[==] constant[lab]] begin[:]
call[name[kt].append, parameter[call[name[lab_to_rgb], parameter[name[c1], name[c2], name[c3]]]]]
if call[name[self]._cache.exists, parameter[binary_operation[name[self].id_string + call[name[str], parameter[name[kt].id]]]]] begin[:]
variable[xml] assign[=] call[name[self]._cache.read, parameter[binary_operation[name[self].id_string + call[name[str], parameter[name[kt].id]]]]]
variable[xml] assign[=] call[name[minidom].parseString, parameter[name[xml]]]
for taget[name[tags]] in starred[call[name[xml].getElementsByTagName, parameter[constant[tag]]]] begin[:]
variable[tags] assign[=] call[name[self].parse_tag, parameter[name[tags], constant[label]]]
variable[tags] assign[=] call[name[tags].split, parameter[constant[ ]]]
call[name[kt].tags.extend, parameter[name[tags]]]
return[name[kt]] | keyword[def] identifier[parse_theme] ( identifier[self] , identifier[xml] ):
literal[string]
identifier[kt] = identifier[KulerTheme] ()
identifier[kt] . identifier[author] = identifier[xml] . identifier[getElementsByTagName] ( literal[string] )[ literal[int] ]
identifier[kt] . identifier[author] = identifier[kt] . identifier[author] . identifier[childNodes] [ literal[int] ]. identifier[childNodes] [ literal[int] ]. identifier[nodeValue]
identifier[kt] . identifier[id] = identifier[int] ( identifier[self] . identifier[parse_tag] ( identifier[xml] , literal[string] ))
identifier[kt] . identifier[label] = identifier[self] . identifier[parse_tag] ( identifier[xml] , literal[string] )
identifier[mode] = identifier[self] . identifier[parse_tag] ( identifier[xml] , literal[string] )
keyword[for] identifier[swatch] keyword[in] identifier[xml] . identifier[getElementsByTagName] ( literal[string] ):
identifier[c1] = identifier[float] ( identifier[self] . identifier[parse_tag] ( identifier[swatch] , literal[string] ))
identifier[c2] = identifier[float] ( identifier[self] . identifier[parse_tag] ( identifier[swatch] , literal[string] ))
identifier[c3] = identifier[float] ( identifier[self] . identifier[parse_tag] ( identifier[swatch] , literal[string] ))
identifier[c4] = identifier[float] ( identifier[self] . identifier[parse_tag] ( identifier[swatch] , literal[string] ))
keyword[if] identifier[mode] == literal[string] :
identifier[kt] . identifier[append] (( identifier[c1] , identifier[c2] , identifier[c3] ))
keyword[if] identifier[mode] == literal[string] :
identifier[kt] . identifier[append] ( identifier[cmyk_to_rgb] ( identifier[c1] , identifier[c2] , identifier[c3] , identifier[c4] ))
keyword[if] identifier[mode] == literal[string] :
identifier[kt] . identifier[append] ( identifier[colorsys] . identifier[hsv_to_rgb] ( identifier[c1] , identifier[c2] , identifier[c3] ))
keyword[if] identifier[mode] == literal[string] :
identifier[kt] . identifier[append] ( identifier[hex_to_rgb] ( identifier[c1] ))
keyword[if] identifier[mode] == literal[string] :
identifier[kt] . identifier[append] ( identifier[lab_to_rgb] ( identifier[c1] , identifier[c2] , identifier[c3] ))
keyword[if] identifier[self] . identifier[_cache] . identifier[exists] ( identifier[self] . identifier[id_string] + identifier[str] ( identifier[kt] . identifier[id] )):
identifier[xml] = identifier[self] . identifier[_cache] . identifier[read] ( identifier[self] . identifier[id_string] + identifier[str] ( identifier[kt] . identifier[id] ))
identifier[xml] = identifier[minidom] . identifier[parseString] ( identifier[xml] )
keyword[for] identifier[tags] keyword[in] identifier[xml] . identifier[getElementsByTagName] ( literal[string] ):
identifier[tags] = identifier[self] . identifier[parse_tag] ( identifier[tags] , literal[string] )
identifier[tags] = identifier[tags] . identifier[split] ( literal[string] )
identifier[kt] . identifier[tags] . identifier[extend] ( identifier[tags] )
keyword[return] identifier[kt] | def parse_theme(self, xml):
""" Parses a theme from XML returned by Kuler.
Gets the theme's id, label and swatches.
All of the swatches are converted to RGB.
If we have a full description for a theme id in cache,
parse that to get tags associated with the theme.
"""
kt = KulerTheme()
kt.author = xml.getElementsByTagName('author')[0]
kt.author = kt.author.childNodes[1].childNodes[0].nodeValue
kt.id = int(self.parse_tag(xml, 'id'))
kt.label = self.parse_tag(xml, 'label')
mode = self.parse_tag(xml, 'mode')
for swatch in xml.getElementsByTagName('swatch'):
c1 = float(self.parse_tag(swatch, 'c1'))
c2 = float(self.parse_tag(swatch, 'c2'))
c3 = float(self.parse_tag(swatch, 'c3'))
c4 = float(self.parse_tag(swatch, 'c4'))
if mode == 'rgb':
kt.append((c1, c2, c3)) # depends on [control=['if'], data=[]]
if mode == 'cmyk':
kt.append(cmyk_to_rgb(c1, c2, c3, c4)) # depends on [control=['if'], data=[]]
if mode == 'hsv':
kt.append(colorsys.hsv_to_rgb(c1, c2, c3)) # depends on [control=['if'], data=[]]
if mode == 'hex':
kt.append(hex_to_rgb(c1)) # depends on [control=['if'], data=[]]
if mode == 'lab':
kt.append(lab_to_rgb(c1, c2, c3)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['swatch']]
# If we have the full theme in cache,
# parse tags from it.
if self._cache.exists(self.id_string + str(kt.id)):
xml = self._cache.read(self.id_string + str(kt.id))
xml = minidom.parseString(xml) # depends on [control=['if'], data=[]]
for tags in xml.getElementsByTagName('tag'):
tags = self.parse_tag(tags, 'label')
tags = tags.split(' ')
kt.tags.extend(tags) # depends on [control=['for'], data=['tags']]
return kt |
def batch_save(self, *documents):
    """
    Inserts or updates every document specified in documents.
    :param documents: Array of documents to save as dictionaries
    :type documents: ``array`` of ``dict``
    :return: Results of update operation as overall stats
    :rtype: ``dict``
    """
    if not documents:
        raise Exception('Must have at least one document.')
    payload = json.dumps(documents)
    response = self._post('batch_save',
                          headers=KVStoreCollectionData.JSON_HEADER,
                          body=payload)
    return json.loads(response.body.read().decode('utf-8'))
constant[
Inserts or updates every document specified in documents.
:param documents: Array of documents to save as dictionaries
:type documents: ``array`` of ``dict``
:return: Results of update operation as overall stats
:rtype: ``dict``
]
if compare[call[name[len], parameter[name[documents]]] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da1b170c310>
variable[data] assign[=] call[name[json].dumps, parameter[name[documents]]]
return[call[name[json].loads, parameter[call[call[call[name[self]._post, parameter[constant[batch_save]]].body.read, parameter[]].decode, parameter[constant[utf-8]]]]]] | keyword[def] identifier[batch_save] ( identifier[self] ,* identifier[documents] ):
literal[string]
keyword[if] identifier[len] ( identifier[documents] )< literal[int] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[data] = identifier[json] . identifier[dumps] ( identifier[documents] )
keyword[return] identifier[json] . identifier[loads] ( identifier[self] . identifier[_post] ( literal[string] , identifier[headers] = identifier[KVStoreCollectionData] . identifier[JSON_HEADER] , identifier[body] = identifier[data] ). identifier[body] . identifier[read] (). identifier[decode] ( literal[string] )) | def batch_save(self, *documents):
"""
Inserts or updates every document specified in documents.
:param documents: Array of documents to save as dictionaries
:type documents: ``array`` of ``dict``
:return: Results of update operation as overall stats
:rtype: ``dict``
"""
if len(documents) < 1:
raise Exception('Must have at least one document.') # depends on [control=['if'], data=[]]
data = json.dumps(documents)
return json.loads(self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) |
def compute_video_metrics_from_predictions(predictions, decode_hparams):
    """Computes metrics from predictions.
    Args:
      predictions: list of list of dicts.
                   outer length: num_decodes, inner_length: num_samples
      decode_hparams: Decode hparams. instance of HParams.
    Returns:
      statistics: dict of Tensors, key being the metric with each Tensor
                  having the shape (num_samples, num_frames).
    """
    psnr_per_decode = []
    ssim_per_decode = []
    for single_decode in predictions:
        # Each decode yields one (psnr, ssim) pair of per-sample arrays.
        zipped = get_zipped_dataset_from_predictions(single_decode)
        psnr, ssim = compute_one_decoding_video_metrics(*zipped)
        psnr_per_decode.append(psnr)
        ssim_per_decode.append(ssim)
    all_results = {
        "PSNR": np.array(psnr_per_decode),
        "SSIM": np.array(ssim_per_decode),
    }
    return compute_all_metrics_statistics(all_results)
constant[Computes metrics from predictions.
Args:
predictions: list of list of dicts.
outer length: num_decodes, inner_length: num_samples
decode_hparams: Decode hparams. instance of HParams.
Returns:
statistics: dict of Tensors, key being the metric with each Tensor
having the shape (num_samples, num_frames).
]
variable[all_results] assign[=] dictionary[[], []]
<ast.Tuple object at 0x7da1b203f7f0> assign[=] tuple[[<ast.List object at 0x7da1b20fb0a0>, <ast.List object at 0x7da1b20fba00>]]
for taget[name[single_decode]] in starred[name[predictions]] begin[:]
variable[args] assign[=] call[name[get_zipped_dataset_from_predictions], parameter[name[single_decode]]]
<ast.Tuple object at 0x7da1b20f8f10> assign[=] call[name[compute_one_decoding_video_metrics], parameter[<ast.Starred object at 0x7da1b20b3cd0>]]
call[name[psnr_all_decodes].append, parameter[name[psnr_single]]]
call[name[ssim_all_decodes].append, parameter[name[ssim_single]]]
variable[psnr_all_decodes] assign[=] call[name[np].array, parameter[name[psnr_all_decodes]]]
variable[ssim_all_decodes] assign[=] call[name[np].array, parameter[name[ssim_all_decodes]]]
call[name[all_results].update, parameter[dictionary[[<ast.Constant object at 0x7da1b20b3e80>, <ast.Constant object at 0x7da1b20b2bc0>], [<ast.Name object at 0x7da1b20b3b50>, <ast.Name object at 0x7da1b20b0d90>]]]]
return[call[name[compute_all_metrics_statistics], parameter[name[all_results]]]] | keyword[def] identifier[compute_video_metrics_from_predictions] ( identifier[predictions] , identifier[decode_hparams] ):
literal[string]
identifier[all_results] ={}
identifier[ssim_all_decodes] , identifier[psnr_all_decodes] =[],[]
keyword[for] identifier[single_decode] keyword[in] identifier[predictions] :
identifier[args] = identifier[get_zipped_dataset_from_predictions] ( identifier[single_decode] )
identifier[psnr_single] , identifier[ssim_single] = identifier[compute_one_decoding_video_metrics] (* identifier[args] )
identifier[psnr_all_decodes] . identifier[append] ( identifier[psnr_single] )
identifier[ssim_all_decodes] . identifier[append] ( identifier[ssim_single] )
identifier[psnr_all_decodes] = identifier[np] . identifier[array] ( identifier[psnr_all_decodes] )
identifier[ssim_all_decodes] = identifier[np] . identifier[array] ( identifier[ssim_all_decodes] )
identifier[all_results] . identifier[update] ({ literal[string] : identifier[psnr_all_decodes] , literal[string] : identifier[ssim_all_decodes] })
keyword[return] identifier[compute_all_metrics_statistics] ( identifier[all_results] ) | def compute_video_metrics_from_predictions(predictions, decode_hparams):
"""Computes metrics from predictions.
Args:
predictions: list of list of dicts.
outer length: num_decodes, inner_length: num_samples
decode_hparams: Decode hparams. instance of HParams.
Returns:
statistics: dict of Tensors, key being the metric with each Tensor
having the shape (num_samples, num_frames).
"""
all_results = {}
(ssim_all_decodes, psnr_all_decodes) = ([], [])
for single_decode in predictions:
args = get_zipped_dataset_from_predictions(single_decode)
(psnr_single, ssim_single) = compute_one_decoding_video_metrics(*args)
psnr_all_decodes.append(psnr_single)
ssim_all_decodes.append(ssim_single) # depends on [control=['for'], data=['single_decode']]
psnr_all_decodes = np.array(psnr_all_decodes)
ssim_all_decodes = np.array(ssim_all_decodes)
all_results.update({'PSNR': psnr_all_decodes, 'SSIM': ssim_all_decodes})
return compute_all_metrics_statistics(all_results) |
def rmtree(self, path):
    """
    Removes directory structure, similar to shutil.rmtree.
    """
    # Bottom-up walk so children are freed before their parents.
    for root, storage, streams in self.walk(path, topdown=False):
        for stream in streams:
            is_mini = stream.byte_size < self.min_stream_max_size
            self.free_fat_chain(stream.sector_id, is_mini)
            self.free_dir_entry(stream)
        for entry in storage:
            self.free_dir_entry(entry)
        root.child_id = None
    # remove root item
    self.remove(path)
constant[
Removes directory structure, similar to shutil.rmtree.
]
for taget[tuple[[<ast.Name object at 0x7da18bcc9510>, <ast.Name object at 0x7da18bcc9fc0>, <ast.Name object at 0x7da18bcc9f90>]]] in starred[call[name[self].walk, parameter[name[path]]]] begin[:]
for taget[name[item]] in starred[name[streams]] begin[:]
call[name[self].free_fat_chain, parameter[name[item].sector_id, compare[name[item].byte_size less[<] name[self].min_stream_max_size]]]
call[name[self].free_dir_entry, parameter[name[item]]]
for taget[name[item]] in starred[name[storage]] begin[:]
call[name[self].free_dir_entry, parameter[name[item]]]
name[root].child_id assign[=] constant[None]
call[name[self].remove, parameter[name[path]]] | keyword[def] identifier[rmtree] ( identifier[self] , identifier[path] ):
literal[string]
keyword[for] identifier[root] , identifier[storage] , identifier[streams] keyword[in] identifier[self] . identifier[walk] ( identifier[path] , identifier[topdown] = keyword[False] ):
keyword[for] identifier[item] keyword[in] identifier[streams] :
identifier[self] . identifier[free_fat_chain] ( identifier[item] . identifier[sector_id] , identifier[item] . identifier[byte_size] < identifier[self] . identifier[min_stream_max_size] )
identifier[self] . identifier[free_dir_entry] ( identifier[item] )
keyword[for] identifier[item] keyword[in] identifier[storage] :
identifier[self] . identifier[free_dir_entry] ( identifier[item] )
identifier[root] . identifier[child_id] = keyword[None]
identifier[self] . identifier[remove] ( identifier[path] ) | def rmtree(self, path):
"""
Removes directory structure, similar to shutil.rmtree.
"""
for (root, storage, streams) in self.walk(path, topdown=False):
for item in streams:
self.free_fat_chain(item.sector_id, item.byte_size < self.min_stream_max_size)
self.free_dir_entry(item) # depends on [control=['for'], data=['item']]
for item in storage:
self.free_dir_entry(item) # depends on [control=['for'], data=['item']]
root.child_id = None # depends on [control=['for'], data=[]]
# remove root item
self.remove(path) |
def derive_child_context(self, whence):
    """Return a new scalar _HandlerContext that is a child of this one.

    The child inherits the structural state of the current context
    (container, queue, depth) and the given *whence* coroutine, while
    all scalar-specific fields (field name, annotations, ion type,
    pending symbol) start out unset and the value buffer starts empty.
    """
    # Children inherit structural state but always begin with a
    # fresh, empty value buffer.
    child_kwargs = dict(
        container=self.container,
        queue=self.queue,
        field_name=None,
        annotations=None,
        depth=self.depth,
        whence=whence,
        value=bytearray(),
        ion_type=None,
        pending_symbol=None,
    )
    return _HandlerContext(**child_kwargs)
constant[Derives a scalar context as a child of the current context.]
return[call[name[_HandlerContext], parameter[]]] | keyword[def] identifier[derive_child_context] ( identifier[self] , identifier[whence] ):
literal[string]
keyword[return] identifier[_HandlerContext] (
identifier[container] = identifier[self] . identifier[container] ,
identifier[queue] = identifier[self] . identifier[queue] ,
identifier[field_name] = keyword[None] ,
identifier[annotations] = keyword[None] ,
identifier[depth] = identifier[self] . identifier[depth] ,
identifier[whence] = identifier[whence] ,
identifier[value] = identifier[bytearray] (),
identifier[ion_type] = keyword[None] ,
identifier[pending_symbol] = keyword[None]
) | def derive_child_context(self, whence):
"""Derives a scalar context as a child of the current context.""" # children start without a value
return _HandlerContext(container=self.container, queue=self.queue, field_name=None, annotations=None, depth=self.depth, whence=whence, value=bytearray(), ion_type=None, pending_symbol=None) |
def information_title_header_element(feature, parent):
    """Retrieve the information title header string from definitions."""
    # The expression-engine signature requires these parameters even
    # though this element does not use them.
    _ = feature, parent  # NOQA
    return information_title_header['string_format'].capitalize()
constant[Retrieve information title header string from definitions.]
variable[_] assign[=] tuple[[<ast.Name object at 0x7da1b0c45840>, <ast.Name object at 0x7da1b0c45870>]]
variable[header] assign[=] call[name[information_title_header]][constant[string_format]]
return[call[name[header].capitalize, parameter[]]] | keyword[def] identifier[information_title_header_element] ( identifier[feature] , identifier[parent] ):
literal[string]
identifier[_] = identifier[feature] , identifier[parent]
identifier[header] = identifier[information_title_header] [ literal[string] ]
keyword[return] identifier[header] . identifier[capitalize] () | def information_title_header_element(feature, parent):
"""Retrieve information title header string from definitions."""
_ = (feature, parent) # NOQA
header = information_title_header['string_format']
return header.capitalize() |
def get_xml(vm_, **kwargs):
    '''
    Returns the XML for a given vm
    :param vm_: domain name
    :param connection: libvirt connection URI, overriding defaults
        .. versionadded:: 2019.2.0
    :param username: username to connect with, overriding defaults
        .. versionadded:: 2019.2.0
    :param password: password to connect with, overriding defaults
        .. versionadded:: 2019.2.0
    CLI Example:
    .. code-block:: bash
        salt '*' virt.get_xml <domain>
    '''
    conn = __get_conn(**kwargs)
    try:
        xml_desc = _get_domain(conn, vm_).XMLDesc(0)
    finally:
        # Always release the libvirt connection, even when the domain
        # lookup or the XML dump raises (the original leaked the
        # connection in that case).
        conn.close()
    return xml_desc
constant[
Returns the XML for a given vm
:param vm_: domain name
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadded:: 2019.2.0
:param password: password to connect with, overriding defaults
.. versionadded:: 2019.2.0
CLI Example:
.. code-block:: bash
salt '*' virt.get_xml <domain>
]
variable[conn] assign[=] call[name[__get_conn], parameter[]]
variable[xml_desc] assign[=] call[call[name[_get_domain], parameter[name[conn], name[vm_]]].XMLDesc, parameter[constant[0]]]
call[name[conn].close, parameter[]]
return[name[xml_desc]] | keyword[def] identifier[get_xml] ( identifier[vm_] ,** identifier[kwargs] ):
literal[string]
identifier[conn] = identifier[__get_conn] (** identifier[kwargs] )
identifier[xml_desc] = identifier[_get_domain] ( identifier[conn] , identifier[vm_] ). identifier[XMLDesc] ( literal[int] )
identifier[conn] . identifier[close] ()
keyword[return] identifier[xml_desc] | def get_xml(vm_, **kwargs):
"""
Returns the XML for a given vm
:param vm_: domain name
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadded:: 2019.2.0
:param password: password to connect with, overriding defaults
.. versionadded:: 2019.2.0
CLI Example:
.. code-block:: bash
salt '*' virt.get_xml <domain>
"""
conn = __get_conn(**kwargs)
xml_desc = _get_domain(conn, vm_).XMLDesc(0)
conn.close()
return xml_desc |
def set(self, shape):
    """ Fill this rectangle with the dimensions of the given shape. """
    # Copy position and size attribute-by-attribute, then return self
    # so calls can be chained.
    self.bottom = shape.bottom
    self.left = shape.left
    self.width = shape.width
    self.height = shape.height
    return self
constant[ Fill this rectangle with the dimensions of the given shape. ]
<ast.Tuple object at 0x7da1b1367670> assign[=] tuple[[<ast.Attribute object at 0x7da1b1367580>, <ast.Attribute object at 0x7da1b1364f40>]]
<ast.Tuple object at 0x7da1b1367b50> assign[=] tuple[[<ast.Attribute object at 0x7da1b1365f90>, <ast.Attribute object at 0x7da1b1366260>]]
return[name[self]] | keyword[def] identifier[set] ( identifier[self] , identifier[shape] ):
literal[string]
identifier[self] . identifier[bottom] , identifier[self] . identifier[left] = identifier[shape] . identifier[bottom] , identifier[shape] . identifier[left]
identifier[self] . identifier[width] , identifier[self] . identifier[height] = identifier[shape] . identifier[width] , identifier[shape] . identifier[height]
keyword[return] identifier[self] | def set(self, shape):
""" Fill this rectangle with the dimensions of the given shape. """
(self.bottom, self.left) = (shape.bottom, shape.left)
(self.width, self.height) = (shape.width, shape.height)
return self |
def parse_cmdln_opts(parser, cmdln_args):
    """Parse and validate command-line options.

    Rather than have this all clutter main(), let's split this out.
    Clean arch decision: rather than parsing sys.argv directly, pass
    sys.argv[1:] to this function (or any iterable for testing.)

    Returns an ``(options, args)`` tuple.  Any invalid option
    combination is reported via ``parser.error()``, which prints the
    message and exits.
    """
    parser.set_defaults(
        hosts=[],
        cert=None,
        log_level=logging.INFO,
        output_dir=None,
        output_file=None,
        formats=[],
        includes=[],
        excludes=[],
        nsscmd=None,
        tokenfile=None,
        noncefile=None,
        cachedir=None,
    )
    parser.add_option(
        "-H", "--host", dest="hosts", action="append", help="format[:format]:hostname[:port]")
    parser.add_option("-c", "--server-cert", dest="cert")
    parser.add_option("-t", "--token-file", dest="tokenfile",
                      help="file where token is stored")
    parser.add_option("-n", "--nonce-file", dest="noncefile",
                      help="file where nonce is stored")
    parser.add_option("-d", "--output-dir", dest="output_dir",
                      help="output directory; if not set then files are "
                      "replaced with signed copies")
    parser.add_option("-o", "--output-file", dest="output_file",
                      help="output file; if not set then files are replaced with signed "
                      "copies. This can only be used when signing a single file")
    parser.add_option("-f", "--formats", dest="formats", action="append",
                      help="signing formats (one or more of %s)" % ", ".join(ALLOWED_FORMATS))
    parser.add_option("-q", "--quiet", dest="log_level", action="store_const",
                      const=logging.WARN)
    parser.add_option(
        "-v", "--verbose", dest="log_level", action="store_const",
        const=logging.DEBUG)
    parser.add_option("-i", "--include", dest="includes", action="append",
                      help="add to include patterns")
    parser.add_option("-x", "--exclude", dest="excludes", action="append",
                      help="add to exclude patterns")
    parser.add_option("--nsscmd", dest="nsscmd",
                      help="command to re-sign nss libraries, if required")
    parser.add_option("--cachedir", dest="cachedir",
                      help="local cache directory")
    # TODO: Concurrency?
    # TODO: Different certs per server?
    options, args = parser.parse_args(cmdln_args)
    if not options.hosts:
        parser.error("at least one host is required")
    if not options.cert:
        parser.error("certificate is required")
    if not os.path.exists(options.cert):
        parser.error("certificate not found")
    if not options.tokenfile:
        parser.error("token file is required")
    if not options.noncefile:
        parser.error("nonce file is required")
    # Convert nsscmd from a POSIX-style path (/c/foo) to a win32
    # drive path (c:/foo) if required.
    if sys.platform == 'win32' and options.nsscmd:
        nsscmd = options.nsscmd.strip()
        if nsscmd.startswith("/"):
            drive = nsscmd[1]
            options.nsscmd = "%s:%s" % (drive, nsscmd[2:])
    # Handle formats: each -f value may itself be a comma-separated
    # list of formats.  split(",") on a plain value yields [value], so
    # both shapes are validated identically.
    formats = []
    for fmt in options.formats:
        for sub_fmt in fmt.split(","):
            if sub_fmt not in ALLOWED_FORMATS:
                parser.error("invalid format: %s" % sub_fmt)
            formats.append(sub_fmt)
    # bug 1382882, 1164456
    # Widevine and GPG signing must happen last because they will be invalid if
    # done prior to any format that modifies the file in-place.
    for fmt in ("widevine", "widevine_blessed", "gpg"):
        if fmt in formats:
            formats.remove(fmt)
            formats.append(fmt)
    # len(args) != 1 also catches the zero-file case, which previously
    # raised IndexError on args[0] instead of reporting a usage error.
    if options.output_file and (len(args) != 1 or os.path.isdir(args[0])):
        parser.error(
            "-o / --output-file can only be used when signing a single file")
    if options.output_dir:
        if os.path.exists(options.output_dir):
            if not os.path.isdir(options.output_dir):
                # optparse's error() takes a single pre-formatted
                # message; passing the value as a second positional
                # argument raised TypeError.
                parser.error(
                    "output_dir (%s) must be a directory" % options.output_dir)
        else:
            os.makedirs(options.output_dir)
    if not options.includes:
        # Do everything!
        options.includes.append("*")
    if not formats:
        parser.error("no formats specified")
    options.formats = formats
    # Map each requested format to the list of server URLs that can
    # sign it.
    format_urls = defaultdict(list)
    for h in options.hosts:
        # The last two parts of a host is the actual hostname:port. Any parts
        # before that are formats - there could be 0..n formats so this is
        # tricky to split.
        parts = h.split(":")
        h = parts[-2:]
        fmts = parts[:-2]
        # If no formats are specified, the host is assumed to support all of them.
        if not fmts:
            fmts = formats
        for f in fmts:
            format_urls[f].append("https://%s" % ":".join(h))
    options.format_urls = format_urls
    missing_fmt_hosts = set(formats) - set(format_urls.keys())
    if missing_fmt_hosts:
        parser.error("no hosts capable of signing formats: %s" % " ".join(missing_fmt_hosts))
    return options, args
constant[Rather than have this all clutter main(), let's split this out.
Clean arch decision: rather than parsing sys.argv directly, pass
sys.argv[1:] to this function (or any iterable for testing.)
]
call[name[parser].set_defaults, parameter[]]
call[name[parser].add_option, parameter[constant[-H], constant[--host]]]
call[name[parser].add_option, parameter[constant[-c], constant[--server-cert]]]
call[name[parser].add_option, parameter[constant[-t], constant[--token-file]]]
call[name[parser].add_option, parameter[constant[-n], constant[--nonce-file]]]
call[name[parser].add_option, parameter[constant[-d], constant[--output-dir]]]
call[name[parser].add_option, parameter[constant[-o], constant[--output-file]]]
call[name[parser].add_option, parameter[constant[-f], constant[--formats]]]
call[name[parser].add_option, parameter[constant[-q], constant[--quiet]]]
call[name[parser].add_option, parameter[constant[-v], constant[--verbose]]]
call[name[parser].add_option, parameter[constant[-i], constant[--include]]]
call[name[parser].add_option, parameter[constant[-x], constant[--exclude]]]
call[name[parser].add_option, parameter[constant[--nsscmd]]]
call[name[parser].add_option, parameter[constant[--cachedir]]]
<ast.Tuple object at 0x7da1b0a36e90> assign[=] call[name[parser].parse_args, parameter[name[cmdln_args]]]
if <ast.UnaryOp object at 0x7da1b0a37220> begin[:]
call[name[parser].error, parameter[constant[at least one host is required]]]
if <ast.UnaryOp object at 0x7da1b0a37340> begin[:]
call[name[parser].error, parameter[constant[certificate is required]]]
if <ast.UnaryOp object at 0x7da1b0a37d30> begin[:]
call[name[parser].error, parameter[constant[certificate not found]]]
if <ast.UnaryOp object at 0x7da1b0a37970> begin[:]
call[name[parser].error, parameter[constant[token file is required]]]
if <ast.UnaryOp object at 0x7da1b0a35480> begin[:]
call[name[parser].error, parameter[constant[nonce file is required]]]
if <ast.BoolOp object at 0x7da1b0a35450> begin[:]
variable[nsscmd] assign[=] call[name[options].nsscmd.strip, parameter[]]
if call[name[nsscmd].startswith, parameter[constant[/]]] begin[:]
variable[drive] assign[=] call[name[nsscmd]][constant[1]]
name[options].nsscmd assign[=] binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0a34580>, <ast.Subscript object at 0x7da1b0a365c0>]]]
variable[formats] assign[=] list[[]]
for taget[name[fmt]] in starred[name[options].formats] begin[:]
if compare[constant[,] in name[fmt]] begin[:]
for taget[name[fmt]] in starred[call[name[fmt].split, parameter[constant[,]]]] begin[:]
if compare[name[fmt] <ast.NotIn object at 0x7da2590d7190> name[ALLOWED_FORMATS]] begin[:]
call[name[parser].error, parameter[binary_operation[constant[invalid format: %s] <ast.Mod object at 0x7da2590d6920> name[fmt]]]]
call[name[formats].append, parameter[name[fmt]]]
for taget[name[fmt]] in starred[tuple[[<ast.Constant object at 0x7da1b0a36a70>, <ast.Constant object at 0x7da1b0a35720>, <ast.Constant object at 0x7da1b0a35150>]]] begin[:]
if compare[name[fmt] in name[formats]] begin[:]
call[name[formats].remove, parameter[name[fmt]]]
call[name[formats].append, parameter[name[fmt]]]
if <ast.BoolOp object at 0x7da1b0bd85e0> begin[:]
call[name[parser].error, parameter[constant[-o / --output-file can only be used when signing a single file]]]
if name[options].output_dir begin[:]
if call[name[os].path.exists, parameter[name[options].output_dir]] begin[:]
if <ast.UnaryOp object at 0x7da1b0bda980> begin[:]
call[name[parser].error, parameter[constant[output_dir (%s) must be a directory], name[options].output_dir]]
if <ast.UnaryOp object at 0x7da1b0bdb640> begin[:]
call[name[options].includes.append, parameter[constant[*]]]
if <ast.UnaryOp object at 0x7da1b0bd9f30> begin[:]
call[name[parser].error, parameter[constant[no formats specified]]]
name[options].formats assign[=] name[formats]
variable[format_urls] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[h]] in starred[name[options].hosts] begin[:]
variable[parts] assign[=] call[name[h].split, parameter[constant[:]]]
variable[h] assign[=] call[name[parts]][<ast.Slice object at 0x7da1b0bd8cd0>]
variable[fmts] assign[=] call[name[parts]][<ast.Slice object at 0x7da1b0bda740>]
if <ast.UnaryOp object at 0x7da1b0bdb910> begin[:]
variable[fmts] assign[=] name[formats]
for taget[name[f]] in starred[name[fmts]] begin[:]
call[call[name[format_urls]][name[f]].append, parameter[binary_operation[constant[https://%s] <ast.Mod object at 0x7da2590d6920> call[constant[:].join, parameter[name[h]]]]]]
name[options].format_urls assign[=] name[format_urls]
variable[missing_fmt_hosts] assign[=] binary_operation[call[name[set], parameter[name[formats]]] - call[name[set], parameter[call[name[format_urls].keys, parameter[]]]]]
if name[missing_fmt_hosts] begin[:]
call[name[parser].error, parameter[binary_operation[constant[no hosts capable of signing formats: %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[missing_fmt_hosts]]]]]]
return[tuple[[<ast.Name object at 0x7da1b0bda1d0>, <ast.Name object at 0x7da1b0bda3b0>]]] | keyword[def] identifier[parse_cmdln_opts] ( identifier[parser] , identifier[cmdln_args] ):
literal[string]
identifier[parser] . identifier[set_defaults] (
identifier[hosts] =[],
identifier[cert] = keyword[None] ,
identifier[log_level] = identifier[logging] . identifier[INFO] ,
identifier[output_dir] = keyword[None] ,
identifier[output_file] = keyword[None] ,
identifier[formats] =[],
identifier[includes] =[],
identifier[excludes] =[],
identifier[nsscmd] = keyword[None] ,
identifier[tokenfile] = keyword[None] ,
identifier[noncefile] = keyword[None] ,
identifier[cachedir] = keyword[None] ,
)
identifier[parser] . identifier[add_option] (
literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[dest] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[dest] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[dest] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[dest] = literal[string] ,
identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[dest] = literal[string] ,
identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] % literal[string] . identifier[join] ( identifier[ALLOWED_FORMATS] ))
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] ,
identifier[const] = identifier[logging] . identifier[WARN] )
identifier[parser] . identifier[add_option] (
literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] ,
identifier[const] = identifier[logging] . identifier[DEBUG] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , identifier[dest] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_option] ( literal[string] , identifier[dest] = literal[string] ,
identifier[help] = literal[string] )
identifier[options] , identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[cmdln_args] )
keyword[if] keyword[not] identifier[options] . identifier[hosts] :
identifier[parser] . identifier[error] ( literal[string] )
keyword[if] keyword[not] identifier[options] . identifier[cert] :
identifier[parser] . identifier[error] ( literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[options] . identifier[cert] ):
identifier[parser] . identifier[error] ( literal[string] )
keyword[if] keyword[not] identifier[options] . identifier[tokenfile] :
identifier[parser] . identifier[error] ( literal[string] )
keyword[if] keyword[not] identifier[options] . identifier[noncefile] :
identifier[parser] . identifier[error] ( literal[string] )
keyword[if] identifier[sys] . identifier[platform] == literal[string] keyword[and] identifier[options] . identifier[nsscmd] :
identifier[nsscmd] = identifier[options] . identifier[nsscmd] . identifier[strip] ()
keyword[if] identifier[nsscmd] . identifier[startswith] ( literal[string] ):
identifier[drive] = identifier[nsscmd] [ literal[int] ]
identifier[options] . identifier[nsscmd] = literal[string] %( identifier[drive] , identifier[nsscmd] [ literal[int] :])
identifier[formats] =[]
keyword[for] identifier[fmt] keyword[in] identifier[options] . identifier[formats] :
keyword[if] literal[string] keyword[in] identifier[fmt] :
keyword[for] identifier[fmt] keyword[in] identifier[fmt] . identifier[split] ( literal[string] ):
keyword[if] identifier[fmt] keyword[not] keyword[in] identifier[ALLOWED_FORMATS] :
identifier[parser] . identifier[error] ( literal[string] % identifier[fmt] )
identifier[formats] . identifier[append] ( identifier[fmt] )
keyword[elif] identifier[fmt] keyword[not] keyword[in] identifier[ALLOWED_FORMATS] :
identifier[parser] . identifier[error] ( literal[string] % identifier[fmt] )
keyword[else] :
identifier[formats] . identifier[append] ( identifier[fmt] )
keyword[for] identifier[fmt] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[if] identifier[fmt] keyword[in] identifier[formats] :
identifier[formats] . identifier[remove] ( identifier[fmt] )
identifier[formats] . identifier[append] ( identifier[fmt] )
keyword[if] identifier[options] . identifier[output_file] keyword[and] ( identifier[len] ( identifier[args] )> literal[int] keyword[or] identifier[os] . identifier[path] . identifier[isdir] ( identifier[args] [ literal[int] ])):
identifier[parser] . identifier[error] (
literal[string] )
keyword[if] identifier[options] . identifier[output_dir] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[options] . identifier[output_dir] ):
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[options] . identifier[output_dir] ):
identifier[parser] . identifier[error] (
literal[string] , identifier[options] . identifier[output_dir] )
keyword[else] :
identifier[os] . identifier[makedirs] ( identifier[options] . identifier[output_dir] )
keyword[if] keyword[not] identifier[options] . identifier[includes] :
identifier[options] . identifier[includes] . identifier[append] ( literal[string] )
keyword[if] keyword[not] identifier[formats] :
identifier[parser] . identifier[error] ( literal[string] )
identifier[options] . identifier[formats] = identifier[formats]
identifier[format_urls] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[h] keyword[in] identifier[options] . identifier[hosts] :
identifier[parts] = identifier[h] . identifier[split] ( literal[string] )
identifier[h] = identifier[parts] [- literal[int] :]
identifier[fmts] = identifier[parts] [:- literal[int] ]
keyword[if] keyword[not] identifier[fmts] :
identifier[fmts] = identifier[formats]
keyword[for] identifier[f] keyword[in] identifier[fmts] :
identifier[format_urls] [ identifier[f] ]. identifier[append] ( literal[string] % literal[string] . identifier[join] ( identifier[h] ))
identifier[options] . identifier[format_urls] = identifier[format_urls]
identifier[missing_fmt_hosts] = identifier[set] ( identifier[formats] )- identifier[set] ( identifier[format_urls] . identifier[keys] ())
keyword[if] identifier[missing_fmt_hosts] :
identifier[parser] . identifier[error] ( literal[string] % literal[string] . identifier[join] ( identifier[missing_fmt_hosts] ))
keyword[return] identifier[options] , identifier[args] | def parse_cmdln_opts(parser, cmdln_args):
"""Rather than have this all clutter main(), let's split this out.
Clean arch decision: rather than parsing sys.argv directly, pass
sys.argv[1:] to this function (or any iterable for testing.)
"""
parser.set_defaults(hosts=[], cert=None, log_level=logging.INFO, output_dir=None, output_file=None, formats=[], includes=[], excludes=[], nsscmd=None, tokenfile=None, noncefile=None, cachedir=None)
parser.add_option('-H', '--host', dest='hosts', action='append', help='format[:format]:hostname[:port]')
parser.add_option('-c', '--server-cert', dest='cert')
parser.add_option('-t', '--token-file', dest='tokenfile', help='file where token is stored')
parser.add_option('-n', '--nonce-file', dest='noncefile', help='file where nonce is stored')
parser.add_option('-d', '--output-dir', dest='output_dir', help='output directory; if not set then files are replaced with signed copies')
parser.add_option('-o', '--output-file', dest='output_file', help='output file; if not set then files are replaced with signed copies. This can only be used when signing a single file')
parser.add_option('-f', '--formats', dest='formats', action='append', help='signing formats (one or more of %s)' % ', '.join(ALLOWED_FORMATS))
parser.add_option('-q', '--quiet', dest='log_level', action='store_const', const=logging.WARN)
parser.add_option('-v', '--verbose', dest='log_level', action='store_const', const=logging.DEBUG)
parser.add_option('-i', '--include', dest='includes', action='append', help='add to include patterns')
parser.add_option('-x', '--exclude', dest='excludes', action='append', help='add to exclude patterns')
parser.add_option('--nsscmd', dest='nsscmd', help='command to re-sign nss libraries, if required')
parser.add_option('--cachedir', dest='cachedir', help='local cache directory')
# TODO: Concurrency?
# TODO: Different certs per server?
(options, args) = parser.parse_args(cmdln_args)
if not options.hosts:
parser.error('at least one host is required') # depends on [control=['if'], data=[]]
if not options.cert:
parser.error('certificate is required') # depends on [control=['if'], data=[]]
if not os.path.exists(options.cert):
parser.error('certificate not found') # depends on [control=['if'], data=[]]
if not options.tokenfile:
parser.error('token file is required') # depends on [control=['if'], data=[]]
if not options.noncefile:
parser.error('nonce file is required') # depends on [control=['if'], data=[]]
# Covert nsscmd to win32 path if required
if sys.platform == 'win32' and options.nsscmd:
nsscmd = options.nsscmd.strip()
if nsscmd.startswith('/'):
drive = nsscmd[1]
options.nsscmd = '%s:%s' % (drive, nsscmd[2:]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Handle format
formats = []
for fmt in options.formats:
if ',' in fmt:
for fmt in fmt.split(','):
if fmt not in ALLOWED_FORMATS:
parser.error('invalid format: %s' % fmt) # depends on [control=['if'], data=['fmt']]
formats.append(fmt) # depends on [control=['for'], data=['fmt']] # depends on [control=['if'], data=['fmt']]
elif fmt not in ALLOWED_FORMATS:
parser.error('invalid format: %s' % fmt) # depends on [control=['if'], data=['fmt']]
else:
formats.append(fmt) # depends on [control=['for'], data=['fmt']]
# bug 1382882, 1164456
# Widevine and GPG signing must happen last because they will be invalid if
# done prior to any format that modifies the file in-place.
for fmt in ('widevine', 'widevine_blessed', 'gpg'):
if fmt in formats:
formats.remove(fmt)
formats.append(fmt) # depends on [control=['if'], data=['fmt', 'formats']] # depends on [control=['for'], data=['fmt']]
if options.output_file and (len(args) > 1 or os.path.isdir(args[0])):
parser.error('-o / --output-file can only be used when signing a single file') # depends on [control=['if'], data=[]]
if options.output_dir:
if os.path.exists(options.output_dir):
if not os.path.isdir(options.output_dir):
parser.error('output_dir (%s) must be a directory', options.output_dir) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
os.makedirs(options.output_dir) # depends on [control=['if'], data=[]]
if not options.includes:
# Do everything!
options.includes.append('*') # depends on [control=['if'], data=[]]
if not formats:
parser.error('no formats specified') # depends on [control=['if'], data=[]]
options.formats = formats
format_urls = defaultdict(list)
for h in options.hosts:
# The last two parts of a host is the actual hostname:port. Any parts
# before that are formats - there could be 0..n formats so this is
# tricky to split.
parts = h.split(':')
h = parts[-2:]
fmts = parts[:-2]
# If no formats are specified, the host is assumed to support all of them.
if not fmts:
fmts = formats # depends on [control=['if'], data=[]]
for f in fmts:
format_urls[f].append('https://%s' % ':'.join(h)) # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=['h']]
options.format_urls = format_urls
missing_fmt_hosts = set(formats) - set(format_urls.keys())
if missing_fmt_hosts:
parser.error('no hosts capable of signing formats: %s' % ' '.join(missing_fmt_hosts)) # depends on [control=['if'], data=[]]
return (options, args) |
def is_simplified(s):
    """Check if a string's Chinese characters are Simplified.
    This is equivalent to:
    >>> identify('foo') in (SIMPLIFIED, BOTH)
    """
    hanzi = _get_hanzi(s)
    if not hanzi:
        # No Chinese characters at all.
        return False
    # Characters shared by both scripts count as Simplified here.
    return (hanzi.issubset(_SHARED_CHARACTERS)
            or hanzi.issubset(_SIMPLIFIED_CHARACTERS))
constant[Check if a string's Chinese characters are Simplified.
This is equivalent to:
>>> identify('foo') in (SIMPLIFIED, BOTH)
]
variable[chinese] assign[=] call[name[_get_hanzi], parameter[name[s]]]
if <ast.UnaryOp object at 0x7da1b0bb9570> begin[:]
return[constant[False]]
return[constant[False]] | keyword[def] identifier[is_simplified] ( identifier[s] ):
literal[string]
identifier[chinese] = identifier[_get_hanzi] ( identifier[s] )
keyword[if] keyword[not] identifier[chinese] :
keyword[return] keyword[False]
keyword[elif] identifier[chinese] . identifier[issubset] ( identifier[_SHARED_CHARACTERS] ):
keyword[return] keyword[True]
keyword[elif] identifier[chinese] . identifier[issubset] ( identifier[_SIMPLIFIED_CHARACTERS] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_simplified(s):
"""Check if a string's Chinese characters are Simplified.
This is equivalent to:
>>> identify('foo') in (SIMPLIFIED, BOTH)
"""
chinese = _get_hanzi(s)
if not chinese:
return False # depends on [control=['if'], data=[]]
elif chinese.issubset(_SHARED_CHARACTERS):
return True # depends on [control=['if'], data=[]]
elif chinese.issubset(_SIMPLIFIED_CHARACTERS):
return True # depends on [control=['if'], data=[]]
return False |
def checkArgs(args):
    """Checks the arguments and options.
    :param args: a an object containing the options of the program.
    :type args: argparse.Namespace
    :returns: ``True`` if everything was OK.
    If there is a problem with an option, an exception is raised using the
    :py:class:`ProgramError` class, a message is printed to the
    :class:`sys.stderr` and the program exists with code 1.
    """
    # Guard clause: the input file must exist.
    if os.path.isfile(args.ibs_related):
        return True
    raise ProgramError("{}: no such file".format(args.ibs_related))
constant[Checks the arguments and options.
:param args: a an object containing the options of the program.
:type args: argparse.Namespace
:returns: ``True`` if everything was OK.
If there is a problem with an option, an exception is raised using the
:py:class:`ProgramError` class, a message is printed to the
:class:`sys.stderr` and the program exists with code 1.
]
if <ast.UnaryOp object at 0x7da1b0a7a9b0> begin[:]
variable[msg] assign[=] call[constant[{}: no such file].format, parameter[name[args].ibs_related]]
<ast.Raise object at 0x7da1b0a78190>
return[constant[True]] | keyword[def] identifier[checkArgs] ( identifier[args] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[args] . identifier[ibs_related] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[args] . identifier[ibs_related] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[return] keyword[True] | def checkArgs(args):
"""Checks the arguments and options.
:param args: a an object containing the options of the program.
:type args: argparse.Namespace
:returns: ``True`` if everything was OK.
If there is a problem with an option, an exception is raised using the
:py:class:`ProgramError` class, a message is printed to the
:class:`sys.stderr` and the program exists with code 1.
"""
if not os.path.isfile(args.ibs_related):
msg = '{}: no such file'.format(args.ibs_related)
raise ProgramError(msg) # depends on [control=['if'], data=[]]
return True |
def deprecatedmethod(classname='', info=''):
    """
    Defines a particular method as being deprecated - the
    method will exist for backwards compatibility, but will
    contain information as to how update code to become
    compatible with the current system.

    Code that is deprecated will only be supported through the
    end of a minor release cycle and will be cleaned during a
    major release upgrade.

    :param classname: name of the class owning the deprecated method,
                      used only in the logged warning message
    :param info: free-form hint telling callers what to use instead

    :usage      |from projex.decorators import deprecated
                |
                |class A(object):
                |   @deprecatedmethod('A', 'Use A.printout instead')
                |   def format( self ):
                |       print 'test'
                |
                |   def printout( self ):
                |       print 'new test'
    """
    def decorated(func):
        @wraps(func)
        def wrapped(*args, **kwds):
            frame = last_frame = None
            try:
                # Inspect the caller's frame so the warning points at the
                # call site rather than at this wrapper.
                frame = inspect.currentframe()
                last_frame = frame.f_back
                fname = last_frame.f_code.co_filename
                # __code__ works on both Python 2.6+ and Python 3;
                # func_code was removed in Python 3.
                func_file = func.__code__.co_filename
                opts = {
                    'func': func.__name__,
                    'line': last_frame.f_lineno,
                    'file': fname,
                    'class': classname,
                    'info': info,
                    'package': projex.packageFromPath(func_file)
                }
                msg = 'Deprecated method called from %(file)s, line %(line)d.' \
                      '\n  %(package)s.%(class)s.%(func)s is deprecated.' \
                      '  %(info)s' % opts
                logger.warning(errors.DeprecatedMethodWarning(msg))
            finally:
                # Frame objects participate in reference cycles; drop them
                # explicitly so the caller's locals are not kept alive.
                del frame
                del last_frame
            return func(*args, **kwds)

        # @wraps already copies __name__ and __dict__; we only override the
        # docstring to prepend the deprecation notice.
        wrapped.__doc__ = ':warning  This method is deprecated! %s\n\n' % info
        if func.__doc__:
            wrapped.__doc__ += func.__doc__
        wrapped.__dict__['func_type'] = 'deprecated method'
        return wrapped
    return decorated
constant[
Defines a particular method as being deprecated - the
method will exist for backwards compatibility, but will
contain information as to how update code to become
compatible with the current system.
Code that is deprecated will only be supported through the
end of a minor release cycle and will be cleaned during a
major release upgrade.
:usage |from projex.decorators import deprecated
|
|class A(object):
| @deprecatedmethod('A', 'Use A.printout instead')
| def format( self ):
| print 'test'
|
| def printout( self ):
: print 'new test'
]
def function[decorated, parameter[func]]:
def function[wrapped, parameter[]]:
variable[frame] assign[=] constant[None]
<ast.Try object at 0x7da20cabcb20>
return[call[name[func], parameter[<ast.Starred object at 0x7da20cabec50>]]]
name[wrapped].__name__ assign[=] name[func].__name__
name[wrapped].__doc__ assign[=] binary_operation[constant[:warning This method is deprecated! %s
] <ast.Mod object at 0x7da2590d6920> name[info]]
if name[func].__doc__ begin[:]
<ast.AugAssign object at 0x7da20eb29a50>
call[name[wrapped].__dict__.update, parameter[name[func].__dict__]]
call[name[wrapped].__dict__][constant[func_type]] assign[=] constant[deprecated method]
return[name[wrapped]]
return[name[decorated]] | keyword[def] identifier[deprecatedmethod] ( identifier[classname] = literal[string] , identifier[info] = literal[string] ):
literal[string]
keyword[def] identifier[decorated] ( identifier[func] ):
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapped] (* identifier[args] ,** identifier[kwds] ):
identifier[frame] = identifier[last_frame] = keyword[None]
keyword[try] :
identifier[frame] = identifier[inspect] . identifier[currentframe] ()
identifier[last_frame] = identifier[frame] . identifier[f_back]
identifier[fname] = identifier[last_frame] . identifier[f_code] . identifier[co_filename]
identifier[func_file] = identifier[func] . identifier[func_code] . identifier[co_filename]
identifier[opts] ={
literal[string] : identifier[func] . identifier[__name__] ,
literal[string] : identifier[last_frame] . identifier[f_lineno] ,
literal[string] : identifier[fname] ,
literal[string] : identifier[classname] ,
literal[string] : identifier[info] ,
literal[string] : identifier[projex] . identifier[packageFromPath] ( identifier[func_file] )
}
identifier[msg] = literal[string] literal[string] literal[string] % identifier[opts]
identifier[logger] . identifier[warning] ( identifier[errors] . identifier[DeprecatedMethodWarning] ( identifier[msg] ))
keyword[finally] :
keyword[del] identifier[frame]
keyword[del] identifier[last_frame]
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwds] )
identifier[wrapped] . identifier[__name__] = identifier[func] . identifier[__name__]
identifier[wrapped] . identifier[__doc__] = literal[string] % identifier[info]
keyword[if] identifier[func] . identifier[__doc__] :
identifier[wrapped] . identifier[__doc__] += identifier[func] . identifier[__doc__]
identifier[wrapped] . identifier[__dict__] . identifier[update] ( identifier[func] . identifier[__dict__] )
identifier[wrapped] . identifier[__dict__] [ literal[string] ]= literal[string]
keyword[return] identifier[wrapped]
keyword[return] identifier[decorated] | def deprecatedmethod(classname='', info=''):
"""
Defines a particular method as being deprecated - the
method will exist for backwards compatibility, but will
contain information as to how update code to become
compatible with the current system.
Code that is deprecated will only be supported through the
end of a minor release cycle and will be cleaned during a
major release upgrade.
:usage |from projex.decorators import deprecated
|
|class A(object):
| @deprecatedmethod('A', 'Use A.printout instead')
| def format( self ):
| print 'test'
|
| def printout( self ):
: print 'new test'
"""
def decorated(func):
@wraps(func)
def wrapped(*args, **kwds):
frame = last_frame = None
try:
frame = inspect.currentframe()
last_frame = frame.f_back
fname = last_frame.f_code.co_filename
func_file = func.func_code.co_filename
opts = {'func': func.__name__, 'line': last_frame.f_lineno, 'file': fname, 'class': classname, 'info': info, 'package': projex.packageFromPath(func_file)}
msg = 'Deprecated method called from %(file)s, line %(line)d.\n %(package)s.%(class)s.%(func)s is deprecated. %(info)s' % opts
logger.warning(errors.DeprecatedMethodWarning(msg)) # depends on [control=['try'], data=[]]
finally:
del frame
del last_frame
return func(*args, **kwds)
wrapped.__name__ = func.__name__
wrapped.__doc__ = ':warning This method is deprecated! %s\n\n' % info
if func.__doc__:
wrapped.__doc__ += func.__doc__ # depends on [control=['if'], data=[]]
wrapped.__dict__.update(func.__dict__)
wrapped.__dict__['func_type'] = 'deprecated method'
return wrapped
return decorated |
def return_multiple_convert_numpy(self, start_id, end_id, converter, add_args=None):
    """
    Convert the objects whose ids fall in the range (start_id, end_id) into
    a 2d numpy array using the 'converter' function and return that array.

    Parameters
    ----------
    start_id : id of the first object to convert
    end_id : id of the last object to convert; -1 means "through the last
        element stored in the database"
    converter : function taking the path of a data point (plus optional
        extra arguments) and returning a numpy array
    add_args : optional extra arguments forwarded to the converter
        (list/dict/tuple/...); when None the converter is called with the
        file path only. Default: None

    Returns
    -------
    result : 2-dimensional ndarray
    """
    # -1 is the sentinel meaning "up to the last point in the database".
    stop_id = self.points_amt if end_id == -1 else end_id
    return return_multiple_convert_numpy_base(
        self.dbpath, self.path_to_set, self._set_object,
        start_id, stop_id, converter, add_args)
constant[
Converts several objects, with ids in the range (start_id, end_id)
into a 2d numpy array and returns the array, the conversion is done by the 'converter' function
Parameters
----------
start_id : the id of the first object to be converted
end_id : the id of the last object to be converted, if equal to -1, will convert all data points in range
(start_id, <id of last element in database>)
converter : function, which takes the path of a data point and *args as parameters and returns a numpy array
add_args : optional arguments for the converter (list/dictionary/tuple/whatever). if None, the
converter should take only one input argument - the file path. default value: None
Returns
-------
result : 2-dimensional ndarray
]
if compare[name[end_id] equal[==] <ast.UnaryOp object at 0x7da18ede6380>] begin[:]
variable[end_id] assign[=] name[self].points_amt
return[call[name[return_multiple_convert_numpy_base], parameter[name[self].dbpath, name[self].path_to_set, name[self]._set_object, name[start_id], name[end_id], name[converter], name[add_args]]]] | keyword[def] identifier[return_multiple_convert_numpy] ( identifier[self] , identifier[start_id] , identifier[end_id] , identifier[converter] , identifier[add_args] = keyword[None] ):
literal[string]
keyword[if] identifier[end_id] ==- literal[int] :
identifier[end_id] = identifier[self] . identifier[points_amt]
keyword[return] identifier[return_multiple_convert_numpy_base] ( identifier[self] . identifier[dbpath] , identifier[self] . identifier[path_to_set] , identifier[self] . identifier[_set_object] , identifier[start_id] , identifier[end_id] ,
identifier[converter] , identifier[add_args] ) | def return_multiple_convert_numpy(self, start_id, end_id, converter, add_args=None):
"""
Converts several objects, with ids in the range (start_id, end_id)
into a 2d numpy array and returns the array, the conversion is done by the 'converter' function
Parameters
----------
start_id : the id of the first object to be converted
end_id : the id of the last object to be converted, if equal to -1, will convert all data points in range
(start_id, <id of last element in database>)
converter : function, which takes the path of a data point and *args as parameters and returns a numpy array
add_args : optional arguments for the converter (list/dictionary/tuple/whatever). if None, the
converter should take only one input argument - the file path. default value: None
Returns
-------
result : 2-dimensional ndarray
"""
if end_id == -1:
end_id = self.points_amt # depends on [control=['if'], data=['end_id']]
return return_multiple_convert_numpy_base(self.dbpath, self.path_to_set, self._set_object, start_id, end_id, converter, add_args) |
def make_fig(self):
    """
    Figure constructor, called before `self.plot()`
    """
    # Create the figure and remember it both on the instance and in the
    # shared list of all figures produced so far.
    fig = plt.figure(figsize=(8, 4))
    self.fig = fig
    self._all_figures.append(fig)
constant[
Figure constructor, called before `self.plot()`
]
name[self].fig assign[=] call[name[plt].figure, parameter[]]
call[name[self]._all_figures.append, parameter[name[self].fig]] | keyword[def] identifier[make_fig] ( identifier[self] ):
literal[string]
identifier[self] . identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] ))
identifier[self] . identifier[_all_figures] . identifier[append] ( identifier[self] . identifier[fig] ) | def make_fig(self):
"""
Figure constructor, called before `self.plot()`
"""
self.fig = plt.figure(figsize=(8, 4))
self._all_figures.append(self.fig) |
def rapid_to_gssha(self):
    """
    Prepare RAPID data for simulation
    """
    # No streamflow file supplied: download the forecast first.
    if self.path_to_rapid_qout is None and self.connection_list_file:
        qout_dir = os.path.join(self.gssha_directory, 'rapid_streamflow')
        try:
            os.mkdir(qout_dir)
        except OSError:
            # directory already exists (or cannot be created) -- best effort
            pass
        self.path_to_rapid_qout = self.download_spt_forecast(qout_dir)

    # With streamflow available, wire it into the GSSHA event.
    if self.connection_list_file and self.path_to_rapid_qout is not None:
        self.event_manager.prepare_rapid_streamflow(
            self.path_to_rapid_qout, self.connection_list_file)
        self.simulation_modified_input_cards.append('CHAN_POINT_INPUT')
constant[
Prepare RAPID data for simulation
]
if <ast.BoolOp object at 0x7da18bc70dc0> begin[:]
variable[rapid_qout_directory] assign[=] call[name[os].path.join, parameter[name[self].gssha_directory, constant[rapid_streamflow]]]
<ast.Try object at 0x7da18bc71c30>
name[self].path_to_rapid_qout assign[=] call[name[self].download_spt_forecast, parameter[name[rapid_qout_directory]]]
if <ast.BoolOp object at 0x7da2044c11b0> begin[:]
call[name[self].event_manager.prepare_rapid_streamflow, parameter[name[self].path_to_rapid_qout, name[self].connection_list_file]]
call[name[self].simulation_modified_input_cards.append, parameter[constant[CHAN_POINT_INPUT]]] | keyword[def] identifier[rapid_to_gssha] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[path_to_rapid_qout] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[connection_list_file] :
identifier[rapid_qout_directory] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[gssha_directory] , literal[string] )
keyword[try] :
identifier[os] . identifier[mkdir] ( identifier[rapid_qout_directory] )
keyword[except] identifier[OSError] :
keyword[pass]
identifier[self] . identifier[path_to_rapid_qout] = identifier[self] . identifier[download_spt_forecast] ( identifier[rapid_qout_directory] )
keyword[if] identifier[self] . identifier[path_to_rapid_qout] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[connection_list_file] :
identifier[self] . identifier[event_manager] . identifier[prepare_rapid_streamflow] ( identifier[self] . identifier[path_to_rapid_qout] ,
identifier[self] . identifier[connection_list_file] )
identifier[self] . identifier[simulation_modified_input_cards] . identifier[append] ( literal[string] ) | def rapid_to_gssha(self):
"""
Prepare RAPID data for simulation
"""
# if no streamflow given, download forecast
if self.path_to_rapid_qout is None and self.connection_list_file:
rapid_qout_directory = os.path.join(self.gssha_directory, 'rapid_streamflow')
try:
os.mkdir(rapid_qout_directory) # depends on [control=['try'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]]
self.path_to_rapid_qout = self.download_spt_forecast(rapid_qout_directory) # depends on [control=['if'], data=[]]
# prepare input for GSSHA if user wants
if self.path_to_rapid_qout is not None and self.connection_list_file:
self.event_manager.prepare_rapid_streamflow(self.path_to_rapid_qout, self.connection_list_file)
self.simulation_modified_input_cards.append('CHAN_POINT_INPUT') # depends on [control=['if'], data=[]] |
def get_lib(self, arch='x86', native=False):
    """
    Get lib directories of Windows SDK.
    """
    if self.sdk_version == 'v7.0A':
        # v7.0A keeps the 32-bit libraries directly under lib/
        subdir = '' if arch == 'x86' else arch
        lib = os.path.join(self.sdk_dir, 'lib', subdir)
        if os.path.isdir(lib):
            logging.info(_('using lib: %s'), lib)
            return [lib]
        logging.debug(_('lib not found: %s'), lib)
        return []

    if self.sdk_version == 'v8.1':
        # kernel-mode vs user-mode libraries
        mode = 'km' if native else 'um'
        lib = os.path.join(self.sdk_dir, 'lib', 'winv6.3', mode, arch)
        if os.path.isdir(lib):
            logging.info(_('using lib: %s'), lib)
            return [lib]
        logging.debug(_('lib not found: %s'), lib)
        return []

    if self.sdk_version == 'v10.0':
        found = []
        for mode in ('um', 'ucrt'):
            lib = os.path.join(self.sdk_dir, 'lib', '10.0.10240.0', mode, arch)
            if os.path.isdir(lib):
                logging.info(_('using lib: %s'), lib)
                found.append(lib)
            else:
                logging.debug(_('lib not found: %s'), lib)
        return found

    raise RuntimeError('unknown sdk version: {}'.format(self.sdk_version))
constant[
Get lib directories of Windows SDK.
]
if compare[name[self].sdk_version equal[==] constant[v7.0A]] begin[:]
if compare[name[arch] equal[==] constant[x86]] begin[:]
variable[arch] assign[=] constant[]
variable[lib] assign[=] call[name[os].path.join, parameter[name[self].sdk_dir, constant[lib], name[arch]]]
if call[name[os].path.isdir, parameter[name[lib]]] begin[:]
call[name[logging].info, parameter[call[name[_], parameter[constant[using lib: %s]]], name[lib]]]
return[list[[<ast.Name object at 0x7da2044c0e80>]]]
call[name[logging].debug, parameter[call[name[_], parameter[constant[lib not found: %s]]], name[lib]]]
return[list[[]]]
if compare[name[self].sdk_version equal[==] constant[v8.1]] begin[:]
if name[native] begin[:]
variable[extra] assign[=] call[name[os].path.join, parameter[constant[winv6.3], constant[km]]]
variable[lib] assign[=] call[name[os].path.join, parameter[name[self].sdk_dir, constant[lib], name[extra], name[arch]]]
if call[name[os].path.isdir, parameter[name[lib]]] begin[:]
call[name[logging].info, parameter[call[name[_], parameter[constant[using lib: %s]]], name[lib]]]
return[list[[<ast.Name object at 0x7da2044c05b0>]]]
call[name[logging].debug, parameter[call[name[_], parameter[constant[lib not found: %s]]], name[lib]]]
return[list[[]]]
if compare[name[self].sdk_version equal[==] constant[v10.0]] begin[:]
variable[dirs] assign[=] list[[]]
variable[extra] assign[=] call[name[os].path.join, parameter[constant[lib], constant[10.0.10240.0]]]
for taget[name[mode]] in starred[list[[<ast.Constant object at 0x7da2044c18a0>, <ast.Constant object at 0x7da2044c1390>]]] begin[:]
variable[lib] assign[=] call[name[os].path.join, parameter[name[self].sdk_dir, name[extra], name[mode], name[arch]]]
if call[name[os].path.isdir, parameter[name[lib]]] begin[:]
call[name[logging].info, parameter[call[name[_], parameter[constant[using lib: %s]]], name[lib]]]
call[name[dirs].append, parameter[name[lib]]]
return[name[dirs]]
variable[message] assign[=] call[constant[unknown sdk version: {}].format, parameter[name[self].sdk_version]]
<ast.Raise object at 0x7da2044c07f0> | keyword[def] identifier[get_lib] ( identifier[self] , identifier[arch] = literal[string] , identifier[native] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[sdk_version] == literal[string] :
keyword[if] identifier[arch] == literal[string] :
identifier[arch] = literal[string]
identifier[lib] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[sdk_dir] , literal[string] , identifier[arch] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[lib] ):
identifier[logging] . identifier[info] ( identifier[_] ( literal[string] ), identifier[lib] )
keyword[return] [ identifier[lib] ]
identifier[logging] . identifier[debug] ( identifier[_] ( literal[string] ), identifier[lib] )
keyword[return] []
keyword[if] identifier[self] . identifier[sdk_version] == literal[string] :
keyword[if] identifier[native] :
identifier[extra] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] )
keyword[else] :
identifier[extra] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] )
identifier[lib] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[sdk_dir] , literal[string] , identifier[extra] , identifier[arch] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[lib] ):
identifier[logging] . identifier[info] ( identifier[_] ( literal[string] ), identifier[lib] )
keyword[return] [ identifier[lib] ]
identifier[logging] . identifier[debug] ( identifier[_] ( literal[string] ), identifier[lib] )
keyword[return] []
keyword[if] identifier[self] . identifier[sdk_version] == literal[string] :
identifier[dirs] =[]
identifier[extra] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] )
keyword[for] identifier[mode] keyword[in] [ literal[string] , literal[string] ]:
identifier[lib] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[sdk_dir] , identifier[extra] , identifier[mode] , identifier[arch] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[lib] ):
identifier[logging] . identifier[info] ( identifier[_] ( literal[string] ), identifier[lib] )
identifier[dirs] . identifier[append] ( identifier[lib] )
keyword[else] :
identifier[logging] . identifier[debug] ( identifier[_] ( literal[string] ), identifier[lib] )
keyword[return] identifier[dirs]
identifier[message] = literal[string] . identifier[format] ( identifier[self] . identifier[sdk_version] )
keyword[raise] identifier[RuntimeError] ( identifier[message] ) | def get_lib(self, arch='x86', native=False):
"""
Get lib directories of Windows SDK.
"""
if self.sdk_version == 'v7.0A':
if arch == 'x86':
arch = '' # depends on [control=['if'], data=['arch']]
lib = os.path.join(self.sdk_dir, 'lib', arch)
if os.path.isdir(lib):
logging.info(_('using lib: %s'), lib)
return [lib] # depends on [control=['if'], data=[]]
logging.debug(_('lib not found: %s'), lib)
return [] # depends on [control=['if'], data=[]]
if self.sdk_version == 'v8.1':
if native:
extra = os.path.join('winv6.3', 'km') # depends on [control=['if'], data=[]]
else:
extra = os.path.join('winv6.3', 'um')
lib = os.path.join(self.sdk_dir, 'lib', extra, arch)
if os.path.isdir(lib):
logging.info(_('using lib: %s'), lib)
return [lib] # depends on [control=['if'], data=[]]
logging.debug(_('lib not found: %s'), lib)
return [] # depends on [control=['if'], data=[]]
if self.sdk_version == 'v10.0':
dirs = []
extra = os.path.join('lib', '10.0.10240.0')
for mode in ['um', 'ucrt']:
lib = os.path.join(self.sdk_dir, extra, mode, arch)
if os.path.isdir(lib):
logging.info(_('using lib: %s'), lib)
dirs.append(lib) # depends on [control=['if'], data=[]]
else:
logging.debug(_('lib not found: %s'), lib) # depends on [control=['for'], data=['mode']]
return dirs # depends on [control=['if'], data=[]]
message = 'unknown sdk version: {}'.format(self.sdk_version)
raise RuntimeError(message) |
def from_file(self, fname, comment_lead=None, compressed_with='use_ext'):
    """
    Read a CNF formula from a file in the DIMACS format. A file name is
    expected as an argument. A default argument is ``comment_lead`` for
    parsing comment lines (defaults to ``['c']`` when ``None``). A given
    file can be compressed by either gzip, bzip2, or lzma.

    :param fname: name of a file to parse.
    :param comment_lead: a list of characters leading comment lines
    :param compressed_with: file compression algorithm

    :type fname: str
    :type comment_lead: list(str)
    :type compressed_with: str

    Note that the ``compressed_with`` parameter can be ``None`` (i.e.
    the file is uncompressed), ``'gzip'``, ``'bzip2'``, ``'lzma'``, or
    ``'use_ext'``. The latter value indicates that compression type
    should be automatically determined based on the file extension.
    Using ``'lzma'`` in Python 2 requires the ``backports.lzma``
    package to be additionally installed.

    Usage example:

    .. code-block:: python

        >>> from pysat.formula import CNF
        >>> cnf1 = CNF()
        >>> cnf1.from_file('some-file.cnf.gz', compressed_with='gzip')
        >>>
        >>> cnf2 = CNF(from_file='another-file.cnf')
    """
    # None sentinel avoids the shared-mutable-default pitfall while keeping
    # the effective default of ['c'] for callers.
    if comment_lead is None:
        comment_lead = ['c']

    with FileObject(fname, mode='r', compression=compressed_with) as fobj:
        self.from_fp(fobj.fp, comment_lead)
constant[
Read a CNF formula from a file in the DIMACS format. A file name is
expected as an argument. A default argument is ``comment_lead`` for
parsing comment lines. A given file can be compressed by either
gzip, bzip2, or lzma.
:param fname: name of a file to parse.
:param comment_lead: a list of characters leading comment lines
:param compressed_with: file compression algorithm
:type fname: str
:type comment_lead: list(str)
:type compressed_with: str
Note that the ``compressed_with`` parameter can be ``None`` (i.e.
the file is uncompressed), ``'gzip'``, ``'bzip2'``, ``'lzma'``, or
``'use_ext'``. The latter value indicates that compression type
should be automatically determined based on the file extension.
Using ``'lzma'`` in Python 2 requires the ``backports.lzma``
package to be additionally installed.
Usage example:
.. code-block:: python
>>> from pysat.formula import CNF
>>> cnf1 = CNF()
>>> cnf1.from_file('some-file.cnf.gz', compressed_with='gzip')
>>>
>>> cnf2 = CNF(from_file='another-file.cnf')
]
with call[name[FileObject], parameter[name[fname]]] begin[:]
call[name[self].from_fp, parameter[name[fobj].fp, name[comment_lead]]] | keyword[def] identifier[from_file] ( identifier[self] , identifier[fname] , identifier[comment_lead] =[ literal[string] ], identifier[compressed_with] = literal[string] ):
literal[string]
keyword[with] identifier[FileObject] ( identifier[fname] , identifier[mode] = literal[string] , identifier[compression] = identifier[compressed_with] ) keyword[as] identifier[fobj] :
identifier[self] . identifier[from_fp] ( identifier[fobj] . identifier[fp] , identifier[comment_lead] ) | def from_file(self, fname, comment_lead=['c'], compressed_with='use_ext'):
"""
Read a CNF formula from a file in the DIMACS format. A file name is
expected as an argument. A default argument is ``comment_lead`` for
parsing comment lines. A given file can be compressed by either
gzip, bzip2, or lzma.
:param fname: name of a file to parse.
:param comment_lead: a list of characters leading comment lines
:param compressed_with: file compression algorithm
:type fname: str
:type comment_lead: list(str)
:type compressed_with: str
Note that the ``compressed_with`` parameter can be ``None`` (i.e.
the file is uncompressed), ``'gzip'``, ``'bzip2'``, ``'lzma'``, or
``'use_ext'``. The latter value indicates that compression type
should be automatically determined based on the file extension.
Using ``'lzma'`` in Python 2 requires the ``backports.lzma``
package to be additionally installed.
Usage example:
.. code-block:: python
>>> from pysat.formula import CNF
>>> cnf1 = CNF()
>>> cnf1.from_file('some-file.cnf.gz', compressed_with='gzip')
>>>
>>> cnf2 = CNF(from_file='another-file.cnf')
"""
with FileObject(fname, mode='r', compression=compressed_with) as fobj:
self.from_fp(fobj.fp, comment_lead) # depends on [control=['with'], data=['fobj']] |
def get_output_from_steps(stmt, last_step):
    '''
    Extract output_from(1), output_from('step_1'), and output_from([1, 2])
    to determine dependent steps
    '''
    opt_values = get_param_of_function(
        'output_from', stmt, extra_dict=env.sos_dict.dict())

    def resolve(ref):
        # Map one output_from() argument (str or int) to a step name.
        if isinstance(ref, str):
            return ref
        if not isinstance(ref, int):
            raise ValueError(f'Invalid value {ref} for output_from() function')
        if ref == -1:
            # last_step may be None when a regular step is being probed as
            # an auxiliary step; the decision is postponed in that case.
            return last_step
        current = env.sos_dict['step_name']
        if '_' in current:
            return f"{current.rsplit('_',1)[0]}_{ref}"
        return str(ref)

    deps = []
    for value in opt_values:
        if len(value) == 1:
            # plain positional argument
            value = value[0]
        elif value[0] == 'steps':
            value = value[1]
        elif value[0] in SOS_TARGETS_OPTIONS:
            continue
        else:
            raise ValueError(
                f'Unacceptable keyword argument {value[0]} for function output_from'
            )
        if isinstance(value, (int, str)):
            deps.append(resolve(value))
        elif isinstance(value, Sequence):
            deps.extend(resolve(item) for item in value)
        else:
            raise ValueError(f'Invalid value for input option from {value}')
    return [name for name in deps if name is not None]
constant[
Extract output_from(1), output_from('step_1'), and output_from([1, 2])
to determine dependent steps
]
variable[opt_values] assign[=] call[name[get_param_of_function], parameter[constant[output_from], name[stmt]]]
def function[step_name, parameter[val]]:
if call[name[isinstance], parameter[name[val], name[str]]] begin[:]
return[name[val]]
variable[res] assign[=] list[[]]
for taget[name[value]] in starred[name[opt_values]] begin[:]
if compare[call[name[len], parameter[name[value]]] equal[==] constant[1]] begin[:]
variable[value] assign[=] call[name[value]][constant[0]]
if call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da1b12976a0>, <ast.Name object at 0x7da1b12940a0>]]]] begin[:]
call[name[res].append, parameter[call[name[step_name], parameter[name[value]]]]]
return[<ast.ListComp object at 0x7da1b12c3c70>] | keyword[def] identifier[get_output_from_steps] ( identifier[stmt] , identifier[last_step] ):
literal[string]
identifier[opt_values] = identifier[get_param_of_function] (
literal[string] , identifier[stmt] , identifier[extra_dict] = identifier[env] . identifier[sos_dict] . identifier[dict] ())
keyword[def] identifier[step_name] ( identifier[val] ):
keyword[if] identifier[isinstance] ( identifier[val] , identifier[str] ):
keyword[return] identifier[val]
keyword[elif] identifier[isinstance] ( identifier[val] , identifier[int] ):
keyword[if] identifier[val] ==- literal[int] :
keyword[if] identifier[last_step] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[last_step]
keyword[if] literal[string] keyword[in] identifier[env] . identifier[sos_dict] [ literal[string] ]:
keyword[return] literal[string]
keyword[else] :
keyword[return] identifier[str] ( identifier[val] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[res] =[]
keyword[for] identifier[value] keyword[in] identifier[opt_values] :
keyword[if] identifier[len] ( identifier[value] )== literal[int] :
identifier[value] = identifier[value] [ literal[int] ]
keyword[elif] identifier[value] [ literal[int] ]== literal[string] :
identifier[value] = identifier[value] [ literal[int] ]
keyword[elif] identifier[value] [ literal[int] ] keyword[in] identifier[SOS_TARGETS_OPTIONS] :
keyword[continue]
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string]
)
keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[int] , identifier[str] )):
identifier[res] . identifier[append] ( identifier[step_name] ( identifier[value] ))
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[Sequence] ):
identifier[res] . identifier[extend] ([ identifier[step_name] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[value] ])
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[res] keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] ] | def get_output_from_steps(stmt, last_step):
"""
Extract output_from(1), output_from('step_1'), and output_from([1, 2])
to determine dependent steps
"""
opt_values = get_param_of_function('output_from', stmt, extra_dict=env.sos_dict.dict())
def step_name(val):
if isinstance(val, str):
return val # depends on [control=['if'], data=[]]
elif isinstance(val, int):
if val == -1:
if last_step is None:
# there is a case where a regular step is checked as auxiliary step.
# we will postpone the decision later because the step might not be
# used as such
return None # depends on [control=['if'], data=[]]
return last_step # depends on [control=['if'], data=[]]
if '_' in env.sos_dict['step_name']:
return f"{env.sos_dict['step_name'].rsplit('_', 1)[0]}_{val}" # depends on [control=['if'], data=[]]
else:
return str(val) # depends on [control=['if'], data=[]]
else:
raise ValueError(f'Invalid value {val} for output_from() function')
res = []
for value in opt_values:
if len(value) == 1:
# regular argument
value = value[0] # depends on [control=['if'], data=[]]
elif value[0] == 'steps':
value = value[1] # depends on [control=['if'], data=[]]
elif value[0] in SOS_TARGETS_OPTIONS:
continue # depends on [control=['if'], data=[]]
else:
raise ValueError(f'Unacceptable keyword argument {value[0]} for function output_from')
if isinstance(value, (int, str)):
res.append(step_name(value)) # depends on [control=['if'], data=[]]
elif isinstance(value, Sequence):
res.extend([step_name(x) for x in value]) # depends on [control=['if'], data=[]]
else:
raise ValueError(f'Invalid value for input option from {value}') # depends on [control=['for'], data=['value']]
return [x for x in res if x is not None] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.