code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def set_coalesce(devname, **kwargs):
'''
Changes the coalescing settings of the specified network device
CLI Example:
.. code-block:: bash
salt '*' ethtool.set_coalesce <devname> [adaptive_rx=on|off] [adaptive_tx=on|off] [rx_usecs=N] [rx_frames=N]
[rx_usecs_irq=N] [rx_frames_irq=N] [tx_usecs=N] [tx_frames=N] [tx_usecs_irq=N] [tx_frames_irq=N]
[stats_block_usecs=N] [pkt_rate_low=N] [rx_usecs_low=N] [rx_frames_low=N] [tx_usecs_low=N] [tx_frames_low=N]
[pkt_rate_high=N] [rx_usecs_high=N] [rx_frames_high=N] [tx_usecs_high=N] [tx_frames_high=N]
[sample_interval=N]
'''
try:
coalesce = ethtool.get_coalesce(devname)
except IOError:
log.error('Interrupt coalescing not supported on %s', devname)
return 'Not supported'
changed = False
for param, value in kwargs.items():
if param in ethtool_coalesce_map:
param = ethtool_coalesce_map[param]
if param in coalesce:
if coalesce[param] != value:
coalesce[param] = value
changed = True
try:
if changed:
ethtool.set_coalesce(devname, coalesce)
return show_coalesce(devname)
except IOError:
log.error('Invalid coalesce arguments on %s: %s', devname, coalesce)
return 'Invalid arguments' | def function[set_coalesce, parameter[devname]]:
constant[
Changes the coalescing settings of the specified network device
CLI Example:
.. code-block:: bash
salt '*' ethtool.set_coalesce <devname> [adaptive_rx=on|off] [adaptive_tx=on|off] [rx_usecs=N] [rx_frames=N]
[rx_usecs_irq=N] [rx_frames_irq=N] [tx_usecs=N] [tx_frames=N] [tx_usecs_irq=N] [tx_frames_irq=N]
[stats_block_usecs=N] [pkt_rate_low=N] [rx_usecs_low=N] [rx_frames_low=N] [tx_usecs_low=N] [tx_frames_low=N]
[pkt_rate_high=N] [rx_usecs_high=N] [rx_frames_high=N] [tx_usecs_high=N] [tx_frames_high=N]
[sample_interval=N]
]
<ast.Try object at 0x7da1b2345ba0>
variable[changed] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b2346bc0>, <ast.Name object at 0x7da1b2346230>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
if compare[name[param] in name[ethtool_coalesce_map]] begin[:]
variable[param] assign[=] call[name[ethtool_coalesce_map]][name[param]]
if compare[name[param] in name[coalesce]] begin[:]
if compare[call[name[coalesce]][name[param]] not_equal[!=] name[value]] begin[:]
call[name[coalesce]][name[param]] assign[=] name[value]
variable[changed] assign[=] constant[True]
<ast.Try object at 0x7da1b2346cb0> | keyword[def] identifier[set_coalesce] ( identifier[devname] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[coalesce] = identifier[ethtool] . identifier[get_coalesce] ( identifier[devname] )
keyword[except] identifier[IOError] :
identifier[log] . identifier[error] ( literal[string] , identifier[devname] )
keyword[return] literal[string]
identifier[changed] = keyword[False]
keyword[for] identifier[param] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] ():
keyword[if] identifier[param] keyword[in] identifier[ethtool_coalesce_map] :
identifier[param] = identifier[ethtool_coalesce_map] [ identifier[param] ]
keyword[if] identifier[param] keyword[in] identifier[coalesce] :
keyword[if] identifier[coalesce] [ identifier[param] ]!= identifier[value] :
identifier[coalesce] [ identifier[param] ]= identifier[value]
identifier[changed] = keyword[True]
keyword[try] :
keyword[if] identifier[changed] :
identifier[ethtool] . identifier[set_coalesce] ( identifier[devname] , identifier[coalesce] )
keyword[return] identifier[show_coalesce] ( identifier[devname] )
keyword[except] identifier[IOError] :
identifier[log] . identifier[error] ( literal[string] , identifier[devname] , identifier[coalesce] )
keyword[return] literal[string] | def set_coalesce(devname, **kwargs):
"""
Changes the coalescing settings of the specified network device
CLI Example:
.. code-block:: bash
salt '*' ethtool.set_coalesce <devname> [adaptive_rx=on|off] [adaptive_tx=on|off] [rx_usecs=N] [rx_frames=N]
[rx_usecs_irq=N] [rx_frames_irq=N] [tx_usecs=N] [tx_frames=N] [tx_usecs_irq=N] [tx_frames_irq=N]
[stats_block_usecs=N] [pkt_rate_low=N] [rx_usecs_low=N] [rx_frames_low=N] [tx_usecs_low=N] [tx_frames_low=N]
[pkt_rate_high=N] [rx_usecs_high=N] [rx_frames_high=N] [tx_usecs_high=N] [tx_frames_high=N]
[sample_interval=N]
"""
try:
coalesce = ethtool.get_coalesce(devname) # depends on [control=['try'], data=[]]
except IOError:
log.error('Interrupt coalescing not supported on %s', devname)
return 'Not supported' # depends on [control=['except'], data=[]]
changed = False
for (param, value) in kwargs.items():
if param in ethtool_coalesce_map:
param = ethtool_coalesce_map[param]
if param in coalesce:
if coalesce[param] != value:
coalesce[param] = value
changed = True # depends on [control=['if'], data=['value']] # depends on [control=['if'], data=['param', 'coalesce']] # depends on [control=['if'], data=['param', 'ethtool_coalesce_map']] # depends on [control=['for'], data=[]]
try:
if changed:
ethtool.set_coalesce(devname, coalesce) # depends on [control=['if'], data=[]]
return show_coalesce(devname) # depends on [control=['try'], data=[]]
except IOError:
log.error('Invalid coalesce arguments on %s: %s', devname, coalesce)
return 'Invalid arguments' # depends on [control=['except'], data=[]] |
def can_create_bank_with_record_types(self, bank_record_types):
"""Tests if this user can create a single ``Bank`` using the desired record types.
While ``AssessmentManager.getBankRecordTypes()`` can be used to
examine which records are supported, this method tests which
record(s) are required for creating a specific ``Bank``.
Providing an empty array tests if a ``Bank`` can be created with
no records.
arg: bank_record_types (osid.type.Type[]): array of bank
record types
return: (boolean) - ``true`` if ``Bank`` creation using the
specified ``Types`` is supported, ``false`` otherwise
raise: NullArgument - ``bank_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_create_bin_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=bank_record_types)
return True | def function[can_create_bank_with_record_types, parameter[self, bank_record_types]]:
constant[Tests if this user can create a single ``Bank`` using the desired record types.
While ``AssessmentManager.getBankRecordTypes()`` can be used to
examine which records are supported, this method tests which
record(s) are required for creating a specific ``Bank``.
Providing an empty array tests if a ``Bank`` can be created with
no records.
arg: bank_record_types (osid.type.Type[]): array of bank
record types
return: (boolean) - ``true`` if ``Bank`` creation using the
specified ``Types`` is supported, ``false`` otherwise
raise: NullArgument - ``bank_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.can_create_catalog_with_record_types, parameter[]]]
return[constant[True]] | keyword[def] identifier[can_create_bank_with_record_types] ( identifier[self] , identifier[bank_record_types] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[can_create_catalog_with_record_types] ( identifier[catalog_record_types] = identifier[bank_record_types] )
keyword[return] keyword[True] | def can_create_bank_with_record_types(self, bank_record_types):
"""Tests if this user can create a single ``Bank`` using the desired record types.
While ``AssessmentManager.getBankRecordTypes()`` can be used to
examine which records are supported, this method tests which
record(s) are required for creating a specific ``Bank``.
Providing an empty array tests if a ``Bank`` can be created with
no records.
arg: bank_record_types (osid.type.Type[]): array of bank
record types
return: (boolean) - ``true`` if ``Bank`` creation using the
specified ``Types`` is supported, ``false`` otherwise
raise: NullArgument - ``bank_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_create_bin_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=bank_record_types) # depends on [control=['if'], data=[]]
return True |
def get_template_request(configuration, spec):
"""
Builds API request block.
:param configuration:
:param spec:
:return:
"""
req = RequestHolder()
req.api_method = 'GetUserObjectTemplate'
req.nonce = EBUtils.generate_nonce()
req.api_object = EBUtils.build_api_object(api_key=configuration.api_key, uo_id=0x1)
req.body = {"data": spec}
req.configuration = configuration
req.endpoint = configuration.endpoint_enroll
return req | def function[get_template_request, parameter[configuration, spec]]:
constant[
Builds API request block.
:param configuration:
:param spec:
:return:
]
variable[req] assign[=] call[name[RequestHolder], parameter[]]
name[req].api_method assign[=] constant[GetUserObjectTemplate]
name[req].nonce assign[=] call[name[EBUtils].generate_nonce, parameter[]]
name[req].api_object assign[=] call[name[EBUtils].build_api_object, parameter[]]
name[req].body assign[=] dictionary[[<ast.Constant object at 0x7da1b1602980>], [<ast.Name object at 0x7da1b1601600>]]
name[req].configuration assign[=] name[configuration]
name[req].endpoint assign[=] name[configuration].endpoint_enroll
return[name[req]] | keyword[def] identifier[get_template_request] ( identifier[configuration] , identifier[spec] ):
literal[string]
identifier[req] = identifier[RequestHolder] ()
identifier[req] . identifier[api_method] = literal[string]
identifier[req] . identifier[nonce] = identifier[EBUtils] . identifier[generate_nonce] ()
identifier[req] . identifier[api_object] = identifier[EBUtils] . identifier[build_api_object] ( identifier[api_key] = identifier[configuration] . identifier[api_key] , identifier[uo_id] = literal[int] )
identifier[req] . identifier[body] ={ literal[string] : identifier[spec] }
identifier[req] . identifier[configuration] = identifier[configuration]
identifier[req] . identifier[endpoint] = identifier[configuration] . identifier[endpoint_enroll]
keyword[return] identifier[req] | def get_template_request(configuration, spec):
"""
Builds API request block.
:param configuration:
:param spec:
:return:
"""
req = RequestHolder()
req.api_method = 'GetUserObjectTemplate'
req.nonce = EBUtils.generate_nonce()
req.api_object = EBUtils.build_api_object(api_key=configuration.api_key, uo_id=1)
req.body = {'data': spec}
req.configuration = configuration
req.endpoint = configuration.endpoint_enroll
return req |
def parse(input_string):
"""
Accepts an input string containing an LA equation, e.g.,
"M3_mymatrix = M3_anothermatrix^-1" returns C code function
calls that implement the expression.
"""
global exprStack
global targetvar
# Start with a blank exprStack and a blank targetvar
exprStack = []
targetvar=None
if input_string != '':
# try parsing the input string
try:
L=equation.parseString( input_string )
except ParseException as err:
print('Parse Failure', file=sys.stderr)
print(err.line, file=sys.stderr)
print(" "*(err.column-1) + "^", file=sys.stderr)
print(err, file=sys.stderr)
raise
# show result of parsing the input string
if debug_flag:
print(input_string, "->", L)
print("exprStack=", exprStack)
# Evaluate the stack of parsed operands, emitting C code.
try:
result=_evaluateStack(exprStack)
except TypeError:
print("Unsupported operation on right side of '%s'.\nCheck for missing or incorrect tags on non-scalar operands."%input_string, file=sys.stderr)
raise
except UnaryUnsupportedError:
print("Unary negation is not supported for vectors and matrices: '%s'"%input_string, file=sys.stderr)
raise
# Create final assignment and print it.
if debug_flag: print("var=",targetvar)
if targetvar != None:
try:
result = _assignfunc(targetvar,result)
except TypeError:
print("Left side tag does not match right side of '%s'"%input_string, file=sys.stderr)
raise
except UnaryUnsupportedError:
print("Unary negation is not supported for vectors and matrices: '%s'"%input_string, file=sys.stderr)
raise
return result
else:
print("Empty left side in '%s'"%input_string, file=sys.stderr)
raise TypeError | def function[parse, parameter[input_string]]:
constant[
Accepts an input string containing an LA equation, e.g.,
"M3_mymatrix = M3_anothermatrix^-1" returns C code function
calls that implement the expression.
]
<ast.Global object at 0x7da18f09c100>
<ast.Global object at 0x7da18f09e440>
variable[exprStack] assign[=] list[[]]
variable[targetvar] assign[=] constant[None]
if compare[name[input_string] not_equal[!=] constant[]] begin[:]
<ast.Try object at 0x7da18f09fd30>
if name[debug_flag] begin[:]
call[name[print], parameter[name[input_string], constant[->], name[L]]]
call[name[print], parameter[constant[exprStack=], name[exprStack]]]
<ast.Try object at 0x7da18f09da20>
if name[debug_flag] begin[:]
call[name[print], parameter[constant[var=], name[targetvar]]]
if compare[name[targetvar] not_equal[!=] constant[None]] begin[:]
<ast.Try object at 0x7da18f09c910>
return[name[result]] | keyword[def] identifier[parse] ( identifier[input_string] ):
literal[string]
keyword[global] identifier[exprStack]
keyword[global] identifier[targetvar]
identifier[exprStack] =[]
identifier[targetvar] = keyword[None]
keyword[if] identifier[input_string] != literal[string] :
keyword[try] :
identifier[L] = identifier[equation] . identifier[parseString] ( identifier[input_string] )
keyword[except] identifier[ParseException] keyword[as] identifier[err] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[print] ( identifier[err] . identifier[line] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[print] ( literal[string] *( identifier[err] . identifier[column] - literal[int] )+ literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[print] ( identifier[err] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[raise]
keyword[if] identifier[debug_flag] :
identifier[print] ( identifier[input_string] , literal[string] , identifier[L] )
identifier[print] ( literal[string] , identifier[exprStack] )
keyword[try] :
identifier[result] = identifier[_evaluateStack] ( identifier[exprStack] )
keyword[except] identifier[TypeError] :
identifier[print] ( literal[string] % identifier[input_string] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[raise]
keyword[except] identifier[UnaryUnsupportedError] :
identifier[print] ( literal[string] % identifier[input_string] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[raise]
keyword[if] identifier[debug_flag] : identifier[print] ( literal[string] , identifier[targetvar] )
keyword[if] identifier[targetvar] != keyword[None] :
keyword[try] :
identifier[result] = identifier[_assignfunc] ( identifier[targetvar] , identifier[result] )
keyword[except] identifier[TypeError] :
identifier[print] ( literal[string] % identifier[input_string] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[raise]
keyword[except] identifier[UnaryUnsupportedError] :
identifier[print] ( literal[string] % identifier[input_string] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[raise]
keyword[return] identifier[result]
keyword[else] :
identifier[print] ( literal[string] % identifier[input_string] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[raise] identifier[TypeError] | def parse(input_string):
"""
Accepts an input string containing an LA equation, e.g.,
"M3_mymatrix = M3_anothermatrix^-1" returns C code function
calls that implement the expression.
"""
global exprStack
global targetvar # Start with a blank exprStack and a blank targetvar
exprStack = []
targetvar = None
if input_string != '': # try parsing the input string
try:
L = equation.parseString(input_string) # depends on [control=['try'], data=[]]
except ParseException as err:
print('Parse Failure', file=sys.stderr)
print(err.line, file=sys.stderr)
print(' ' * (err.column - 1) + '^', file=sys.stderr)
print(err, file=sys.stderr)
raise # depends on [control=['except'], data=['err']] # show result of parsing the input string
if debug_flag:
print(input_string, '->', L)
print('exprStack=', exprStack) # depends on [control=['if'], data=[]] # Evaluate the stack of parsed operands, emitting C code.
try:
result = _evaluateStack(exprStack) # depends on [control=['try'], data=[]]
except TypeError:
print("Unsupported operation on right side of '%s'.\nCheck for missing or incorrect tags on non-scalar operands." % input_string, file=sys.stderr)
raise # depends on [control=['except'], data=[]]
except UnaryUnsupportedError:
print("Unary negation is not supported for vectors and matrices: '%s'" % input_string, file=sys.stderr)
raise # depends on [control=['except'], data=[]] # Create final assignment and print it.
if debug_flag:
print('var=', targetvar) # depends on [control=['if'], data=[]]
if targetvar != None:
try:
result = _assignfunc(targetvar, result) # depends on [control=['try'], data=[]]
except TypeError:
print("Left side tag does not match right side of '%s'" % input_string, file=sys.stderr)
raise # depends on [control=['except'], data=[]]
except UnaryUnsupportedError:
print("Unary negation is not supported for vectors and matrices: '%s'" % input_string, file=sys.stderr)
raise # depends on [control=['except'], data=[]]
return result # depends on [control=['if'], data=['targetvar']]
else:
print("Empty left side in '%s'" % input_string, file=sys.stderr)
raise TypeError # depends on [control=['if'], data=['input_string']] |
def search(self, file_name, imported_data=None):
""" Run models on different data configurations.
Note
----
The input json file should include ALL parameters.
Parameters
----------
file_name : str
Optional json file to read parameters.
imported_data : pd.DataFrame()
Pandas Dataframe containing data.
"""
resample_freq=['15T', 'h', 'd']
time_freq = {
'year' : [True, False, False, False, False],
'month' : [False, True, False, False, False],
'week' : [False, False, True, False, False],
'tod' : [False, False, False, True, False],
'dow' : [False, False, False, False, True],
}
optimal_score = float('-inf')
optimal_model = None
# CSV Files
if not imported_data:
with open(file_name) as f:
input_json = json.load(f)
import_json = input_json['Import']
imported_data = self.import_data(file_name=import_json['File Name'], folder_name=import_json['Folder Name'],
head_row=import_json['Head Row'], index_col=import_json['Index Col'],
convert_col=import_json['Convert Col'], concat_files=import_json['Concat Files'],
save_file=import_json['Save File'])
with open(file_name) as f:
input_json = json.load(f)
for x in resample_freq: # Resample data interval
input_json['Clean']['Frequency'] = x
for i in range(len(time_freq.items())): # Add time features
input_json['Preprocess']['Year'] = time_freq['year'][i]
input_json['Preprocess']['Month'] = time_freq['month'][i]
input_json['Preprocess']['Week'] = time_freq['week'][i]
input_json['Preprocess']['Time of Day'] = time_freq['tod'][i]
input_json['Preprocess']['Day of Week'] = time_freq['dow'][i]
# Putting comment in json file to indicate which parameters have been changed
time_feature = None
for key in time_freq:
if time_freq[key][i]:
time_feature = key
self.result['Comment'] = 'Freq: ' + x + ', ' + 'Time Feature: ' + time_feature
# Read parameters in input_json
self.read_json(file_name=None, input_json=input_json, imported_data=imported_data)
# Keep track of highest adj_r2 score
if self.result['Model']['Optimal Model\'s Metrics']['adj_r2'] > optimal_score:
optimal_score = self.result['Model']['Optimal Model\'s Metrics']['adj_r2']
optimal_model_file_name = self.results_folder_name + '/results-' + str(self.get_global_count()) + '.json'
# Wrapper.global_count += 1
print('Most optimal model: ', optimal_model_file_name)
freq = self.result['Comment'].split(' ')[1][:-1]
time_feat = self.result['Comment'].split(' ')[-1]
print('Freq: ', freq, 'Time Feature: ', time_feat) | def function[search, parameter[self, file_name, imported_data]]:
constant[ Run models on different data configurations.
Note
----
The input json file should include ALL parameters.
Parameters
----------
file_name : str
Optional json file to read parameters.
imported_data : pd.DataFrame()
Pandas Dataframe containing data.
]
variable[resample_freq] assign[=] list[[<ast.Constant object at 0x7da204346650>, <ast.Constant object at 0x7da204344e80>, <ast.Constant object at 0x7da204347f70>]]
variable[time_freq] assign[=] dictionary[[<ast.Constant object at 0x7da2043457e0>, <ast.Constant object at 0x7da2043466b0>, <ast.Constant object at 0x7da204344a00>, <ast.Constant object at 0x7da2043446d0>, <ast.Constant object at 0x7da2043477f0>], [<ast.List object at 0x7da204346b30>, <ast.List object at 0x7da204346b90>, <ast.List object at 0x7da204344f40>, <ast.List object at 0x7da204345fc0>, <ast.List object at 0x7da204347340>]]
variable[optimal_score] assign[=] call[name[float], parameter[constant[-inf]]]
variable[optimal_model] assign[=] constant[None]
if <ast.UnaryOp object at 0x7da204344b20> begin[:]
with call[name[open], parameter[name[file_name]]] begin[:]
variable[input_json] assign[=] call[name[json].load, parameter[name[f]]]
variable[import_json] assign[=] call[name[input_json]][constant[Import]]
variable[imported_data] assign[=] call[name[self].import_data, parameter[]]
with call[name[open], parameter[name[file_name]]] begin[:]
variable[input_json] assign[=] call[name[json].load, parameter[name[f]]]
for taget[name[x]] in starred[name[resample_freq]] begin[:]
call[call[name[input_json]][constant[Clean]]][constant[Frequency]] assign[=] name[x]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[call[name[time_freq].items, parameter[]]]]]]] begin[:]
call[call[name[input_json]][constant[Preprocess]]][constant[Year]] assign[=] call[call[name[time_freq]][constant[year]]][name[i]]
call[call[name[input_json]][constant[Preprocess]]][constant[Month]] assign[=] call[call[name[time_freq]][constant[month]]][name[i]]
call[call[name[input_json]][constant[Preprocess]]][constant[Week]] assign[=] call[call[name[time_freq]][constant[week]]][name[i]]
call[call[name[input_json]][constant[Preprocess]]][constant[Time of Day]] assign[=] call[call[name[time_freq]][constant[tod]]][name[i]]
call[call[name[input_json]][constant[Preprocess]]][constant[Day of Week]] assign[=] call[call[name[time_freq]][constant[dow]]][name[i]]
variable[time_feature] assign[=] constant[None]
for taget[name[key]] in starred[name[time_freq]] begin[:]
if call[call[name[time_freq]][name[key]]][name[i]] begin[:]
variable[time_feature] assign[=] name[key]
call[name[self].result][constant[Comment]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[Freq: ] + name[x]] + constant[, ]] + constant[Time Feature: ]] + name[time_feature]]
call[name[self].read_json, parameter[]]
if compare[call[call[call[name[self].result][constant[Model]]][constant[Optimal Model's Metrics]]][constant[adj_r2]] greater[>] name[optimal_score]] begin[:]
variable[optimal_score] assign[=] call[call[call[name[self].result][constant[Model]]][constant[Optimal Model's Metrics]]][constant[adj_r2]]
variable[optimal_model_file_name] assign[=] binary_operation[binary_operation[binary_operation[name[self].results_folder_name + constant[/results-]] + call[name[str], parameter[call[name[self].get_global_count, parameter[]]]]] + constant[.json]]
call[name[print], parameter[constant[Most optimal model: ], name[optimal_model_file_name]]]
variable[freq] assign[=] call[call[call[call[name[self].result][constant[Comment]].split, parameter[constant[ ]]]][constant[1]]][<ast.Slice object at 0x7da2043468f0>]
variable[time_feat] assign[=] call[call[call[name[self].result][constant[Comment]].split, parameter[constant[ ]]]][<ast.UnaryOp object at 0x7da204345990>]
call[name[print], parameter[constant[Freq: ], name[freq], constant[Time Feature: ], name[time_feat]]] | keyword[def] identifier[search] ( identifier[self] , identifier[file_name] , identifier[imported_data] = keyword[None] ):
literal[string]
identifier[resample_freq] =[ literal[string] , literal[string] , literal[string] ]
identifier[time_freq] ={
literal[string] :[ keyword[True] , keyword[False] , keyword[False] , keyword[False] , keyword[False] ],
literal[string] :[ keyword[False] , keyword[True] , keyword[False] , keyword[False] , keyword[False] ],
literal[string] :[ keyword[False] , keyword[False] , keyword[True] , keyword[False] , keyword[False] ],
literal[string] :[ keyword[False] , keyword[False] , keyword[False] , keyword[True] , keyword[False] ],
literal[string] :[ keyword[False] , keyword[False] , keyword[False] , keyword[False] , keyword[True] ],
}
identifier[optimal_score] = identifier[float] ( literal[string] )
identifier[optimal_model] = keyword[None]
keyword[if] keyword[not] identifier[imported_data] :
keyword[with] identifier[open] ( identifier[file_name] ) keyword[as] identifier[f] :
identifier[input_json] = identifier[json] . identifier[load] ( identifier[f] )
identifier[import_json] = identifier[input_json] [ literal[string] ]
identifier[imported_data] = identifier[self] . identifier[import_data] ( identifier[file_name] = identifier[import_json] [ literal[string] ], identifier[folder_name] = identifier[import_json] [ literal[string] ],
identifier[head_row] = identifier[import_json] [ literal[string] ], identifier[index_col] = identifier[import_json] [ literal[string] ],
identifier[convert_col] = identifier[import_json] [ literal[string] ], identifier[concat_files] = identifier[import_json] [ literal[string] ],
identifier[save_file] = identifier[import_json] [ literal[string] ])
keyword[with] identifier[open] ( identifier[file_name] ) keyword[as] identifier[f] :
identifier[input_json] = identifier[json] . identifier[load] ( identifier[f] )
keyword[for] identifier[x] keyword[in] identifier[resample_freq] :
identifier[input_json] [ literal[string] ][ literal[string] ]= identifier[x]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[time_freq] . identifier[items] ())):
identifier[input_json] [ literal[string] ][ literal[string] ]= identifier[time_freq] [ literal[string] ][ identifier[i] ]
identifier[input_json] [ literal[string] ][ literal[string] ]= identifier[time_freq] [ literal[string] ][ identifier[i] ]
identifier[input_json] [ literal[string] ][ literal[string] ]= identifier[time_freq] [ literal[string] ][ identifier[i] ]
identifier[input_json] [ literal[string] ][ literal[string] ]= identifier[time_freq] [ literal[string] ][ identifier[i] ]
identifier[input_json] [ literal[string] ][ literal[string] ]= identifier[time_freq] [ literal[string] ][ identifier[i] ]
identifier[time_feature] = keyword[None]
keyword[for] identifier[key] keyword[in] identifier[time_freq] :
keyword[if] identifier[time_freq] [ identifier[key] ][ identifier[i] ]:
identifier[time_feature] = identifier[key]
identifier[self] . identifier[result] [ literal[string] ]= literal[string] + identifier[x] + literal[string] + literal[string] + identifier[time_feature]
identifier[self] . identifier[read_json] ( identifier[file_name] = keyword[None] , identifier[input_json] = identifier[input_json] , identifier[imported_data] = identifier[imported_data] )
keyword[if] identifier[self] . identifier[result] [ literal[string] ][ literal[string] ][ literal[string] ]> identifier[optimal_score] :
identifier[optimal_score] = identifier[self] . identifier[result] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[optimal_model_file_name] = identifier[self] . identifier[results_folder_name] + literal[string] + identifier[str] ( identifier[self] . identifier[get_global_count] ())+ literal[string]
identifier[print] ( literal[string] , identifier[optimal_model_file_name] )
identifier[freq] = identifier[self] . identifier[result] [ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ][:- literal[int] ]
identifier[time_feat] = identifier[self] . identifier[result] [ literal[string] ]. identifier[split] ( literal[string] )[- literal[int] ]
identifier[print] ( literal[string] , identifier[freq] , literal[string] , identifier[time_feat] ) | def search(self, file_name, imported_data=None):
""" Run models on different data configurations.
Note
----
The input json file should include ALL parameters.
Parameters
----------
file_name : str
Optional json file to read parameters.
imported_data : pd.DataFrame()
Pandas Dataframe containing data.
"""
resample_freq = ['15T', 'h', 'd']
time_freq = {'year': [True, False, False, False, False], 'month': [False, True, False, False, False], 'week': [False, False, True, False, False], 'tod': [False, False, False, True, False], 'dow': [False, False, False, False, True]}
optimal_score = float('-inf')
optimal_model = None
# CSV Files
if not imported_data:
with open(file_name) as f:
input_json = json.load(f)
import_json = input_json['Import']
imported_data = self.import_data(file_name=import_json['File Name'], folder_name=import_json['Folder Name'], head_row=import_json['Head Row'], index_col=import_json['Index Col'], convert_col=import_json['Convert Col'], concat_files=import_json['Concat Files'], save_file=import_json['Save File']) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
with open(file_name) as f:
input_json = json.load(f)
for x in resample_freq: # Resample data interval
input_json['Clean']['Frequency'] = x
for i in range(len(time_freq.items())): # Add time features
input_json['Preprocess']['Year'] = time_freq['year'][i]
input_json['Preprocess']['Month'] = time_freq['month'][i]
input_json['Preprocess']['Week'] = time_freq['week'][i]
input_json['Preprocess']['Time of Day'] = time_freq['tod'][i]
input_json['Preprocess']['Day of Week'] = time_freq['dow'][i]
# Putting comment in json file to indicate which parameters have been changed
time_feature = None
for key in time_freq:
if time_freq[key][i]:
time_feature = key # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
self.result['Comment'] = 'Freq: ' + x + ', ' + 'Time Feature: ' + time_feature
# Read parameters in input_json
self.read_json(file_name=None, input_json=input_json, imported_data=imported_data)
# Keep track of highest adj_r2 score
if self.result['Model']["Optimal Model's Metrics"]['adj_r2'] > optimal_score:
optimal_score = self.result['Model']["Optimal Model's Metrics"]['adj_r2']
optimal_model_file_name = self.results_folder_name + '/results-' + str(self.get_global_count()) + '.json' # depends on [control=['if'], data=['optimal_score']] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['x']] # depends on [control=['with'], data=['f']]
# Wrapper.global_count += 1
print('Most optimal model: ', optimal_model_file_name)
freq = self.result['Comment'].split(' ')[1][:-1]
time_feat = self.result['Comment'].split(' ')[-1]
print('Freq: ', freq, 'Time Feature: ', time_feat) |
def add(self, watch_key, tensor_value):
"""Add a tensor value.
Args:
watch_key: A string representing the debugger tensor watch, e.g.,
'Dense_1/BiasAdd:0:DebugIdentity'.
tensor_value: The value of the tensor as a numpy.ndarray.
"""
if watch_key not in self._tensor_data:
self._tensor_data[watch_key] = _WatchStore(
watch_key,
mem_bytes_limit=self._watch_mem_bytes_limit)
self._tensor_data[watch_key].add(tensor_value) | def function[add, parameter[self, watch_key, tensor_value]]:
constant[Add a tensor value.
Args:
watch_key: A string representing the debugger tensor watch, e.g.,
'Dense_1/BiasAdd:0:DebugIdentity'.
tensor_value: The value of the tensor as a numpy.ndarray.
]
if compare[name[watch_key] <ast.NotIn object at 0x7da2590d7190> name[self]._tensor_data] begin[:]
call[name[self]._tensor_data][name[watch_key]] assign[=] call[name[_WatchStore], parameter[name[watch_key]]]
call[call[name[self]._tensor_data][name[watch_key]].add, parameter[name[tensor_value]]] | keyword[def] identifier[add] ( identifier[self] , identifier[watch_key] , identifier[tensor_value] ):
literal[string]
keyword[if] identifier[watch_key] keyword[not] keyword[in] identifier[self] . identifier[_tensor_data] :
identifier[self] . identifier[_tensor_data] [ identifier[watch_key] ]= identifier[_WatchStore] (
identifier[watch_key] ,
identifier[mem_bytes_limit] = identifier[self] . identifier[_watch_mem_bytes_limit] )
identifier[self] . identifier[_tensor_data] [ identifier[watch_key] ]. identifier[add] ( identifier[tensor_value] ) | def add(self, watch_key, tensor_value):
"""Add a tensor value.
Args:
watch_key: A string representing the debugger tensor watch, e.g.,
'Dense_1/BiasAdd:0:DebugIdentity'.
tensor_value: The value of the tensor as a numpy.ndarray.
"""
if watch_key not in self._tensor_data:
self._tensor_data[watch_key] = _WatchStore(watch_key, mem_bytes_limit=self._watch_mem_bytes_limit) # depends on [control=['if'], data=['watch_key']]
self._tensor_data[watch_key].add(tensor_value) |
def transformSkyCoordinateErrors(self, phi, theta, sigPhiStar, sigTheta, rhoPhiTheta=0):
"""
Converts the sky coordinate errors from one reference system to another, including the covariance
term. Equations (1.5.4) and (1.5.20) from section 1.5 in the Hipparcos Explanatory Volume 1 are used.
Parameters
----------
phi - The longitude-like angle of the position of the source (radians).
theta - The latitude-like angle of the position of the source (radians).
sigPhiStar - Standard error in the longitude-like angle of the position of the source (radians or
sexagesimal units, including cos(latitude) term)
sigTheta - Standard error in the latitude-like angle of the position of the source (radians or
sexagesimal units)
Keywords (optional)
-------------------
rhoPhiTheta - Correlation coefficient of the position errors. Set to zero if this keyword is not
provided.
Retuns
------
sigPhiRotStar - The transformed standard error in the longitude-like angle (including
cos(latitude) factor)
sigThetaRot - The transformed standard error in the latitude-like angle.
rhoPhiThetaRot - The transformed correlation coefficient.
"""
if isscalar(rhoPhiTheta) and not isscalar(sigTheta):
rhoPhiTheta=zeros_like(sigTheta)+rhoPhiTheta
c, s = self._getJacobian(phi,theta)
cSqr = c*c
sSqr = s*s
covar = sigPhiStar*sigTheta*rhoPhiTheta
varPhiStar = sigPhiStar*sigPhiStar
varTheta = sigTheta*sigTheta
varPhiStarRot = cSqr*varPhiStar+sSqr*varTheta+2.0*covar*c*s
varThetaRot = sSqr*varPhiStar+cSqr*varTheta-2.0*covar*c*s
covarRot = (cSqr-sSqr)*covar+c*s*(varTheta-varPhiStar)
return sqrt(varPhiStarRot), sqrt(varThetaRot), covarRot/sqrt(varPhiStarRot*varThetaRot) | def function[transformSkyCoordinateErrors, parameter[self, phi, theta, sigPhiStar, sigTheta, rhoPhiTheta]]:
constant[
Converts the sky coordinate errors from one reference system to another, including the covariance
term. Equations (1.5.4) and (1.5.20) from section 1.5 in the Hipparcos Explanatory Volume 1 are used.
Parameters
----------
phi - The longitude-like angle of the position of the source (radians).
theta - The latitude-like angle of the position of the source (radians).
sigPhiStar - Standard error in the longitude-like angle of the position of the source (radians or
sexagesimal units, including cos(latitude) term)
sigTheta - Standard error in the latitude-like angle of the position of the source (radians or
sexagesimal units)
Keywords (optional)
-------------------
rhoPhiTheta - Correlation coefficient of the position errors. Set to zero if this keyword is not
provided.
Retuns
------
sigPhiRotStar - The transformed standard error in the longitude-like angle (including
cos(latitude) factor)
sigThetaRot - The transformed standard error in the latitude-like angle.
rhoPhiThetaRot - The transformed correlation coefficient.
]
if <ast.BoolOp object at 0x7da2041db9a0> begin[:]
variable[rhoPhiTheta] assign[=] binary_operation[call[name[zeros_like], parameter[name[sigTheta]]] + name[rhoPhiTheta]]
<ast.Tuple object at 0x7da2041dbf10> assign[=] call[name[self]._getJacobian, parameter[name[phi], name[theta]]]
variable[cSqr] assign[=] binary_operation[name[c] * name[c]]
variable[sSqr] assign[=] binary_operation[name[s] * name[s]]
variable[covar] assign[=] binary_operation[binary_operation[name[sigPhiStar] * name[sigTheta]] * name[rhoPhiTheta]]
variable[varPhiStar] assign[=] binary_operation[name[sigPhiStar] * name[sigPhiStar]]
variable[varTheta] assign[=] binary_operation[name[sigTheta] * name[sigTheta]]
variable[varPhiStarRot] assign[=] binary_operation[binary_operation[binary_operation[name[cSqr] * name[varPhiStar]] + binary_operation[name[sSqr] * name[varTheta]]] + binary_operation[binary_operation[binary_operation[constant[2.0] * name[covar]] * name[c]] * name[s]]]
variable[varThetaRot] assign[=] binary_operation[binary_operation[binary_operation[name[sSqr] * name[varPhiStar]] + binary_operation[name[cSqr] * name[varTheta]]] - binary_operation[binary_operation[binary_operation[constant[2.0] * name[covar]] * name[c]] * name[s]]]
variable[covarRot] assign[=] binary_operation[binary_operation[binary_operation[name[cSqr] - name[sSqr]] * name[covar]] + binary_operation[binary_operation[name[c] * name[s]] * binary_operation[name[varTheta] - name[varPhiStar]]]]
return[tuple[[<ast.Call object at 0x7da1b26af8b0>, <ast.Call object at 0x7da1b26aed10>, <ast.BinOp object at 0x7da1b26acd60>]]] | keyword[def] identifier[transformSkyCoordinateErrors] ( identifier[self] , identifier[phi] , identifier[theta] , identifier[sigPhiStar] , identifier[sigTheta] , identifier[rhoPhiTheta] = literal[int] ):
literal[string]
keyword[if] identifier[isscalar] ( identifier[rhoPhiTheta] ) keyword[and] keyword[not] identifier[isscalar] ( identifier[sigTheta] ):
identifier[rhoPhiTheta] = identifier[zeros_like] ( identifier[sigTheta] )+ identifier[rhoPhiTheta]
identifier[c] , identifier[s] = identifier[self] . identifier[_getJacobian] ( identifier[phi] , identifier[theta] )
identifier[cSqr] = identifier[c] * identifier[c]
identifier[sSqr] = identifier[s] * identifier[s]
identifier[covar] = identifier[sigPhiStar] * identifier[sigTheta] * identifier[rhoPhiTheta]
identifier[varPhiStar] = identifier[sigPhiStar] * identifier[sigPhiStar]
identifier[varTheta] = identifier[sigTheta] * identifier[sigTheta]
identifier[varPhiStarRot] = identifier[cSqr] * identifier[varPhiStar] + identifier[sSqr] * identifier[varTheta] + literal[int] * identifier[covar] * identifier[c] * identifier[s]
identifier[varThetaRot] = identifier[sSqr] * identifier[varPhiStar] + identifier[cSqr] * identifier[varTheta] - literal[int] * identifier[covar] * identifier[c] * identifier[s]
identifier[covarRot] =( identifier[cSqr] - identifier[sSqr] )* identifier[covar] + identifier[c] * identifier[s] *( identifier[varTheta] - identifier[varPhiStar] )
keyword[return] identifier[sqrt] ( identifier[varPhiStarRot] ), identifier[sqrt] ( identifier[varThetaRot] ), identifier[covarRot] / identifier[sqrt] ( identifier[varPhiStarRot] * identifier[varThetaRot] ) | def transformSkyCoordinateErrors(self, phi, theta, sigPhiStar, sigTheta, rhoPhiTheta=0):
"""
Converts the sky coordinate errors from one reference system to another, including the covariance
term. Equations (1.5.4) and (1.5.20) from section 1.5 in the Hipparcos Explanatory Volume 1 are used.
Parameters
----------
phi - The longitude-like angle of the position of the source (radians).
theta - The latitude-like angle of the position of the source (radians).
sigPhiStar - Standard error in the longitude-like angle of the position of the source (radians or
sexagesimal units, including cos(latitude) term)
sigTheta - Standard error in the latitude-like angle of the position of the source (radians or
sexagesimal units)
Keywords (optional)
-------------------
rhoPhiTheta - Correlation coefficient of the position errors. Set to zero if this keyword is not
provided.
Retuns
------
sigPhiRotStar - The transformed standard error in the longitude-like angle (including
cos(latitude) factor)
sigThetaRot - The transformed standard error in the latitude-like angle.
rhoPhiThetaRot - The transformed correlation coefficient.
"""
if isscalar(rhoPhiTheta) and (not isscalar(sigTheta)):
rhoPhiTheta = zeros_like(sigTheta) + rhoPhiTheta # depends on [control=['if'], data=[]]
(c, s) = self._getJacobian(phi, theta)
cSqr = c * c
sSqr = s * s
covar = sigPhiStar * sigTheta * rhoPhiTheta
varPhiStar = sigPhiStar * sigPhiStar
varTheta = sigTheta * sigTheta
varPhiStarRot = cSqr * varPhiStar + sSqr * varTheta + 2.0 * covar * c * s
varThetaRot = sSqr * varPhiStar + cSqr * varTheta - 2.0 * covar * c * s
covarRot = (cSqr - sSqr) * covar + c * s * (varTheta - varPhiStar)
return (sqrt(varPhiStarRot), sqrt(varThetaRot), covarRot / sqrt(varPhiStarRot * varThetaRot)) |
def format_jid_instance_ext(jid, job):
'''
Format the jid correctly with jid included
'''
ret = format_job_instance(job)
ret.update({
'JID': jid,
'StartTime': jid_to_time(jid)})
return ret | def function[format_jid_instance_ext, parameter[jid, job]]:
constant[
Format the jid correctly with jid included
]
variable[ret] assign[=] call[name[format_job_instance], parameter[name[job]]]
call[name[ret].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1ca1990>, <ast.Constant object at 0x7da1b1ca15a0>], [<ast.Name object at 0x7da1b1ca16c0>, <ast.Call object at 0x7da1b1ca1840>]]]]
return[name[ret]] | keyword[def] identifier[format_jid_instance_ext] ( identifier[jid] , identifier[job] ):
literal[string]
identifier[ret] = identifier[format_job_instance] ( identifier[job] )
identifier[ret] . identifier[update] ({
literal[string] : identifier[jid] ,
literal[string] : identifier[jid_to_time] ( identifier[jid] )})
keyword[return] identifier[ret] | def format_jid_instance_ext(jid, job):
"""
Format the jid correctly with jid included
"""
ret = format_job_instance(job)
ret.update({'JID': jid, 'StartTime': jid_to_time(jid)})
return ret |
def report(self, filename=None):
""" Write details of each versioned target to file
:param string filename: file to write out the report to
Fields in the report:
invocation_id: A sequence number that increases each time a task is invoked
task_name: The name of the task
targets_hash: an id from a hash of all target ids to identify a VersionedTargetSet
target_id: target id
cache_key_id: the Id for the cache key
cache_key_hash: computed hash for the cache key
phase: What part of the validation check the values were captured
valid: True if the cache is valid for the VersionedTargetSet
"""
# TODO(zundel) set report to stream to the file
filename = filename or self._filename
if filename:
# Usually the directory exists from reporting initialization, but not if clean-all was a goal.
with safe_open(filename, 'w') as writer:
writer.write(
'invocation_id,task_name,targets_hash,target_id,cache_key_id,cache_key_hash,phase,valid'
+ '\n')
for task_report in self._task_reports.values():
task_report.report(writer) | def function[report, parameter[self, filename]]:
constant[ Write details of each versioned target to file
:param string filename: file to write out the report to
Fields in the report:
invocation_id: A sequence number that increases each time a task is invoked
task_name: The name of the task
targets_hash: an id from a hash of all target ids to identify a VersionedTargetSet
target_id: target id
cache_key_id: the Id for the cache key
cache_key_hash: computed hash for the cache key
phase: What part of the validation check the values were captured
valid: True if the cache is valid for the VersionedTargetSet
]
variable[filename] assign[=] <ast.BoolOp object at 0x7da1b1eef070>
if name[filename] begin[:]
with call[name[safe_open], parameter[name[filename], constant[w]]] begin[:]
call[name[writer].write, parameter[binary_operation[constant[invocation_id,task_name,targets_hash,target_id,cache_key_id,cache_key_hash,phase,valid] + constant[
]]]]
for taget[name[task_report]] in starred[call[name[self]._task_reports.values, parameter[]]] begin[:]
call[name[task_report].report, parameter[name[writer]]] | keyword[def] identifier[report] ( identifier[self] , identifier[filename] = keyword[None] ):
literal[string]
identifier[filename] = identifier[filename] keyword[or] identifier[self] . identifier[_filename]
keyword[if] identifier[filename] :
keyword[with] identifier[safe_open] ( identifier[filename] , literal[string] ) keyword[as] identifier[writer] :
identifier[writer] . identifier[write] (
literal[string]
+ literal[string] )
keyword[for] identifier[task_report] keyword[in] identifier[self] . identifier[_task_reports] . identifier[values] ():
identifier[task_report] . identifier[report] ( identifier[writer] ) | def report(self, filename=None):
""" Write details of each versioned target to file
:param string filename: file to write out the report to
Fields in the report:
invocation_id: A sequence number that increases each time a task is invoked
task_name: The name of the task
targets_hash: an id from a hash of all target ids to identify a VersionedTargetSet
target_id: target id
cache_key_id: the Id for the cache key
cache_key_hash: computed hash for the cache key
phase: What part of the validation check the values were captured
valid: True if the cache is valid for the VersionedTargetSet
"""
# TODO(zundel) set report to stream to the file
filename = filename or self._filename
if filename:
# Usually the directory exists from reporting initialization, but not if clean-all was a goal.
with safe_open(filename, 'w') as writer:
writer.write('invocation_id,task_name,targets_hash,target_id,cache_key_id,cache_key_hash,phase,valid' + '\n')
for task_report in self._task_reports.values():
task_report.report(writer) # depends on [control=['for'], data=['task_report']] # depends on [control=['with'], data=['writer']] # depends on [control=['if'], data=[]] |
def catch_and_return_token(func, self, *args, **kwargs):
"""Optionally defer exceptions and return a token instead
When `__DEFER_STEP_ERRORS__` is set on the implementing class
or instance, methods wrapped with this wrapper will
catch and save their :python:`GSSError` exceptions and
instead return the result token attached to the exception.
The exception can be later retrived through :python:`_last_err`
(and :python:`_last_tb` when Python 2 is in use).
"""
try:
return func(self, *args, **kwargs)
except GSSError as e:
if e.token is not None and self.__DEFER_STEP_ERRORS__:
self._last_err = e
# skip the "return func" line above in the traceback
if six.PY2:
self._last_tb = sys.exc_info()[2].tb_next.tb_next
else:
self._last_err.__traceback__ = e.__traceback__.tb_next
return e.token
else:
raise | def function[catch_and_return_token, parameter[func, self]]:
constant[Optionally defer exceptions and return a token instead
When `__DEFER_STEP_ERRORS__` is set on the implementing class
or instance, methods wrapped with this wrapper will
catch and save their :python:`GSSError` exceptions and
instead return the result token attached to the exception.
The exception can be later retrived through :python:`_last_err`
(and :python:`_last_tb` when Python 2 is in use).
]
<ast.Try object at 0x7da2054a7370> | keyword[def] identifier[catch_and_return_token] ( identifier[func] , identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
keyword[return] identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[GSSError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[token] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[__DEFER_STEP_ERRORS__] :
identifier[self] . identifier[_last_err] = identifier[e]
keyword[if] identifier[six] . identifier[PY2] :
identifier[self] . identifier[_last_tb] = identifier[sys] . identifier[exc_info] ()[ literal[int] ]. identifier[tb_next] . identifier[tb_next]
keyword[else] :
identifier[self] . identifier[_last_err] . identifier[__traceback__] = identifier[e] . identifier[__traceback__] . identifier[tb_next]
keyword[return] identifier[e] . identifier[token]
keyword[else] :
keyword[raise] | def catch_and_return_token(func, self, *args, **kwargs):
"""Optionally defer exceptions and return a token instead
When `__DEFER_STEP_ERRORS__` is set on the implementing class
or instance, methods wrapped with this wrapper will
catch and save their :python:`GSSError` exceptions and
instead return the result token attached to the exception.
The exception can be later retrived through :python:`_last_err`
(and :python:`_last_tb` when Python 2 is in use).
"""
try:
return func(self, *args, **kwargs) # depends on [control=['try'], data=[]]
except GSSError as e:
if e.token is not None and self.__DEFER_STEP_ERRORS__:
self._last_err = e
# skip the "return func" line above in the traceback
if six.PY2:
self._last_tb = sys.exc_info()[2].tb_next.tb_next # depends on [control=['if'], data=[]]
else:
self._last_err.__traceback__ = e.__traceback__.tb_next
return e.token # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']] |
async def add_shade_to_scene(self, shade_id, scene_id, position=None):
"""Add a shade to a scene."""
if position is None:
_shade = await self.get_shade(shade_id)
position = await _shade.get_current_position()
await (SceneMembers(self.request)).create_scene_member(
position, scene_id, shade_id
) | <ast.AsyncFunctionDef object at 0x7da1b0ae1240> | keyword[async] keyword[def] identifier[add_shade_to_scene] ( identifier[self] , identifier[shade_id] , identifier[scene_id] , identifier[position] = keyword[None] ):
literal[string]
keyword[if] identifier[position] keyword[is] keyword[None] :
identifier[_shade] = keyword[await] identifier[self] . identifier[get_shade] ( identifier[shade_id] )
identifier[position] = keyword[await] identifier[_shade] . identifier[get_current_position] ()
keyword[await] ( identifier[SceneMembers] ( identifier[self] . identifier[request] )). identifier[create_scene_member] (
identifier[position] , identifier[scene_id] , identifier[shade_id]
) | async def add_shade_to_scene(self, shade_id, scene_id, position=None):
"""Add a shade to a scene."""
if position is None:
_shade = await self.get_shade(shade_id)
position = await _shade.get_current_position() # depends on [control=['if'], data=['position']]
await SceneMembers(self.request).create_scene_member(position, scene_id, shade_id) |
def s3_get_url(url,
altexts=None,
client=None,
raiseonfail=False):
"""This gets a file from an S3 bucket based on its s3:// URL.
Parameters
----------
url : str
S3 URL to download. This should begin with 's3://'.
altexts : None or list of str
If not None, this is a list of alternate extensions to try for the file
other than the one provided in `filename`. For example, to get anything
that's an .sqlite where .sqlite.gz is expected, use altexts=[''] to
strip the .gz.
client : boto3.Client or None
If None, this function will instantiate a new `boto3.Client` object to
use in its operations. Alternatively, pass in an existing `boto3.Client`
instance to re-use it here.
raiseonfail : bool
If True, will re-raise whatever Exception caused the operation to fail
and break out immediately.
Returns
-------
str
Path to the downloaded filename or None if the download was
unsuccessful. The file will be downloaded into the current working
directory and will have a filename == basename of the file on S3.
"""
bucket_item = url.replace('s3://','')
bucket_item = bucket_item.split('/')
bucket = bucket_item[0]
filekey = '/'.join(bucket_item[1:])
return s3_get_file(bucket,
filekey,
bucket_item[-1],
altexts=altexts,
client=client,
raiseonfail=raiseonfail) | def function[s3_get_url, parameter[url, altexts, client, raiseonfail]]:
constant[This gets a file from an S3 bucket based on its s3:// URL.
Parameters
----------
url : str
S3 URL to download. This should begin with 's3://'.
altexts : None or list of str
If not None, this is a list of alternate extensions to try for the file
other than the one provided in `filename`. For example, to get anything
that's an .sqlite where .sqlite.gz is expected, use altexts=[''] to
strip the .gz.
client : boto3.Client or None
If None, this function will instantiate a new `boto3.Client` object to
use in its operations. Alternatively, pass in an existing `boto3.Client`
instance to re-use it here.
raiseonfail : bool
If True, will re-raise whatever Exception caused the operation to fail
and break out immediately.
Returns
-------
str
Path to the downloaded filename or None if the download was
unsuccessful. The file will be downloaded into the current working
directory and will have a filename == basename of the file on S3.
]
variable[bucket_item] assign[=] call[name[url].replace, parameter[constant[s3://], constant[]]]
variable[bucket_item] assign[=] call[name[bucket_item].split, parameter[constant[/]]]
variable[bucket] assign[=] call[name[bucket_item]][constant[0]]
variable[filekey] assign[=] call[constant[/].join, parameter[call[name[bucket_item]][<ast.Slice object at 0x7da1b00bca30>]]]
return[call[name[s3_get_file], parameter[name[bucket], name[filekey], call[name[bucket_item]][<ast.UnaryOp object at 0x7da1b00bdf00>]]]] | keyword[def] identifier[s3_get_url] ( identifier[url] ,
identifier[altexts] = keyword[None] ,
identifier[client] = keyword[None] ,
identifier[raiseonfail] = keyword[False] ):
literal[string]
identifier[bucket_item] = identifier[url] . identifier[replace] ( literal[string] , literal[string] )
identifier[bucket_item] = identifier[bucket_item] . identifier[split] ( literal[string] )
identifier[bucket] = identifier[bucket_item] [ literal[int] ]
identifier[filekey] = literal[string] . identifier[join] ( identifier[bucket_item] [ literal[int] :])
keyword[return] identifier[s3_get_file] ( identifier[bucket] ,
identifier[filekey] ,
identifier[bucket_item] [- literal[int] ],
identifier[altexts] = identifier[altexts] ,
identifier[client] = identifier[client] ,
identifier[raiseonfail] = identifier[raiseonfail] ) | def s3_get_url(url, altexts=None, client=None, raiseonfail=False):
"""This gets a file from an S3 bucket based on its s3:// URL.
Parameters
----------
url : str
S3 URL to download. This should begin with 's3://'.
altexts : None or list of str
If not None, this is a list of alternate extensions to try for the file
other than the one provided in `filename`. For example, to get anything
that's an .sqlite where .sqlite.gz is expected, use altexts=[''] to
strip the .gz.
client : boto3.Client or None
If None, this function will instantiate a new `boto3.Client` object to
use in its operations. Alternatively, pass in an existing `boto3.Client`
instance to re-use it here.
raiseonfail : bool
If True, will re-raise whatever Exception caused the operation to fail
and break out immediately.
Returns
-------
str
Path to the downloaded filename or None if the download was
unsuccessful. The file will be downloaded into the current working
directory and will have a filename == basename of the file on S3.
"""
bucket_item = url.replace('s3://', '')
bucket_item = bucket_item.split('/')
bucket = bucket_item[0]
filekey = '/'.join(bucket_item[1:])
return s3_get_file(bucket, filekey, bucket_item[-1], altexts=altexts, client=client, raiseonfail=raiseonfail) |
def build_graph(self, project, site, subject, session, scan,
size, email=None, invariants=Invariants.ALL,
fiber_file=DEFAULT_FIBER_FILE, atlas_file=None,
use_threads=False, callback=None):
"""
Builds a graph using the graph-services endpoint.
Arguments:
project (str): The project to use
site (str): The site in question
subject (str): The subject's identifier
session (str): The session (per subject)
scan (str): The scan identifier
size (str): Whether to return a big (grute.BIG) or small
(grute.SMALL) graph. For a better explanation, see m2g.io.
email (str : self.email)*: An email to notify
invariants (str[]: Invariants.ALL)*: An array of invariants to
compute. You can use the grute.Invariants class to construct a
list, or simply pass grute.Invariants.ALL to compute them all.
fiber_file (str: DEFAULT_FIBER_FILE)*: A local filename of an
MRI Studio .dat file
atlas_file (str: None)*: A local atlas file, in NIFTI .nii format.
If none is specified, the Desikan atlas is used by default.
use_threads (bool: False)*: Whether to run the download in a Python
thread. If set to True, the call to `build_graph` will end
quickly, and the `callback` will be called with the returned
status-code of the restful call as its only argument.
callback (function: None)*: The function to run upon completion of
the call, if using threads. (Will not be called if use_threads
is set to False.)
Returns:
HTTP Response if use_threads is False. Otherwise, None
Raises:
ValueError: When the supplied values are invalid (contain invalid
characters, bad email address supplied, etc.)
RemoteDataNotFoundError: When the data cannot be processed due to
a server error.
"""
if email is None:
email = self.email
if not set(invariants) <= set(Invariants.ALL):
raise ValueError("Invariants must be a subset of Invariants.ALL.")
if use_threads and callback is not None:
if not hasattr(callback, '__call__'):
raise ValueError("callback must be a function.")
if len(inspect.getargspec(callback).args) != 1:
raise ValueError("callback must take exactly 1 argument.")
# Once we get here, we know the callback is
if size not in [self.BIG, self.SMALL]:
raise ValueError("size must be either grute.BIG or grute.SMALL.")
url = "buildgraph/{}/{}/{}/{}/{}/{}/{}/{}/".format(
project,
site,
subject,
session,
scan,
size,
email,
"/".join(invariants)
)
if " " in url:
raise ValueError("Arguments must not contain spaces.")
if use_threads:
# Run in the background.
download_thread = threading.Thread(
target=self._run_build_graph,
args=[url, fiber_file, atlas_file, callback]
)
download_thread.start()
else:
# Run in the foreground.
return self._run_build_graph(url, fiber_file, atlas_file)
return | def function[build_graph, parameter[self, project, site, subject, session, scan, size, email, invariants, fiber_file, atlas_file, use_threads, callback]]:
constant[
Builds a graph using the graph-services endpoint.
Arguments:
project (str): The project to use
site (str): The site in question
subject (str): The subject's identifier
session (str): The session (per subject)
scan (str): The scan identifier
size (str): Whether to return a big (grute.BIG) or small
(grute.SMALL) graph. For a better explanation, see m2g.io.
email (str : self.email)*: An email to notify
invariants (str[]: Invariants.ALL)*: An array of invariants to
compute. You can use the grute.Invariants class to construct a
list, or simply pass grute.Invariants.ALL to compute them all.
fiber_file (str: DEFAULT_FIBER_FILE)*: A local filename of an
MRI Studio .dat file
atlas_file (str: None)*: A local atlas file, in NIFTI .nii format.
If none is specified, the Desikan atlas is used by default.
use_threads (bool: False)*: Whether to run the download in a Python
thread. If set to True, the call to `build_graph` will end
quickly, and the `callback` will be called with the returned
status-code of the restful call as its only argument.
callback (function: None)*: The function to run upon completion of
the call, if using threads. (Will not be called if use_threads
is set to False.)
Returns:
HTTP Response if use_threads is False. Otherwise, None
Raises:
ValueError: When the supplied values are invalid (contain invalid
characters, bad email address supplied, etc.)
RemoteDataNotFoundError: When the data cannot be processed due to
a server error.
]
if compare[name[email] is constant[None]] begin[:]
variable[email] assign[=] name[self].email
if <ast.UnaryOp object at 0x7da1b0292dd0> begin[:]
<ast.Raise object at 0x7da1b0290e50>
if <ast.BoolOp object at 0x7da1b0290c10> begin[:]
if <ast.UnaryOp object at 0x7da1b0290e80> begin[:]
<ast.Raise object at 0x7da1b02920e0>
if compare[call[name[len], parameter[call[name[inspect].getargspec, parameter[name[callback]]].args]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b0293e20>
if compare[name[size] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da1b0291300>, <ast.Attribute object at 0x7da1b0290670>]]] begin[:]
<ast.Raise object at 0x7da1b0291a20>
variable[url] assign[=] call[constant[buildgraph/{}/{}/{}/{}/{}/{}/{}/{}/].format, parameter[name[project], name[site], name[subject], name[session], name[scan], name[size], name[email], call[constant[/].join, parameter[name[invariants]]]]]
if compare[constant[ ] in name[url]] begin[:]
<ast.Raise object at 0x7da1b021f190>
if name[use_threads] begin[:]
variable[download_thread] assign[=] call[name[threading].Thread, parameter[]]
call[name[download_thread].start, parameter[]]
return[None] | keyword[def] identifier[build_graph] ( identifier[self] , identifier[project] , identifier[site] , identifier[subject] , identifier[session] , identifier[scan] ,
identifier[size] , identifier[email] = keyword[None] , identifier[invariants] = identifier[Invariants] . identifier[ALL] ,
identifier[fiber_file] = identifier[DEFAULT_FIBER_FILE] , identifier[atlas_file] = keyword[None] ,
identifier[use_threads] = keyword[False] , identifier[callback] = keyword[None] ):
literal[string]
keyword[if] identifier[email] keyword[is] keyword[None] :
identifier[email] = identifier[self] . identifier[email]
keyword[if] keyword[not] identifier[set] ( identifier[invariants] )<= identifier[set] ( identifier[Invariants] . identifier[ALL] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[use_threads] keyword[and] identifier[callback] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[callback] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[len] ( identifier[inspect] . identifier[getargspec] ( identifier[callback] ). identifier[args] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[size] keyword[not] keyword[in] [ identifier[self] . identifier[BIG] , identifier[self] . identifier[SMALL] ]:
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[url] = literal[string] . identifier[format] (
identifier[project] ,
identifier[site] ,
identifier[subject] ,
identifier[session] ,
identifier[scan] ,
identifier[size] ,
identifier[email] ,
literal[string] . identifier[join] ( identifier[invariants] )
)
keyword[if] literal[string] keyword[in] identifier[url] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[use_threads] :
identifier[download_thread] = identifier[threading] . identifier[Thread] (
identifier[target] = identifier[self] . identifier[_run_build_graph] ,
identifier[args] =[ identifier[url] , identifier[fiber_file] , identifier[atlas_file] , identifier[callback] ]
)
identifier[download_thread] . identifier[start] ()
keyword[else] :
keyword[return] identifier[self] . identifier[_run_build_graph] ( identifier[url] , identifier[fiber_file] , identifier[atlas_file] )
keyword[return] | def build_graph(self, project, site, subject, session, scan, size, email=None, invariants=Invariants.ALL, fiber_file=DEFAULT_FIBER_FILE, atlas_file=None, use_threads=False, callback=None):
"""
Builds a graph using the graph-services endpoint.
Arguments:
project (str): The project to use
site (str): The site in question
subject (str): The subject's identifier
session (str): The session (per subject)
scan (str): The scan identifier
size (str): Whether to return a big (grute.BIG) or small
(grute.SMALL) graph. For a better explanation, see m2g.io.
email (str : self.email)*: An email to notify
invariants (str[]: Invariants.ALL)*: An array of invariants to
compute. You can use the grute.Invariants class to construct a
list, or simply pass grute.Invariants.ALL to compute them all.
fiber_file (str: DEFAULT_FIBER_FILE)*: A local filename of an
MRI Studio .dat file
atlas_file (str: None)*: A local atlas file, in NIFTI .nii format.
If none is specified, the Desikan atlas is used by default.
use_threads (bool: False)*: Whether to run the download in a Python
thread. If set to True, the call to `build_graph` will end
quickly, and the `callback` will be called with the returned
status-code of the restful call as its only argument.
callback (function: None)*: The function to run upon completion of
the call, if using threads. (Will not be called if use_threads
is set to False.)
Returns:
HTTP Response if use_threads is False. Otherwise, None
Raises:
ValueError: When the supplied values are invalid (contain invalid
characters, bad email address supplied, etc.)
RemoteDataNotFoundError: When the data cannot be processed due to
a server error.
"""
if email is None:
email = self.email # depends on [control=['if'], data=['email']]
if not set(invariants) <= set(Invariants.ALL):
raise ValueError('Invariants must be a subset of Invariants.ALL.') # depends on [control=['if'], data=[]]
if use_threads and callback is not None:
if not hasattr(callback, '__call__'):
raise ValueError('callback must be a function.') # depends on [control=['if'], data=[]]
if len(inspect.getargspec(callback).args) != 1:
raise ValueError('callback must take exactly 1 argument.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Once we get here, we know the callback is
if size not in [self.BIG, self.SMALL]:
raise ValueError('size must be either grute.BIG or grute.SMALL.') # depends on [control=['if'], data=[]]
url = 'buildgraph/{}/{}/{}/{}/{}/{}/{}/{}/'.format(project, site, subject, session, scan, size, email, '/'.join(invariants))
if ' ' in url:
raise ValueError('Arguments must not contain spaces.') # depends on [control=['if'], data=[]]
if use_threads:
# Run in the background.
download_thread = threading.Thread(target=self._run_build_graph, args=[url, fiber_file, atlas_file, callback])
download_thread.start() # depends on [control=['if'], data=[]]
else:
# Run in the foreground.
return self._run_build_graph(url, fiber_file, atlas_file)
return |
def split_kwargs(relaxation_kwds):
"""Split relaxation keywords to keywords for optimizer and others"""
optimizer_keys_list = [
'step_method',
'linesearch',
'eta_max',
'eta',
'm',
'linesearch_first'
]
optimizer_kwargs = { k:relaxation_kwds.pop(k) for k in optimizer_keys_list if k in relaxation_kwds }
if 'm' in optimizer_kwargs:
optimizer_kwargs['momentum'] = optimizer_kwargs.pop('m')
return optimizer_kwargs, relaxation_kwds | def function[split_kwargs, parameter[relaxation_kwds]]:
constant[Split relaxation keywords to keywords for optimizer and others]
variable[optimizer_keys_list] assign[=] list[[<ast.Constant object at 0x7da1b12f3d90>, <ast.Constant object at 0x7da1b12f3d60>, <ast.Constant object at 0x7da1b12f3d30>, <ast.Constant object at 0x7da1b12f3d00>, <ast.Constant object at 0x7da1b12f3cd0>, <ast.Constant object at 0x7da1b12f3ca0>]]
variable[optimizer_kwargs] assign[=] <ast.DictComp object at 0x7da1b12f3c10>
if compare[constant[m] in name[optimizer_kwargs]] begin[:]
call[name[optimizer_kwargs]][constant[momentum]] assign[=] call[name[optimizer_kwargs].pop, parameter[constant[m]]]
return[tuple[[<ast.Name object at 0x7da1b12f38b0>, <ast.Name object at 0x7da1b12f3850>]]] | keyword[def] identifier[split_kwargs] ( identifier[relaxation_kwds] ):
literal[string]
identifier[optimizer_keys_list] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
]
identifier[optimizer_kwargs] ={ identifier[k] : identifier[relaxation_kwds] . identifier[pop] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[optimizer_keys_list] keyword[if] identifier[k] keyword[in] identifier[relaxation_kwds] }
keyword[if] literal[string] keyword[in] identifier[optimizer_kwargs] :
identifier[optimizer_kwargs] [ literal[string] ]= identifier[optimizer_kwargs] . identifier[pop] ( literal[string] )
keyword[return] identifier[optimizer_kwargs] , identifier[relaxation_kwds] | def split_kwargs(relaxation_kwds):
"""Split relaxation keywords to keywords for optimizer and others"""
optimizer_keys_list = ['step_method', 'linesearch', 'eta_max', 'eta', 'm', 'linesearch_first']
optimizer_kwargs = {k: relaxation_kwds.pop(k) for k in optimizer_keys_list if k in relaxation_kwds}
if 'm' in optimizer_kwargs:
optimizer_kwargs['momentum'] = optimizer_kwargs.pop('m') # depends on [control=['if'], data=['optimizer_kwargs']]
return (optimizer_kwargs, relaxation_kwds) |
def listen(self, address, ssl=False, family=0, flags=0, ipc=False, backlog=128):
"""Create a new transport, bind it to *address*, and start listening
for new connections.
See :func:`create_server` for a description of *address* and the
supported keyword arguments.
"""
handles = []
handle_args = ()
if isinstance(address, six.string_types):
handle_type = pyuv.Pipe
handle_args = (ipc,)
addresses = [address]
elif isinstance(address, tuple):
handle_type = pyuv.TCP
result = getaddrinfo(address[0], address[1], family, socket.SOCK_STREAM,
socket.IPPROTO_TCP, flags)
addresses = [res[4] for res in result]
elif isinstance(address, pyuv.Stream):
handles.append(address)
addresses = []
else:
raise TypeError('expecting a string, tuple or pyuv.Stream')
for addr in addresses:
handle = handle_type(self._hub.loop, *handle_args)
try:
if compat.pyuv_pipe_helper(handle, handle_args, 'bind', addr):
handles.append(handle)
break
handle.bind(addr)
except pyuv.error.UVError as e:
self._log.warning('bind error {!r}, skipping {}', e[0], saddr(addr))
continue
handles.append(handle)
addresses = []
for handle in handles:
if backlog is not None:
callback = functools.partial(self._on_new_connection, ssl=ssl)
handle.listen(callback, backlog)
addr = handle.getsockname()
self._log.debug('listen on {}', saddr(addr))
addresses.append(addr)
self._handles += handles
self._addresses += addresses | def function[listen, parameter[self, address, ssl, family, flags, ipc, backlog]]:
constant[Create a new transport, bind it to *address*, and start listening
for new connections.
See :func:`create_server` for a description of *address* and the
supported keyword arguments.
]
variable[handles] assign[=] list[[]]
variable[handle_args] assign[=] tuple[[]]
if call[name[isinstance], parameter[name[address], name[six].string_types]] begin[:]
variable[handle_type] assign[=] name[pyuv].Pipe
variable[handle_args] assign[=] tuple[[<ast.Name object at 0x7da1b0300f10>]]
variable[addresses] assign[=] list[[<ast.Name object at 0x7da1b0303610>]]
for taget[name[addr]] in starred[name[addresses]] begin[:]
variable[handle] assign[=] call[name[handle_type], parameter[name[self]._hub.loop, <ast.Starred object at 0x7da1b0275180>]]
<ast.Try object at 0x7da1b02748e0>
call[name[handles].append, parameter[name[handle]]]
variable[addresses] assign[=] list[[]]
for taget[name[handle]] in starred[name[handles]] begin[:]
if compare[name[backlog] is_not constant[None]] begin[:]
variable[callback] assign[=] call[name[functools].partial, parameter[name[self]._on_new_connection]]
call[name[handle].listen, parameter[name[callback], name[backlog]]]
variable[addr] assign[=] call[name[handle].getsockname, parameter[]]
call[name[self]._log.debug, parameter[constant[listen on {}], call[name[saddr], parameter[name[addr]]]]]
call[name[addresses].append, parameter[name[addr]]]
<ast.AugAssign object at 0x7da20c990190>
<ast.AugAssign object at 0x7da20c9906a0> | keyword[def] identifier[listen] ( identifier[self] , identifier[address] , identifier[ssl] = keyword[False] , identifier[family] = literal[int] , identifier[flags] = literal[int] , identifier[ipc] = keyword[False] , identifier[backlog] = literal[int] ):
literal[string]
identifier[handles] =[]
identifier[handle_args] =()
keyword[if] identifier[isinstance] ( identifier[address] , identifier[six] . identifier[string_types] ):
identifier[handle_type] = identifier[pyuv] . identifier[Pipe]
identifier[handle_args] =( identifier[ipc] ,)
identifier[addresses] =[ identifier[address] ]
keyword[elif] identifier[isinstance] ( identifier[address] , identifier[tuple] ):
identifier[handle_type] = identifier[pyuv] . identifier[TCP]
identifier[result] = identifier[getaddrinfo] ( identifier[address] [ literal[int] ], identifier[address] [ literal[int] ], identifier[family] , identifier[socket] . identifier[SOCK_STREAM] ,
identifier[socket] . identifier[IPPROTO_TCP] , identifier[flags] )
identifier[addresses] =[ identifier[res] [ literal[int] ] keyword[for] identifier[res] keyword[in] identifier[result] ]
keyword[elif] identifier[isinstance] ( identifier[address] , identifier[pyuv] . identifier[Stream] ):
identifier[handles] . identifier[append] ( identifier[address] )
identifier[addresses] =[]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[for] identifier[addr] keyword[in] identifier[addresses] :
identifier[handle] = identifier[handle_type] ( identifier[self] . identifier[_hub] . identifier[loop] ,* identifier[handle_args] )
keyword[try] :
keyword[if] identifier[compat] . identifier[pyuv_pipe_helper] ( identifier[handle] , identifier[handle_args] , literal[string] , identifier[addr] ):
identifier[handles] . identifier[append] ( identifier[handle] )
keyword[break]
identifier[handle] . identifier[bind] ( identifier[addr] )
keyword[except] identifier[pyuv] . identifier[error] . identifier[UVError] keyword[as] identifier[e] :
identifier[self] . identifier[_log] . identifier[warning] ( literal[string] , identifier[e] [ literal[int] ], identifier[saddr] ( identifier[addr] ))
keyword[continue]
identifier[handles] . identifier[append] ( identifier[handle] )
identifier[addresses] =[]
keyword[for] identifier[handle] keyword[in] identifier[handles] :
keyword[if] identifier[backlog] keyword[is] keyword[not] keyword[None] :
identifier[callback] = identifier[functools] . identifier[partial] ( identifier[self] . identifier[_on_new_connection] , identifier[ssl] = identifier[ssl] )
identifier[handle] . identifier[listen] ( identifier[callback] , identifier[backlog] )
identifier[addr] = identifier[handle] . identifier[getsockname] ()
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] , identifier[saddr] ( identifier[addr] ))
identifier[addresses] . identifier[append] ( identifier[addr] )
identifier[self] . identifier[_handles] += identifier[handles]
identifier[self] . identifier[_addresses] += identifier[addresses] | def listen(self, address, ssl=False, family=0, flags=0, ipc=False, backlog=128):
"""Create a new transport, bind it to *address*, and start listening
for new connections.
See :func:`create_server` for a description of *address* and the
supported keyword arguments.
"""
handles = []
handle_args = ()
if isinstance(address, six.string_types):
handle_type = pyuv.Pipe
handle_args = (ipc,)
addresses = [address] # depends on [control=['if'], data=[]]
elif isinstance(address, tuple):
handle_type = pyuv.TCP
result = getaddrinfo(address[0], address[1], family, socket.SOCK_STREAM, socket.IPPROTO_TCP, flags)
addresses = [res[4] for res in result] # depends on [control=['if'], data=[]]
elif isinstance(address, pyuv.Stream):
handles.append(address)
addresses = [] # depends on [control=['if'], data=[]]
else:
raise TypeError('expecting a string, tuple or pyuv.Stream')
for addr in addresses:
handle = handle_type(self._hub.loop, *handle_args)
try:
if compat.pyuv_pipe_helper(handle, handle_args, 'bind', addr):
handles.append(handle)
break # depends on [control=['if'], data=[]]
handle.bind(addr) # depends on [control=['try'], data=[]]
except pyuv.error.UVError as e:
self._log.warning('bind error {!r}, skipping {}', e[0], saddr(addr))
continue # depends on [control=['except'], data=['e']]
handles.append(handle) # depends on [control=['for'], data=['addr']]
addresses = []
for handle in handles:
if backlog is not None:
callback = functools.partial(self._on_new_connection, ssl=ssl)
handle.listen(callback, backlog)
addr = handle.getsockname()
self._log.debug('listen on {}', saddr(addr)) # depends on [control=['if'], data=['backlog']]
addresses.append(addr) # depends on [control=['for'], data=['handle']]
self._handles += handles
self._addresses += addresses |
def interev_mag(times, mags, size=(10.5, 7.5), **kwargs):
"""
Plot inter-event times against magnitude.
:type times: list
:param times: list of the detection times, must be sorted the same as mags
:type mags: list
:param mags: list of magnitudes
:type size: tuple
:param size: Size of figure in inches.
:returns: :class:`matplotlib.figure.Figure`
.. rubric:: Example
>>> from obspy.clients.fdsn import Client
>>> from obspy import UTCDateTime
>>> from eqcorrscan.utils.plotting import interev_mag
>>> client = Client('IRIS')
>>> t1 = UTCDateTime('2012-03-26T00:00:00')
>>> t2 = t1 + (3 * 86400)
>>> catalog = client.get_events(starttime=t1, endtime=t2, minmagnitude=3)
>>> magnitudes = [event.preferred_magnitude().mag for event in catalog]
>>> times = [event.preferred_origin().time for event in catalog]
>>> interev_mag(times, magnitudes) # doctest: +SKIP
.. plot::
from obspy.clients.fdsn import Client
from obspy import UTCDateTime
from eqcorrscan.utils.plotting import interev_mag
client = Client('IRIS')
t1 = UTCDateTime('2012-03-26T00:00:00')
t2 = t1 + (3 * 86400)
catalog = client.get_events(starttime=t1, endtime=t2, minmagnitude=3)
magnitudes = [event.preferred_magnitude().mag for event in catalog]
times = [event.preferred_origin().time for event in catalog]
interev_mag(times, magnitudes)
"""
import matplotlib.pyplot as plt
info = [(times[i], mags[i]) for i in range(len(times))]
info.sort(key=lambda tup: tup[0])
times = [x[0] for x in info]
mags = [x[1] for x in info]
# Make two subplots next to each other of time before and time after
fig, axes = plt.subplots(1, 2, sharey=True, figsize=size)
axes = axes.ravel()
pre_times = []
post_times = []
for i in range(len(times)):
if i > 0:
pre_times.append((times[i] - times[i - 1]) / 60)
if i < len(times) - 1:
post_times.append((times[i + 1] - times[i]) / 60)
axes[0].scatter(pre_times, mags[1:])
axes[0].set_title('Pre-event times')
axes[0].set_ylabel('Magnitude')
axes[0].set_xlabel('Time (Minutes)')
plt.setp(axes[0].xaxis.get_majorticklabels(), rotation=30)
axes[1].scatter(pre_times, mags[:-1])
axes[1].set_title('Post-event times')
axes[1].set_xlabel('Time (Minutes)')
axes[0].autoscale(enable=True, tight=True)
axes[1].autoscale(enable=True, tight=True)
plt.setp(axes[1].xaxis.get_majorticklabels(), rotation=30)
fig = _finalise_figure(fig=fig, **kwargs) # pragma: no cover
return fig | def function[interev_mag, parameter[times, mags, size]]:
constant[
Plot inter-event times against magnitude.
:type times: list
:param times: list of the detection times, must be sorted the same as mags
:type mags: list
:param mags: list of magnitudes
:type size: tuple
:param size: Size of figure in inches.
:returns: :class:`matplotlib.figure.Figure`
.. rubric:: Example
>>> from obspy.clients.fdsn import Client
>>> from obspy import UTCDateTime
>>> from eqcorrscan.utils.plotting import interev_mag
>>> client = Client('IRIS')
>>> t1 = UTCDateTime('2012-03-26T00:00:00')
>>> t2 = t1 + (3 * 86400)
>>> catalog = client.get_events(starttime=t1, endtime=t2, minmagnitude=3)
>>> magnitudes = [event.preferred_magnitude().mag for event in catalog]
>>> times = [event.preferred_origin().time for event in catalog]
>>> interev_mag(times, magnitudes) # doctest: +SKIP
.. plot::
from obspy.clients.fdsn import Client
from obspy import UTCDateTime
from eqcorrscan.utils.plotting import interev_mag
client = Client('IRIS')
t1 = UTCDateTime('2012-03-26T00:00:00')
t2 = t1 + (3 * 86400)
catalog = client.get_events(starttime=t1, endtime=t2, minmagnitude=3)
magnitudes = [event.preferred_magnitude().mag for event in catalog]
times = [event.preferred_origin().time for event in catalog]
interev_mag(times, magnitudes)
]
import module[matplotlib.pyplot] as alias[plt]
variable[info] assign[=] <ast.ListComp object at 0x7da20c76c6a0>
call[name[info].sort, parameter[]]
variable[times] assign[=] <ast.ListComp object at 0x7da20c76cd30>
variable[mags] assign[=] <ast.ListComp object at 0x7da20c76dc30>
<ast.Tuple object at 0x7da20c76c700> assign[=] call[name[plt].subplots, parameter[constant[1], constant[2]]]
variable[axes] assign[=] call[name[axes].ravel, parameter[]]
variable[pre_times] assign[=] list[[]]
variable[post_times] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[times]]]]]] begin[:]
if compare[name[i] greater[>] constant[0]] begin[:]
call[name[pre_times].append, parameter[binary_operation[binary_operation[call[name[times]][name[i]] - call[name[times]][binary_operation[name[i] - constant[1]]]] / constant[60]]]]
if compare[name[i] less[<] binary_operation[call[name[len], parameter[name[times]]] - constant[1]]] begin[:]
call[name[post_times].append, parameter[binary_operation[binary_operation[call[name[times]][binary_operation[name[i] + constant[1]]] - call[name[times]][name[i]]] / constant[60]]]]
call[call[name[axes]][constant[0]].scatter, parameter[name[pre_times], call[name[mags]][<ast.Slice object at 0x7da20c76d930>]]]
call[call[name[axes]][constant[0]].set_title, parameter[constant[Pre-event times]]]
call[call[name[axes]][constant[0]].set_ylabel, parameter[constant[Magnitude]]]
call[call[name[axes]][constant[0]].set_xlabel, parameter[constant[Time (Minutes)]]]
call[name[plt].setp, parameter[call[call[name[axes]][constant[0]].xaxis.get_majorticklabels, parameter[]]]]
call[call[name[axes]][constant[1]].scatter, parameter[name[pre_times], call[name[mags]][<ast.Slice object at 0x7da20c76cc70>]]]
call[call[name[axes]][constant[1]].set_title, parameter[constant[Post-event times]]]
call[call[name[axes]][constant[1]].set_xlabel, parameter[constant[Time (Minutes)]]]
call[call[name[axes]][constant[0]].autoscale, parameter[]]
call[call[name[axes]][constant[1]].autoscale, parameter[]]
call[name[plt].setp, parameter[call[call[name[axes]][constant[1]].xaxis.get_majorticklabels, parameter[]]]]
variable[fig] assign[=] call[name[_finalise_figure], parameter[]]
return[name[fig]] | keyword[def] identifier[interev_mag] ( identifier[times] , identifier[mags] , identifier[size] =( literal[int] , literal[int] ),** identifier[kwargs] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
identifier[info] =[( identifier[times] [ identifier[i] ], identifier[mags] [ identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[times] ))]
identifier[info] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[tup] : identifier[tup] [ literal[int] ])
identifier[times] =[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[info] ]
identifier[mags] =[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[info] ]
identifier[fig] , identifier[axes] = identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[sharey] = keyword[True] , identifier[figsize] = identifier[size] )
identifier[axes] = identifier[axes] . identifier[ravel] ()
identifier[pre_times] =[]
identifier[post_times] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[times] )):
keyword[if] identifier[i] > literal[int] :
identifier[pre_times] . identifier[append] (( identifier[times] [ identifier[i] ]- identifier[times] [ identifier[i] - literal[int] ])/ literal[int] )
keyword[if] identifier[i] < identifier[len] ( identifier[times] )- literal[int] :
identifier[post_times] . identifier[append] (( identifier[times] [ identifier[i] + literal[int] ]- identifier[times] [ identifier[i] ])/ literal[int] )
identifier[axes] [ literal[int] ]. identifier[scatter] ( identifier[pre_times] , identifier[mags] [ literal[int] :])
identifier[axes] [ literal[int] ]. identifier[set_title] ( literal[string] )
identifier[axes] [ literal[int] ]. identifier[set_ylabel] ( literal[string] )
identifier[axes] [ literal[int] ]. identifier[set_xlabel] ( literal[string] )
identifier[plt] . identifier[setp] ( identifier[axes] [ literal[int] ]. identifier[xaxis] . identifier[get_majorticklabels] (), identifier[rotation] = literal[int] )
identifier[axes] [ literal[int] ]. identifier[scatter] ( identifier[pre_times] , identifier[mags] [:- literal[int] ])
identifier[axes] [ literal[int] ]. identifier[set_title] ( literal[string] )
identifier[axes] [ literal[int] ]. identifier[set_xlabel] ( literal[string] )
identifier[axes] [ literal[int] ]. identifier[autoscale] ( identifier[enable] = keyword[True] , identifier[tight] = keyword[True] )
identifier[axes] [ literal[int] ]. identifier[autoscale] ( identifier[enable] = keyword[True] , identifier[tight] = keyword[True] )
identifier[plt] . identifier[setp] ( identifier[axes] [ literal[int] ]. identifier[xaxis] . identifier[get_majorticklabels] (), identifier[rotation] = literal[int] )
identifier[fig] = identifier[_finalise_figure] ( identifier[fig] = identifier[fig] ,** identifier[kwargs] )
keyword[return] identifier[fig] | def interev_mag(times, mags, size=(10.5, 7.5), **kwargs):
"""
Plot inter-event times against magnitude.
:type times: list
:param times: list of the detection times, must be sorted the same as mags
:type mags: list
:param mags: list of magnitudes
:type size: tuple
:param size: Size of figure in inches.
:returns: :class:`matplotlib.figure.Figure`
.. rubric:: Example
>>> from obspy.clients.fdsn import Client
>>> from obspy import UTCDateTime
>>> from eqcorrscan.utils.plotting import interev_mag
>>> client = Client('IRIS')
>>> t1 = UTCDateTime('2012-03-26T00:00:00')
>>> t2 = t1 + (3 * 86400)
>>> catalog = client.get_events(starttime=t1, endtime=t2, minmagnitude=3)
>>> magnitudes = [event.preferred_magnitude().mag for event in catalog]
>>> times = [event.preferred_origin().time for event in catalog]
>>> interev_mag(times, magnitudes) # doctest: +SKIP
.. plot::
from obspy.clients.fdsn import Client
from obspy import UTCDateTime
from eqcorrscan.utils.plotting import interev_mag
client = Client('IRIS')
t1 = UTCDateTime('2012-03-26T00:00:00')
t2 = t1 + (3 * 86400)
catalog = client.get_events(starttime=t1, endtime=t2, minmagnitude=3)
magnitudes = [event.preferred_magnitude().mag for event in catalog]
times = [event.preferred_origin().time for event in catalog]
interev_mag(times, magnitudes)
"""
import matplotlib.pyplot as plt
info = [(times[i], mags[i]) for i in range(len(times))]
info.sort(key=lambda tup: tup[0])
times = [x[0] for x in info]
mags = [x[1] for x in info]
# Make two subplots next to each other of time before and time after
(fig, axes) = plt.subplots(1, 2, sharey=True, figsize=size)
axes = axes.ravel()
pre_times = []
post_times = []
for i in range(len(times)):
if i > 0:
pre_times.append((times[i] - times[i - 1]) / 60) # depends on [control=['if'], data=['i']]
if i < len(times) - 1:
post_times.append((times[i + 1] - times[i]) / 60) # depends on [control=['if'], data=['i']] # depends on [control=['for'], data=['i']]
axes[0].scatter(pre_times, mags[1:])
axes[0].set_title('Pre-event times')
axes[0].set_ylabel('Magnitude')
axes[0].set_xlabel('Time (Minutes)')
plt.setp(axes[0].xaxis.get_majorticklabels(), rotation=30)
axes[1].scatter(pre_times, mags[:-1])
axes[1].set_title('Post-event times')
axes[1].set_xlabel('Time (Minutes)')
axes[0].autoscale(enable=True, tight=True)
axes[1].autoscale(enable=True, tight=True)
plt.setp(axes[1].xaxis.get_majorticklabels(), rotation=30)
fig = _finalise_figure(fig=fig, **kwargs) # pragma: no cover
return fig |
def run_direct(self, **kwargs):
"""
Run the motor at the duty cycle specified by `duty_cycle_sp`.
Unlike other run commands, changing `duty_cycle_sp` while running *will*
take effect immediately.
"""
for key in kwargs:
setattr(self, key, kwargs[key])
self.command = self.COMMAND_RUN_DIRECT | def function[run_direct, parameter[self]]:
constant[
Run the motor at the duty cycle specified by `duty_cycle_sp`.
Unlike other run commands, changing `duty_cycle_sp` while running *will*
take effect immediately.
]
for taget[name[key]] in starred[name[kwargs]] begin[:]
call[name[setattr], parameter[name[self], name[key], call[name[kwargs]][name[key]]]]
name[self].command assign[=] name[self].COMMAND_RUN_DIRECT | keyword[def] identifier[run_direct] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[key] keyword[in] identifier[kwargs] :
identifier[setattr] ( identifier[self] , identifier[key] , identifier[kwargs] [ identifier[key] ])
identifier[self] . identifier[command] = identifier[self] . identifier[COMMAND_RUN_DIRECT] | def run_direct(self, **kwargs):
"""
Run the motor at the duty cycle specified by `duty_cycle_sp`.
Unlike other run commands, changing `duty_cycle_sp` while running *will*
take effect immediately.
"""
for key in kwargs:
setattr(self, key, kwargs[key]) # depends on [control=['for'], data=['key']]
self.command = self.COMMAND_RUN_DIRECT |
def register(self, field, shape, dtype):
'''Register a field as a tensor with specified shape and type.
A `Tensor` of the given shape and type will be registered in this
object's `fields` dict.
Parameters
----------
field : str
The name of the field
shape : iterable of `int` or `None`
The shape of the output variable.
This does not include a dimension for multiple outputs.
`None` may be used to indicate variable-length outputs
dtype : type
The data type of the field
Raises
------
ParameterError
If dtype or shape are improperly specified
'''
if not isinstance(dtype, type):
raise ParameterError('dtype={} must be a type'.format(dtype))
if not (isinstance(shape, Iterable) and
all([s is None or isinstance(s, int) for s in shape])):
raise ParameterError('shape={} must be an iterable of integers'.format(shape))
self.fields[self.scope(field)] = Tensor(tuple(shape), dtype) | def function[register, parameter[self, field, shape, dtype]]:
constant[Register a field as a tensor with specified shape and type.
A `Tensor` of the given shape and type will be registered in this
object's `fields` dict.
Parameters
----------
field : str
The name of the field
shape : iterable of `int` or `None`
The shape of the output variable.
This does not include a dimension for multiple outputs.
`None` may be used to indicate variable-length outputs
dtype : type
The data type of the field
Raises
------
ParameterError
If dtype or shape are improperly specified
]
if <ast.UnaryOp object at 0x7da1b10af130> begin[:]
<ast.Raise object at 0x7da1b10ae5f0>
if <ast.UnaryOp object at 0x7da1b10adc00> begin[:]
<ast.Raise object at 0x7da1b10ae890>
call[name[self].fields][call[name[self].scope, parameter[name[field]]]] assign[=] call[name[Tensor], parameter[call[name[tuple], parameter[name[shape]]], name[dtype]]] | keyword[def] identifier[register] ( identifier[self] , identifier[field] , identifier[shape] , identifier[dtype] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[dtype] , identifier[type] ):
keyword[raise] identifier[ParameterError] ( literal[string] . identifier[format] ( identifier[dtype] ))
keyword[if] keyword[not] ( identifier[isinstance] ( identifier[shape] , identifier[Iterable] ) keyword[and]
identifier[all] ([ identifier[s] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[s] , identifier[int] ) keyword[for] identifier[s] keyword[in] identifier[shape] ])):
keyword[raise] identifier[ParameterError] ( literal[string] . identifier[format] ( identifier[shape] ))
identifier[self] . identifier[fields] [ identifier[self] . identifier[scope] ( identifier[field] )]= identifier[Tensor] ( identifier[tuple] ( identifier[shape] ), identifier[dtype] ) | def register(self, field, shape, dtype):
"""Register a field as a tensor with specified shape and type.
A `Tensor` of the given shape and type will be registered in this
object's `fields` dict.
Parameters
----------
field : str
The name of the field
shape : iterable of `int` or `None`
The shape of the output variable.
This does not include a dimension for multiple outputs.
`None` may be used to indicate variable-length outputs
dtype : type
The data type of the field
Raises
------
ParameterError
If dtype or shape are improperly specified
"""
if not isinstance(dtype, type):
raise ParameterError('dtype={} must be a type'.format(dtype)) # depends on [control=['if'], data=[]]
if not (isinstance(shape, Iterable) and all([s is None or isinstance(s, int) for s in shape])):
raise ParameterError('shape={} must be an iterable of integers'.format(shape)) # depends on [control=['if'], data=[]]
self.fields[self.scope(field)] = Tensor(tuple(shape), dtype) |
def proxies(self, url):
"""
Get the transport proxy configuration
:param url: string
:return: Proxy configuration dictionary
:rtype: Dictionary
"""
netloc = urllib.parse.urlparse(url).netloc
proxies = {}
if settings.PROXIES and settings.PROXIES.get(netloc):
proxies["http"] = settings.PROXIES[netloc]
proxies["https"] = settings.PROXIES[netloc]
elif settings.PROXY_URL:
proxies["http"] = settings.PROXY_URL
proxies["https"] = settings.PROXY_URL
return proxies | def function[proxies, parameter[self, url]]:
constant[
Get the transport proxy configuration
:param url: string
:return: Proxy configuration dictionary
:rtype: Dictionary
]
variable[netloc] assign[=] call[name[urllib].parse.urlparse, parameter[name[url]]].netloc
variable[proxies] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da2047e9960> begin[:]
call[name[proxies]][constant[http]] assign[=] call[name[settings].PROXIES][name[netloc]]
call[name[proxies]][constant[https]] assign[=] call[name[settings].PROXIES][name[netloc]]
return[name[proxies]] | keyword[def] identifier[proxies] ( identifier[self] , identifier[url] ):
literal[string]
identifier[netloc] = identifier[urllib] . identifier[parse] . identifier[urlparse] ( identifier[url] ). identifier[netloc]
identifier[proxies] ={}
keyword[if] identifier[settings] . identifier[PROXIES] keyword[and] identifier[settings] . identifier[PROXIES] . identifier[get] ( identifier[netloc] ):
identifier[proxies] [ literal[string] ]= identifier[settings] . identifier[PROXIES] [ identifier[netloc] ]
identifier[proxies] [ literal[string] ]= identifier[settings] . identifier[PROXIES] [ identifier[netloc] ]
keyword[elif] identifier[settings] . identifier[PROXY_URL] :
identifier[proxies] [ literal[string] ]= identifier[settings] . identifier[PROXY_URL]
identifier[proxies] [ literal[string] ]= identifier[settings] . identifier[PROXY_URL]
keyword[return] identifier[proxies] | def proxies(self, url):
"""
Get the transport proxy configuration
:param url: string
:return: Proxy configuration dictionary
:rtype: Dictionary
"""
netloc = urllib.parse.urlparse(url).netloc
proxies = {}
if settings.PROXIES and settings.PROXIES.get(netloc):
proxies['http'] = settings.PROXIES[netloc]
proxies['https'] = settings.PROXIES[netloc] # depends on [control=['if'], data=[]]
elif settings.PROXY_URL:
proxies['http'] = settings.PROXY_URL
proxies['https'] = settings.PROXY_URL # depends on [control=['if'], data=[]]
return proxies |
def get_user_info(self):
"""
Returns a UserInfo object for the logged in user.
Returns:
UserInfo: object representing the student current grades
"""
# the request is done in behalf of the current logged in user
resp = self.requester.get(
urljoin(
self.base_url,
'/api/mobile/v0.5/my_user_info'
)
)
resp.raise_for_status()
return Info(resp.json()) | def function[get_user_info, parameter[self]]:
constant[
Returns a UserInfo object for the logged in user.
Returns:
UserInfo: object representing the student current grades
]
variable[resp] assign[=] call[name[self].requester.get, parameter[call[name[urljoin], parameter[name[self].base_url, constant[/api/mobile/v0.5/my_user_info]]]]]
call[name[resp].raise_for_status, parameter[]]
return[call[name[Info], parameter[call[name[resp].json, parameter[]]]]] | keyword[def] identifier[get_user_info] ( identifier[self] ):
literal[string]
identifier[resp] = identifier[self] . identifier[requester] . identifier[get] (
identifier[urljoin] (
identifier[self] . identifier[base_url] ,
literal[string]
)
)
identifier[resp] . identifier[raise_for_status] ()
keyword[return] identifier[Info] ( identifier[resp] . identifier[json] ()) | def get_user_info(self):
"""
Returns a UserInfo object for the logged in user.
Returns:
UserInfo: object representing the student current grades
"""
# the request is done in behalf of the current logged in user
resp = self.requester.get(urljoin(self.base_url, '/api/mobile/v0.5/my_user_info'))
resp.raise_for_status()
return Info(resp.json()) |
def populationStability(vectors, numSamples=None):
"""
Returns the stability for the population averaged over multiple time steps
Parameters:
-----------------------------------------------
vectors: the vectors for which the stability is calculated
numSamples the number of time steps where stability is counted
At each time step, count the fraction of the active elements which are stable
from the previous step
Average all the fraction
"""
# ----------------------------------------------------------------------
# Calculate the stability
numVectors = len(vectors)
if numSamples is None:
numSamples = numVectors-1
countOn = range(numVectors-1)
else:
countOn = numpy.random.randint(0, numVectors-1, numSamples)
sigmap = 0.0
for i in countOn:
match = checkMatch(vectors[i], vectors[i+1], sparse=False)
# Ignore reset vectors (all 0's)
if match[1] != 0:
sigmap += float(match[0])/match[1]
return sigmap / numSamples | def function[populationStability, parameter[vectors, numSamples]]:
constant[
Returns the stability for the population averaged over multiple time steps
Parameters:
-----------------------------------------------
vectors: the vectors for which the stability is calculated
numSamples the number of time steps where stability is counted
At each time step, count the fraction of the active elements which are stable
from the previous step
Average all the fraction
]
variable[numVectors] assign[=] call[name[len], parameter[name[vectors]]]
if compare[name[numSamples] is constant[None]] begin[:]
variable[numSamples] assign[=] binary_operation[name[numVectors] - constant[1]]
variable[countOn] assign[=] call[name[range], parameter[binary_operation[name[numVectors] - constant[1]]]]
variable[sigmap] assign[=] constant[0.0]
for taget[name[i]] in starred[name[countOn]] begin[:]
variable[match] assign[=] call[name[checkMatch], parameter[call[name[vectors]][name[i]], call[name[vectors]][binary_operation[name[i] + constant[1]]]]]
if compare[call[name[match]][constant[1]] not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da20c6aa830>
return[binary_operation[name[sigmap] / name[numSamples]]] | keyword[def] identifier[populationStability] ( identifier[vectors] , identifier[numSamples] = keyword[None] ):
literal[string]
identifier[numVectors] = identifier[len] ( identifier[vectors] )
keyword[if] identifier[numSamples] keyword[is] keyword[None] :
identifier[numSamples] = identifier[numVectors] - literal[int]
identifier[countOn] = identifier[range] ( identifier[numVectors] - literal[int] )
keyword[else] :
identifier[countOn] = identifier[numpy] . identifier[random] . identifier[randint] ( literal[int] , identifier[numVectors] - literal[int] , identifier[numSamples] )
identifier[sigmap] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[countOn] :
identifier[match] = identifier[checkMatch] ( identifier[vectors] [ identifier[i] ], identifier[vectors] [ identifier[i] + literal[int] ], identifier[sparse] = keyword[False] )
keyword[if] identifier[match] [ literal[int] ]!= literal[int] :
identifier[sigmap] += identifier[float] ( identifier[match] [ literal[int] ])/ identifier[match] [ literal[int] ]
keyword[return] identifier[sigmap] / identifier[numSamples] | def populationStability(vectors, numSamples=None):
"""
Returns the stability for the population averaged over multiple time steps
Parameters:
-----------------------------------------------
vectors: the vectors for which the stability is calculated
numSamples the number of time steps where stability is counted
At each time step, count the fraction of the active elements which are stable
from the previous step
Average all the fraction
"""
# ----------------------------------------------------------------------
# Calculate the stability
numVectors = len(vectors)
if numSamples is None:
numSamples = numVectors - 1
countOn = range(numVectors - 1) # depends on [control=['if'], data=['numSamples']]
else:
countOn = numpy.random.randint(0, numVectors - 1, numSamples)
sigmap = 0.0
for i in countOn:
match = checkMatch(vectors[i], vectors[i + 1], sparse=False)
# Ignore reset vectors (all 0's)
if match[1] != 0:
sigmap += float(match[0]) / match[1] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return sigmap / numSamples |
def set_value(ctx, key, value):
"""Assigns values to config file entries. If the value is omitted,
you will be prompted, with the input hidden if it is sensitive.
\b
$ ddev config set github.user foo
New setting:
[github]
user = "foo"
"""
scrubbing = False
if value is None:
scrubbing = key in SECRET_KEYS
value = click.prompt('Value for `{}`'.format(key), hide_input=scrubbing)
if key in ('core', 'extras', 'agent') and not value.startswith('~'):
value = os.path.abspath(value)
user_config = new_config = ctx.obj
user_config.pop('repo_choice', None)
data = [value]
data.extend(reversed(key.split('.')))
key = data.pop()
value = data.pop()
# Use a separate mapping to show only what has changed in the end
branch_config_root = branch_config = {}
# Consider dots as keys
while data:
default_branch = {value: ''}
branch_config[key] = default_branch
branch_config = branch_config[key]
new_value = new_config.get(key)
if not hasattr(new_value, 'get'):
new_value = default_branch
new_config[key] = new_value
new_config = new_config[key]
key = value
value = data.pop()
value = string_to_toml_type(value)
branch_config[key] = new_config[key] = value
save_config(user_config)
output_config = scrub_secrets(branch_config_root) if scrubbing else branch_config_root
echo_success('New setting:')
echo_info(toml.dumps(output_config).rstrip()) | def function[set_value, parameter[ctx, key, value]]:
constant[Assigns values to config file entries. If the value is omitted,
you will be prompted, with the input hidden if it is sensitive.
$ ddev config set github.user foo
New setting:
[github]
user = "foo"
]
variable[scrubbing] assign[=] constant[False]
if compare[name[value] is constant[None]] begin[:]
variable[scrubbing] assign[=] compare[name[key] in name[SECRET_KEYS]]
variable[value] assign[=] call[name[click].prompt, parameter[call[constant[Value for `{}`].format, parameter[name[key]]]]]
if <ast.BoolOp object at 0x7da18dc9b9d0> begin[:]
variable[value] assign[=] call[name[os].path.abspath, parameter[name[value]]]
variable[user_config] assign[=] name[ctx].obj
call[name[user_config].pop, parameter[constant[repo_choice], constant[None]]]
variable[data] assign[=] list[[<ast.Name object at 0x7da18dc9a260>]]
call[name[data].extend, parameter[call[name[reversed], parameter[call[name[key].split, parameter[constant[.]]]]]]]
variable[key] assign[=] call[name[data].pop, parameter[]]
variable[value] assign[=] call[name[data].pop, parameter[]]
variable[branch_config_root] assign[=] dictionary[[], []]
while name[data] begin[:]
variable[default_branch] assign[=] dictionary[[<ast.Name object at 0x7da18dc9a6b0>], [<ast.Constant object at 0x7da18dc98220>]]
call[name[branch_config]][name[key]] assign[=] name[default_branch]
variable[branch_config] assign[=] call[name[branch_config]][name[key]]
variable[new_value] assign[=] call[name[new_config].get, parameter[name[key]]]
if <ast.UnaryOp object at 0x7da18dc9b670> begin[:]
variable[new_value] assign[=] name[default_branch]
call[name[new_config]][name[key]] assign[=] name[new_value]
variable[new_config] assign[=] call[name[new_config]][name[key]]
variable[key] assign[=] name[value]
variable[value] assign[=] call[name[data].pop, parameter[]]
variable[value] assign[=] call[name[string_to_toml_type], parameter[name[value]]]
call[name[branch_config]][name[key]] assign[=] name[value]
call[name[save_config], parameter[name[user_config]]]
variable[output_config] assign[=] <ast.IfExp object at 0x7da18dc9ac50>
call[name[echo_success], parameter[constant[New setting:]]]
call[name[echo_info], parameter[call[call[name[toml].dumps, parameter[name[output_config]]].rstrip, parameter[]]]] | keyword[def] identifier[set_value] ( identifier[ctx] , identifier[key] , identifier[value] ):
literal[string]
identifier[scrubbing] = keyword[False]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[scrubbing] = identifier[key] keyword[in] identifier[SECRET_KEYS]
identifier[value] = identifier[click] . identifier[prompt] ( literal[string] . identifier[format] ( identifier[key] ), identifier[hide_input] = identifier[scrubbing] )
keyword[if] identifier[key] keyword[in] ( literal[string] , literal[string] , literal[string] ) keyword[and] keyword[not] identifier[value] . identifier[startswith] ( literal[string] ):
identifier[value] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[value] )
identifier[user_config] = identifier[new_config] = identifier[ctx] . identifier[obj]
identifier[user_config] . identifier[pop] ( literal[string] , keyword[None] )
identifier[data] =[ identifier[value] ]
identifier[data] . identifier[extend] ( identifier[reversed] ( identifier[key] . identifier[split] ( literal[string] )))
identifier[key] = identifier[data] . identifier[pop] ()
identifier[value] = identifier[data] . identifier[pop] ()
identifier[branch_config_root] = identifier[branch_config] ={}
keyword[while] identifier[data] :
identifier[default_branch] ={ identifier[value] : literal[string] }
identifier[branch_config] [ identifier[key] ]= identifier[default_branch]
identifier[branch_config] = identifier[branch_config] [ identifier[key] ]
identifier[new_value] = identifier[new_config] . identifier[get] ( identifier[key] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[new_value] , literal[string] ):
identifier[new_value] = identifier[default_branch]
identifier[new_config] [ identifier[key] ]= identifier[new_value]
identifier[new_config] = identifier[new_config] [ identifier[key] ]
identifier[key] = identifier[value]
identifier[value] = identifier[data] . identifier[pop] ()
identifier[value] = identifier[string_to_toml_type] ( identifier[value] )
identifier[branch_config] [ identifier[key] ]= identifier[new_config] [ identifier[key] ]= identifier[value]
identifier[save_config] ( identifier[user_config] )
identifier[output_config] = identifier[scrub_secrets] ( identifier[branch_config_root] ) keyword[if] identifier[scrubbing] keyword[else] identifier[branch_config_root]
identifier[echo_success] ( literal[string] )
identifier[echo_info] ( identifier[toml] . identifier[dumps] ( identifier[output_config] ). identifier[rstrip] ()) | def set_value(ctx, key, value):
"""Assigns values to config file entries. If the value is omitted,
you will be prompted, with the input hidden if it is sensitive.
\x08
$ ddev config set github.user foo
New setting:
[github]
user = "foo"
"""
scrubbing = False
if value is None:
scrubbing = key in SECRET_KEYS
value = click.prompt('Value for `{}`'.format(key), hide_input=scrubbing) # depends on [control=['if'], data=['value']]
if key in ('core', 'extras', 'agent') and (not value.startswith('~')):
value = os.path.abspath(value) # depends on [control=['if'], data=[]]
user_config = new_config = ctx.obj
user_config.pop('repo_choice', None)
data = [value]
data.extend(reversed(key.split('.')))
key = data.pop()
value = data.pop()
# Use a separate mapping to show only what has changed in the end
branch_config_root = branch_config = {}
# Consider dots as keys
while data:
default_branch = {value: ''}
branch_config[key] = default_branch
branch_config = branch_config[key]
new_value = new_config.get(key)
if not hasattr(new_value, 'get'):
new_value = default_branch # depends on [control=['if'], data=[]]
new_config[key] = new_value
new_config = new_config[key]
key = value
value = data.pop() # depends on [control=['while'], data=[]]
value = string_to_toml_type(value)
branch_config[key] = new_config[key] = value
save_config(user_config)
output_config = scrub_secrets(branch_config_root) if scrubbing else branch_config_root
echo_success('New setting:')
echo_info(toml.dumps(output_config).rstrip()) |
def add_table(self, id_, name, rows, cols, x, y, cx, cy):
"""
Append a ``<p:graphicFrame>`` shape containing a table as specified
in call.
"""
graphicFrame = CT_GraphicalObjectFrame.new_table_graphicFrame(
id_, name, rows, cols, x, y, cx, cy
)
self.insert_element_before(graphicFrame, 'p:extLst')
return graphicFrame | def function[add_table, parameter[self, id_, name, rows, cols, x, y, cx, cy]]:
constant[
Append a ``<p:graphicFrame>`` shape containing a table as specified
in call.
]
variable[graphicFrame] assign[=] call[name[CT_GraphicalObjectFrame].new_table_graphicFrame, parameter[name[id_], name[name], name[rows], name[cols], name[x], name[y], name[cx], name[cy]]]
call[name[self].insert_element_before, parameter[name[graphicFrame], constant[p:extLst]]]
return[name[graphicFrame]] | keyword[def] identifier[add_table] ( identifier[self] , identifier[id_] , identifier[name] , identifier[rows] , identifier[cols] , identifier[x] , identifier[y] , identifier[cx] , identifier[cy] ):
literal[string]
identifier[graphicFrame] = identifier[CT_GraphicalObjectFrame] . identifier[new_table_graphicFrame] (
identifier[id_] , identifier[name] , identifier[rows] , identifier[cols] , identifier[x] , identifier[y] , identifier[cx] , identifier[cy]
)
identifier[self] . identifier[insert_element_before] ( identifier[graphicFrame] , literal[string] )
keyword[return] identifier[graphicFrame] | def add_table(self, id_, name, rows, cols, x, y, cx, cy):
"""
Append a ``<p:graphicFrame>`` shape containing a table as specified
in call.
"""
graphicFrame = CT_GraphicalObjectFrame.new_table_graphicFrame(id_, name, rows, cols, x, y, cx, cy)
self.insert_element_before(graphicFrame, 'p:extLst')
return graphicFrame |
def get_node(self, node_id, project_id=None):
"""
Returns a Node instance.
:param node_id: Node identifier
:param project_id: Project identifier
:returns: Node instance
"""
if project_id:
# check the project_id exists
project = ProjectManager.instance().get_project(project_id)
try:
UUID(node_id, version=4)
except ValueError:
raise aiohttp.web.HTTPBadRequest(text="Node ID {} is not a valid UUID".format(node_id))
if node_id not in self._nodes:
raise aiohttp.web.HTTPNotFound(text="Node ID {} doesn't exist".format(node_id))
node = self._nodes[node_id]
if project_id:
if node.project.id != project.id:
raise aiohttp.web.HTTPNotFound(text="Project ID {} doesn't belong to node {}".format(project_id, node.name))
return node | def function[get_node, parameter[self, node_id, project_id]]:
constant[
Returns a Node instance.
:param node_id: Node identifier
:param project_id: Project identifier
:returns: Node instance
]
if name[project_id] begin[:]
variable[project] assign[=] call[call[name[ProjectManager].instance, parameter[]].get_project, parameter[name[project_id]]]
<ast.Try object at 0x7da204621540>
if compare[name[node_id] <ast.NotIn object at 0x7da2590d7190> name[self]._nodes] begin[:]
<ast.Raise object at 0x7da2054a6590>
variable[node] assign[=] call[name[self]._nodes][name[node_id]]
if name[project_id] begin[:]
if compare[name[node].project.id not_equal[!=] name[project].id] begin[:]
<ast.Raise object at 0x7da2054a5cc0>
return[name[node]] | keyword[def] identifier[get_node] ( identifier[self] , identifier[node_id] , identifier[project_id] = keyword[None] ):
literal[string]
keyword[if] identifier[project_id] :
identifier[project] = identifier[ProjectManager] . identifier[instance] (). identifier[get_project] ( identifier[project_id] )
keyword[try] :
identifier[UUID] ( identifier[node_id] , identifier[version] = literal[int] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[aiohttp] . identifier[web] . identifier[HTTPBadRequest] ( identifier[text] = literal[string] . identifier[format] ( identifier[node_id] ))
keyword[if] identifier[node_id] keyword[not] keyword[in] identifier[self] . identifier[_nodes] :
keyword[raise] identifier[aiohttp] . identifier[web] . identifier[HTTPNotFound] ( identifier[text] = literal[string] . identifier[format] ( identifier[node_id] ))
identifier[node] = identifier[self] . identifier[_nodes] [ identifier[node_id] ]
keyword[if] identifier[project_id] :
keyword[if] identifier[node] . identifier[project] . identifier[id] != identifier[project] . identifier[id] :
keyword[raise] identifier[aiohttp] . identifier[web] . identifier[HTTPNotFound] ( identifier[text] = literal[string] . identifier[format] ( identifier[project_id] , identifier[node] . identifier[name] ))
keyword[return] identifier[node] | def get_node(self, node_id, project_id=None):
"""
Returns a Node instance.
:param node_id: Node identifier
:param project_id: Project identifier
:returns: Node instance
"""
if project_id:
# check the project_id exists
project = ProjectManager.instance().get_project(project_id) # depends on [control=['if'], data=[]]
try:
UUID(node_id, version=4) # depends on [control=['try'], data=[]]
except ValueError:
raise aiohttp.web.HTTPBadRequest(text='Node ID {} is not a valid UUID'.format(node_id)) # depends on [control=['except'], data=[]]
if node_id not in self._nodes:
raise aiohttp.web.HTTPNotFound(text="Node ID {} doesn't exist".format(node_id)) # depends on [control=['if'], data=['node_id']]
node = self._nodes[node_id]
if project_id:
if node.project.id != project.id:
raise aiohttp.web.HTTPNotFound(text="Project ID {} doesn't belong to node {}".format(project_id, node.name)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return node |
def update_mute(self, data):
"""Update mute."""
self._group['muted'] = data['mute']
self.callback()
_LOGGER.info('updated mute on %s', self.friendly_name) | def function[update_mute, parameter[self, data]]:
constant[Update mute.]
call[name[self]._group][constant[muted]] assign[=] call[name[data]][constant[mute]]
call[name[self].callback, parameter[]]
call[name[_LOGGER].info, parameter[constant[updated mute on %s], name[self].friendly_name]] | keyword[def] identifier[update_mute] ( identifier[self] , identifier[data] ):
literal[string]
identifier[self] . identifier[_group] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[self] . identifier[callback] ()
identifier[_LOGGER] . identifier[info] ( literal[string] , identifier[self] . identifier[friendly_name] ) | def update_mute(self, data):
"""Update mute."""
self._group['muted'] = data['mute']
self.callback()
_LOGGER.info('updated mute on %s', self.friendly_name) |
def _encode_datetime(obj):
"""Encode a msgpck'ed datetime."""
if isinstance(obj, datetime.datetime):
obj = {'__datetime__': True, 'as_str': obj.strftime("%Y%m%dT%H:%M:%S.%f").encode()}
return obj | def function[_encode_datetime, parameter[obj]]:
constant[Encode a msgpck'ed datetime.]
if call[name[isinstance], parameter[name[obj], name[datetime].datetime]] begin[:]
variable[obj] assign[=] dictionary[[<ast.Constant object at 0x7da207f03130>, <ast.Constant object at 0x7da207f03af0>], [<ast.Constant object at 0x7da207f00700>, <ast.Call object at 0x7da207f01240>]]
return[name[obj]] | keyword[def] identifier[_encode_datetime] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[datetime] . identifier[datetime] ):
identifier[obj] ={ literal[string] : keyword[True] , literal[string] : identifier[obj] . identifier[strftime] ( literal[string] ). identifier[encode] ()}
keyword[return] identifier[obj] | def _encode_datetime(obj):
"""Encode a msgpck'ed datetime."""
if isinstance(obj, datetime.datetime):
obj = {'__datetime__': True, 'as_str': obj.strftime('%Y%m%dT%H:%M:%S.%f').encode()} # depends on [control=['if'], data=[]]
return obj |
def geist_replay(wrapped, instance, args, kwargs):
"""Wraps a test of other function and injects a Geist GUI which will
enable replay (set environment variable GEIST_REPLAY_MODE to 'record' to
active record mode."""
path_parts = []
file_parts = []
if hasattr(wrapped, '__module__'):
module = wrapped.__module__
module_file = sys.modules[module].__file__
root, _file = os.path.split(module_file)
path_parts.append(root)
_file, _ = os.path.splitext(_file)
file_parts.append(_file)
if hasattr(wrapped, '__objclass__'):
file_parts.append(wrapped.__objclass__.__name__)
elif hasattr(wrapped, '__self__'):
file_parts.append(wrapped.__self__.__class__.__name__)
file_parts.append(wrapped.__name__ + '.log')
path_parts.append('_'.join(file_parts))
filename = os.path.join(*path_parts)
if is_in_record_mode():
platform_backend = get_platform_backend()
backend = RecordingBackend(
source_backend=platform_backend,
recording_filename=filename
)
else:
backend = PlaybackBackend(
recording_filename=filename
)
gui = GUI(backend)
return wrapped(gui, *args, **kwargs) | def function[geist_replay, parameter[wrapped, instance, args, kwargs]]:
constant[Wraps a test of other function and injects a Geist GUI which will
enable replay (set environment variable GEIST_REPLAY_MODE to 'record' to
active record mode.]
variable[path_parts] assign[=] list[[]]
variable[file_parts] assign[=] list[[]]
if call[name[hasattr], parameter[name[wrapped], constant[__module__]]] begin[:]
variable[module] assign[=] name[wrapped].__module__
variable[module_file] assign[=] call[name[sys].modules][name[module]].__file__
<ast.Tuple object at 0x7da1b28fcee0> assign[=] call[name[os].path.split, parameter[name[module_file]]]
call[name[path_parts].append, parameter[name[root]]]
<ast.Tuple object at 0x7da1b28fc550> assign[=] call[name[os].path.splitext, parameter[name[_file]]]
call[name[file_parts].append, parameter[name[_file]]]
if call[name[hasattr], parameter[name[wrapped], constant[__objclass__]]] begin[:]
call[name[file_parts].append, parameter[name[wrapped].__objclass__.__name__]]
call[name[file_parts].append, parameter[binary_operation[name[wrapped].__name__ + constant[.log]]]]
call[name[path_parts].append, parameter[call[constant[_].join, parameter[name[file_parts]]]]]
variable[filename] assign[=] call[name[os].path.join, parameter[<ast.Starred object at 0x7da1b28fe2c0>]]
if call[name[is_in_record_mode], parameter[]] begin[:]
variable[platform_backend] assign[=] call[name[get_platform_backend], parameter[]]
variable[backend] assign[=] call[name[RecordingBackend], parameter[]]
variable[gui] assign[=] call[name[GUI], parameter[name[backend]]]
return[call[name[wrapped], parameter[name[gui], <ast.Starred object at 0x7da1b27768f0>]]] | keyword[def] identifier[geist_replay] ( identifier[wrapped] , identifier[instance] , identifier[args] , identifier[kwargs] ):
literal[string]
identifier[path_parts] =[]
identifier[file_parts] =[]
keyword[if] identifier[hasattr] ( identifier[wrapped] , literal[string] ):
identifier[module] = identifier[wrapped] . identifier[__module__]
identifier[module_file] = identifier[sys] . identifier[modules] [ identifier[module] ]. identifier[__file__]
identifier[root] , identifier[_file] = identifier[os] . identifier[path] . identifier[split] ( identifier[module_file] )
identifier[path_parts] . identifier[append] ( identifier[root] )
identifier[_file] , identifier[_] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[_file] )
identifier[file_parts] . identifier[append] ( identifier[_file] )
keyword[if] identifier[hasattr] ( identifier[wrapped] , literal[string] ):
identifier[file_parts] . identifier[append] ( identifier[wrapped] . identifier[__objclass__] . identifier[__name__] )
keyword[elif] identifier[hasattr] ( identifier[wrapped] , literal[string] ):
identifier[file_parts] . identifier[append] ( identifier[wrapped] . identifier[__self__] . identifier[__class__] . identifier[__name__] )
identifier[file_parts] . identifier[append] ( identifier[wrapped] . identifier[__name__] + literal[string] )
identifier[path_parts] . identifier[append] ( literal[string] . identifier[join] ( identifier[file_parts] ))
identifier[filename] = identifier[os] . identifier[path] . identifier[join] (* identifier[path_parts] )
keyword[if] identifier[is_in_record_mode] ():
identifier[platform_backend] = identifier[get_platform_backend] ()
identifier[backend] = identifier[RecordingBackend] (
identifier[source_backend] = identifier[platform_backend] ,
identifier[recording_filename] = identifier[filename]
)
keyword[else] :
identifier[backend] = identifier[PlaybackBackend] (
identifier[recording_filename] = identifier[filename]
)
identifier[gui] = identifier[GUI] ( identifier[backend] )
keyword[return] identifier[wrapped] ( identifier[gui] ,* identifier[args] ,** identifier[kwargs] ) | def geist_replay(wrapped, instance, args, kwargs):
"""Wraps a test of other function and injects a Geist GUI which will
enable replay (set environment variable GEIST_REPLAY_MODE to 'record' to
active record mode."""
path_parts = []
file_parts = []
if hasattr(wrapped, '__module__'):
module = wrapped.__module__
module_file = sys.modules[module].__file__
(root, _file) = os.path.split(module_file)
path_parts.append(root)
(_file, _) = os.path.splitext(_file)
file_parts.append(_file) # depends on [control=['if'], data=[]]
if hasattr(wrapped, '__objclass__'):
file_parts.append(wrapped.__objclass__.__name__) # depends on [control=['if'], data=[]]
elif hasattr(wrapped, '__self__'):
file_parts.append(wrapped.__self__.__class__.__name__) # depends on [control=['if'], data=[]]
file_parts.append(wrapped.__name__ + '.log')
path_parts.append('_'.join(file_parts))
filename = os.path.join(*path_parts)
if is_in_record_mode():
platform_backend = get_platform_backend()
backend = RecordingBackend(source_backend=platform_backend, recording_filename=filename) # depends on [control=['if'], data=[]]
else:
backend = PlaybackBackend(recording_filename=filename)
gui = GUI(backend)
return wrapped(gui, *args, **kwargs) |
def legacy_conv_act_layer(from_layer, name, num_filter, kernel=(1,1), pad=(0,0), \
stride=(1,1), act_type="relu", use_batchnorm=False):
"""
wrapper for a small Convolution group
Parameters:
----------
from_layer : mx.symbol
continue on which layer
name : str
base name of the new layers
num_filter : int
how many filters to use in Convolution layer
kernel : tuple (int, int)
kernel size (h, w)
pad : tuple (int, int)
padding size (h, w)
stride : tuple (int, int)
stride size (h, w)
act_type : str
activation type, can be relu...
use_batchnorm : bool
whether to use batch normalization
Returns:
----------
(conv, relu) mx.Symbols
"""
assert not use_batchnorm, "batchnorm not yet supported"
bias = mx.symbol.Variable(name="conv{}_bias".format(name),
init=mx.init.Constant(0.0), attr={'__lr_mult__': '2.0'})
conv = mx.symbol.Convolution(data=from_layer, bias=bias, kernel=kernel, pad=pad, \
stride=stride, num_filter=num_filter, name="conv{}".format(name))
relu = mx.symbol.Activation(data=conv, act_type=act_type, \
name="{}{}".format(act_type, name))
if use_batchnorm:
relu = mx.symbol.BatchNorm(data=relu, name="bn{}".format(name))
return conv, relu | def function[legacy_conv_act_layer, parameter[from_layer, name, num_filter, kernel, pad, stride, act_type, use_batchnorm]]:
constant[
wrapper for a small Convolution group
Parameters:
----------
from_layer : mx.symbol
continue on which layer
name : str
base name of the new layers
num_filter : int
how many filters to use in Convolution layer
kernel : tuple (int, int)
kernel size (h, w)
pad : tuple (int, int)
padding size (h, w)
stride : tuple (int, int)
stride size (h, w)
act_type : str
activation type, can be relu...
use_batchnorm : bool
whether to use batch normalization
Returns:
----------
(conv, relu) mx.Symbols
]
assert[<ast.UnaryOp object at 0x7da1b1ef2890>]
variable[bias] assign[=] call[name[mx].symbol.Variable, parameter[]]
variable[conv] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu] assign[=] call[name[mx].symbol.Activation, parameter[]]
if name[use_batchnorm] begin[:]
variable[relu] assign[=] call[name[mx].symbol.BatchNorm, parameter[]]
return[tuple[[<ast.Name object at 0x7da1b1e16bc0>, <ast.Name object at 0x7da1b1e157e0>]]] | keyword[def] identifier[legacy_conv_act_layer] ( identifier[from_layer] , identifier[name] , identifier[num_filter] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[stride] =( literal[int] , literal[int] ), identifier[act_type] = literal[string] , identifier[use_batchnorm] = keyword[False] ):
literal[string]
keyword[assert] keyword[not] identifier[use_batchnorm] , literal[string]
identifier[bias] = identifier[mx] . identifier[symbol] . identifier[Variable] ( identifier[name] = literal[string] . identifier[format] ( identifier[name] ),
identifier[init] = identifier[mx] . identifier[init] . identifier[Constant] ( literal[int] ), identifier[attr] ={ literal[string] : literal[string] })
identifier[conv] = identifier[mx] . identifier[symbol] . identifier[Convolution] ( identifier[data] = identifier[from_layer] , identifier[bias] = identifier[bias] , identifier[kernel] = identifier[kernel] , identifier[pad] = identifier[pad] , identifier[stride] = identifier[stride] , identifier[num_filter] = identifier[num_filter] , identifier[name] = literal[string] . identifier[format] ( identifier[name] ))
identifier[relu] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv] , identifier[act_type] = identifier[act_type] , identifier[name] = literal[string] . identifier[format] ( identifier[act_type] , identifier[name] ))
keyword[if] identifier[use_batchnorm] :
identifier[relu] = identifier[mx] . identifier[symbol] . identifier[BatchNorm] ( identifier[data] = identifier[relu] , identifier[name] = literal[string] . identifier[format] ( identifier[name] ))
keyword[return] identifier[conv] , identifier[relu] | def legacy_conv_act_layer(from_layer, name, num_filter, kernel=(1, 1), pad=(0, 0), stride=(1, 1), act_type='relu', use_batchnorm=False):
"""
wrapper for a small Convolution group
Parameters:
----------
from_layer : mx.symbol
continue on which layer
name : str
base name of the new layers
num_filter : int
how many filters to use in Convolution layer
kernel : tuple (int, int)
kernel size (h, w)
pad : tuple (int, int)
padding size (h, w)
stride : tuple (int, int)
stride size (h, w)
act_type : str
activation type, can be relu...
use_batchnorm : bool
whether to use batch normalization
Returns:
----------
(conv, relu) mx.Symbols
"""
assert not use_batchnorm, 'batchnorm not yet supported'
bias = mx.symbol.Variable(name='conv{}_bias'.format(name), init=mx.init.Constant(0.0), attr={'__lr_mult__': '2.0'})
conv = mx.symbol.Convolution(data=from_layer, bias=bias, kernel=kernel, pad=pad, stride=stride, num_filter=num_filter, name='conv{}'.format(name))
relu = mx.symbol.Activation(data=conv, act_type=act_type, name='{}{}'.format(act_type, name))
if use_batchnorm:
relu = mx.symbol.BatchNorm(data=relu, name='bn{}'.format(name)) # depends on [control=['if'], data=[]]
return (conv, relu) |
def snake2ucamel(value):
"""Casts a snake_case string to an UpperCamelCase string."""
UNDER, LETTER, OTHER = object(), object(), object()
def group_key_function(char):
if char == "_":
return UNDER
if char in string.ascii_letters:
return LETTER
return OTHER
def process_group(idx, key, chars):
if key is LETTER:
return "".join([chars[0].upper()] + chars[1:])
if key is OTHER \
or len(chars) != 1 \
or idx in [0, last] \
or LETTER not in (groups[idx-1][1], groups[idx+1][1]):
return "".join(chars)
return ""
raw_groups_gen = itertools.groupby(value, key=group_key_function)
groups = [(idx, key, list(group_gen))
for idx, (key, group_gen) in enumerate(raw_groups_gen)]
last = len(groups) - 1
return "".join(itertools.starmap(process_group, groups)) | def function[snake2ucamel, parameter[value]]:
constant[Casts a snake_case string to an UpperCamelCase string.]
<ast.Tuple object at 0x7da1b25d5ab0> assign[=] tuple[[<ast.Call object at 0x7da1b25d5f00>, <ast.Call object at 0x7da1b25d5960>, <ast.Call object at 0x7da1b25d5b40>]]
def function[group_key_function, parameter[char]]:
if compare[name[char] equal[==] constant[_]] begin[:]
return[name[UNDER]]
if compare[name[char] in name[string].ascii_letters] begin[:]
return[name[LETTER]]
return[name[OTHER]]
def function[process_group, parameter[idx, key, chars]]:
if compare[name[key] is name[LETTER]] begin[:]
return[call[constant[].join, parameter[binary_operation[list[[<ast.Call object at 0x7da1b25d6c50>]] + call[name[chars]][<ast.Slice object at 0x7da1b25d58a0>]]]]]
if <ast.BoolOp object at 0x7da1b25d5a20> begin[:]
return[call[constant[].join, parameter[name[chars]]]]
return[constant[]]
variable[raw_groups_gen] assign[=] call[name[itertools].groupby, parameter[name[value]]]
variable[groups] assign[=] <ast.ListComp object at 0x7da1b25827a0>
variable[last] assign[=] binary_operation[call[name[len], parameter[name[groups]]] - constant[1]]
return[call[constant[].join, parameter[call[name[itertools].starmap, parameter[name[process_group], name[groups]]]]]] | keyword[def] identifier[snake2ucamel] ( identifier[value] ):
literal[string]
identifier[UNDER] , identifier[LETTER] , identifier[OTHER] = identifier[object] (), identifier[object] (), identifier[object] ()
keyword[def] identifier[group_key_function] ( identifier[char] ):
keyword[if] identifier[char] == literal[string] :
keyword[return] identifier[UNDER]
keyword[if] identifier[char] keyword[in] identifier[string] . identifier[ascii_letters] :
keyword[return] identifier[LETTER]
keyword[return] identifier[OTHER]
keyword[def] identifier[process_group] ( identifier[idx] , identifier[key] , identifier[chars] ):
keyword[if] identifier[key] keyword[is] identifier[LETTER] :
keyword[return] literal[string] . identifier[join] ([ identifier[chars] [ literal[int] ]. identifier[upper] ()]+ identifier[chars] [ literal[int] :])
keyword[if] identifier[key] keyword[is] identifier[OTHER] keyword[or] identifier[len] ( identifier[chars] )!= literal[int] keyword[or] identifier[idx] keyword[in] [ literal[int] , identifier[last] ] keyword[or] identifier[LETTER] keyword[not] keyword[in] ( identifier[groups] [ identifier[idx] - literal[int] ][ literal[int] ], identifier[groups] [ identifier[idx] + literal[int] ][ literal[int] ]):
keyword[return] literal[string] . identifier[join] ( identifier[chars] )
keyword[return] literal[string]
identifier[raw_groups_gen] = identifier[itertools] . identifier[groupby] ( identifier[value] , identifier[key] = identifier[group_key_function] )
identifier[groups] =[( identifier[idx] , identifier[key] , identifier[list] ( identifier[group_gen] ))
keyword[for] identifier[idx] ,( identifier[key] , identifier[group_gen] ) keyword[in] identifier[enumerate] ( identifier[raw_groups_gen] )]
identifier[last] = identifier[len] ( identifier[groups] )- literal[int]
keyword[return] literal[string] . identifier[join] ( identifier[itertools] . identifier[starmap] ( identifier[process_group] , identifier[groups] )) | def snake2ucamel(value):
"""Casts a snake_case string to an UpperCamelCase string."""
(UNDER, LETTER, OTHER) = (object(), object(), object())
def group_key_function(char):
if char == '_':
return UNDER # depends on [control=['if'], data=[]]
if char in string.ascii_letters:
return LETTER # depends on [control=['if'], data=[]]
return OTHER
def process_group(idx, key, chars):
if key is LETTER:
return ''.join([chars[0].upper()] + chars[1:]) # depends on [control=['if'], data=[]]
if key is OTHER or len(chars) != 1 or idx in [0, last] or (LETTER not in (groups[idx - 1][1], groups[idx + 1][1])):
return ''.join(chars) # depends on [control=['if'], data=[]]
return ''
raw_groups_gen = itertools.groupby(value, key=group_key_function)
groups = [(idx, key, list(group_gen)) for (idx, (key, group_gen)) in enumerate(raw_groups_gen)]
last = len(groups) - 1
return ''.join(itertools.starmap(process_group, groups)) |
def create_ikepolicy(self, name, **kwargs):
'''
Creates a new IKEPolicy
'''
body = {'name': name}
if 'phase1_negotiation_mode' in kwargs:
body['phase1_negotiation_mode'] = kwargs['phase1_negotiation_mode']
if 'auth_algorithm' in kwargs:
body['auth_algorithm'] = kwargs['auth_algorithm']
if 'encryption_algorithm' in kwargs:
body['encryption_algorithm'] = kwargs['encryption_algorithm']
if 'pfs' in kwargs:
body['pfs'] = kwargs['pfs']
if 'ike_version' in kwargs:
body['ike_version'] = kwargs['ike_version']
if 'units' in kwargs:
body['lifetime'] = {'units': kwargs['units']}
if 'value' in kwargs:
if 'lifetime' not in body:
body['lifetime'] = {}
body['lifetime']['value'] = kwargs['value']
return self.network_conn.create_ikepolicy(body={'ikepolicy': body}) | def function[create_ikepolicy, parameter[self, name]]:
constant[
Creates a new IKEPolicy
]
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da18f722d70>], [<ast.Name object at 0x7da18f723b20>]]
if compare[constant[phase1_negotiation_mode] in name[kwargs]] begin[:]
call[name[body]][constant[phase1_negotiation_mode]] assign[=] call[name[kwargs]][constant[phase1_negotiation_mode]]
if compare[constant[auth_algorithm] in name[kwargs]] begin[:]
call[name[body]][constant[auth_algorithm]] assign[=] call[name[kwargs]][constant[auth_algorithm]]
if compare[constant[encryption_algorithm] in name[kwargs]] begin[:]
call[name[body]][constant[encryption_algorithm]] assign[=] call[name[kwargs]][constant[encryption_algorithm]]
if compare[constant[pfs] in name[kwargs]] begin[:]
call[name[body]][constant[pfs]] assign[=] call[name[kwargs]][constant[pfs]]
if compare[constant[ike_version] in name[kwargs]] begin[:]
call[name[body]][constant[ike_version]] assign[=] call[name[kwargs]][constant[ike_version]]
if compare[constant[units] in name[kwargs]] begin[:]
call[name[body]][constant[lifetime]] assign[=] dictionary[[<ast.Constant object at 0x7da18f7235b0>], [<ast.Subscript object at 0x7da18f721810>]]
if compare[constant[value] in name[kwargs]] begin[:]
if compare[constant[lifetime] <ast.NotIn object at 0x7da2590d7190> name[body]] begin[:]
call[name[body]][constant[lifetime]] assign[=] dictionary[[], []]
call[call[name[body]][constant[lifetime]]][constant[value]] assign[=] call[name[kwargs]][constant[value]]
return[call[name[self].network_conn.create_ikepolicy, parameter[]]] | keyword[def] identifier[create_ikepolicy] ( identifier[self] , identifier[name] ,** identifier[kwargs] ):
literal[string]
identifier[body] ={ literal[string] : identifier[name] }
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[body] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[body] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[body] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[body] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[body] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[body] [ literal[string] ]={ literal[string] : identifier[kwargs] [ literal[string] ]}
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[body] :
identifier[body] [ literal[string] ]={}
identifier[body] [ literal[string] ][ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[return] identifier[self] . identifier[network_conn] . identifier[create_ikepolicy] ( identifier[body] ={ literal[string] : identifier[body] }) | def create_ikepolicy(self, name, **kwargs):
"""
Creates a new IKEPolicy
"""
body = {'name': name}
if 'phase1_negotiation_mode' in kwargs:
body['phase1_negotiation_mode'] = kwargs['phase1_negotiation_mode'] # depends on [control=['if'], data=['kwargs']]
if 'auth_algorithm' in kwargs:
body['auth_algorithm'] = kwargs['auth_algorithm'] # depends on [control=['if'], data=['kwargs']]
if 'encryption_algorithm' in kwargs:
body['encryption_algorithm'] = kwargs['encryption_algorithm'] # depends on [control=['if'], data=['kwargs']]
if 'pfs' in kwargs:
body['pfs'] = kwargs['pfs'] # depends on [control=['if'], data=['kwargs']]
if 'ike_version' in kwargs:
body['ike_version'] = kwargs['ike_version'] # depends on [control=['if'], data=['kwargs']]
if 'units' in kwargs:
body['lifetime'] = {'units': kwargs['units']} # depends on [control=['if'], data=['kwargs']]
if 'value' in kwargs:
if 'lifetime' not in body:
body['lifetime'] = {} # depends on [control=['if'], data=['body']]
body['lifetime']['value'] = kwargs['value'] # depends on [control=['if'], data=['kwargs']]
return self.network_conn.create_ikepolicy(body={'ikepolicy': body}) |
def update(self, attributes=values.unset, assignment_status=values.unset,
reason=values.unset, priority=values.unset,
task_channel=values.unset):
"""
Update the TaskInstance
:param unicode attributes: The user-defined JSON data describing the custom attributes of this task.
:param TaskInstance.Status assignment_status: A 'pending' or 'reserved' Task may be canceled by posting AssignmentStatus='canceled'.
:param unicode reason: This is only required if the Task is canceled or completed.
:param unicode priority: Override priority for the Task.
:param unicode task_channel: The task_channel
:returns: Updated TaskInstance
:rtype: twilio.rest.taskrouter.v1.workspace.task.TaskInstance
"""
return self._proxy.update(
attributes=attributes,
assignment_status=assignment_status,
reason=reason,
priority=priority,
task_channel=task_channel,
) | def function[update, parameter[self, attributes, assignment_status, reason, priority, task_channel]]:
constant[
Update the TaskInstance
:param unicode attributes: The user-defined JSON data describing the custom attributes of this task.
:param TaskInstance.Status assignment_status: A 'pending' or 'reserved' Task may be canceled by posting AssignmentStatus='canceled'.
:param unicode reason: This is only required if the Task is canceled or completed.
:param unicode priority: Override priority for the Task.
:param unicode task_channel: The task_channel
:returns: Updated TaskInstance
:rtype: twilio.rest.taskrouter.v1.workspace.task.TaskInstance
]
return[call[name[self]._proxy.update, parameter[]]] | keyword[def] identifier[update] ( identifier[self] , identifier[attributes] = identifier[values] . identifier[unset] , identifier[assignment_status] = identifier[values] . identifier[unset] ,
identifier[reason] = identifier[values] . identifier[unset] , identifier[priority] = identifier[values] . identifier[unset] ,
identifier[task_channel] = identifier[values] . identifier[unset] ):
literal[string]
keyword[return] identifier[self] . identifier[_proxy] . identifier[update] (
identifier[attributes] = identifier[attributes] ,
identifier[assignment_status] = identifier[assignment_status] ,
identifier[reason] = identifier[reason] ,
identifier[priority] = identifier[priority] ,
identifier[task_channel] = identifier[task_channel] ,
) | def update(self, attributes=values.unset, assignment_status=values.unset, reason=values.unset, priority=values.unset, task_channel=values.unset):
"""
Update the TaskInstance
:param unicode attributes: The user-defined JSON data describing the custom attributes of this task.
:param TaskInstance.Status assignment_status: A 'pending' or 'reserved' Task may be canceled by posting AssignmentStatus='canceled'.
:param unicode reason: This is only required if the Task is canceled or completed.
:param unicode priority: Override priority for the Task.
:param unicode task_channel: The task_channel
:returns: Updated TaskInstance
:rtype: twilio.rest.taskrouter.v1.workspace.task.TaskInstance
"""
return self._proxy.update(attributes=attributes, assignment_status=assignment_status, reason=reason, priority=priority, task_channel=task_channel) |
def connected(self, **kwargs):
"""triger the server_ready event"""
self.bot.log.info('Server config: %r', self.bot.server_config)
# recompile when I'm sure of my nickname
self.bot.config['nick'] = kwargs['me']
self.bot.recompile()
# Let all plugins know that server can handle commands
self.bot.notify('server_ready')
# detach useless events
self.bot.detach_events(*self.before_connect_events) | def function[connected, parameter[self]]:
constant[triger the server_ready event]
call[name[self].bot.log.info, parameter[constant[Server config: %r], name[self].bot.server_config]]
call[name[self].bot.config][constant[nick]] assign[=] call[name[kwargs]][constant[me]]
call[name[self].bot.recompile, parameter[]]
call[name[self].bot.notify, parameter[constant[server_ready]]]
call[name[self].bot.detach_events, parameter[<ast.Starred object at 0x7da18ede7730>]] | keyword[def] identifier[connected] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[bot] . identifier[log] . identifier[info] ( literal[string] , identifier[self] . identifier[bot] . identifier[server_config] )
identifier[self] . identifier[bot] . identifier[config] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
identifier[self] . identifier[bot] . identifier[recompile] ()
identifier[self] . identifier[bot] . identifier[notify] ( literal[string] )
identifier[self] . identifier[bot] . identifier[detach_events] (* identifier[self] . identifier[before_connect_events] ) | def connected(self, **kwargs):
"""triger the server_ready event"""
self.bot.log.info('Server config: %r', self.bot.server_config)
# recompile when I'm sure of my nickname
self.bot.config['nick'] = kwargs['me']
self.bot.recompile()
# Let all plugins know that server can handle commands
self.bot.notify('server_ready')
# detach useless events
self.bot.detach_events(*self.before_connect_events) |
def delete_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, params, headers, redir, timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response | def function[delete_request, parameter[self, alias, uri, data, json, params, headers, allow_redirects, timeout]]:
constant[ Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
]
variable[session] assign[=] call[name[self]._cache.switch, parameter[name[alias]]]
variable[data] assign[=] call[name[self]._format_data_according_to_header, parameter[name[session], name[data], name[headers]]]
variable[redir] assign[=] <ast.IfExp object at 0x7da20c6c5060>
variable[response] assign[=] call[name[self]._delete_request, parameter[name[session], name[uri], name[data], name[json], name[params], name[headers], name[redir], name[timeout]]]
if call[name[isinstance], parameter[name[data], name[bytes]]] begin[:]
variable[data] assign[=] call[name[data].decode, parameter[constant[utf-8]]]
call[name[logger].info, parameter[binary_operation[constant[Delete Request using : alias=%s, uri=%s, data=%s, headers=%s, allow_redirects=%s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6c6ef0>, <ast.Name object at 0x7da20c6c7460>, <ast.Name object at 0x7da20c6c52a0>, <ast.Name object at 0x7da20c6c48e0>, <ast.Name object at 0x7da20c6c69e0>]]]]]
return[name[response]] | keyword[def] identifier[delete_request] (
identifier[self] ,
identifier[alias] ,
identifier[uri] ,
identifier[data] = keyword[None] ,
identifier[json] = keyword[None] ,
identifier[params] = keyword[None] ,
identifier[headers] = keyword[None] ,
identifier[allow_redirects] = keyword[None] ,
identifier[timeout] = keyword[None] ):
literal[string]
identifier[session] = identifier[self] . identifier[_cache] . identifier[switch] ( identifier[alias] )
identifier[data] = identifier[self] . identifier[_format_data_according_to_header] ( identifier[session] , identifier[data] , identifier[headers] )
identifier[redir] = keyword[True] keyword[if] identifier[allow_redirects] keyword[is] keyword[None] keyword[else] identifier[allow_redirects]
identifier[response] = identifier[self] . identifier[_delete_request] (
identifier[session] , identifier[uri] , identifier[data] , identifier[json] , identifier[params] , identifier[headers] , identifier[redir] , identifier[timeout] )
keyword[if] identifier[isinstance] ( identifier[data] , identifier[bytes] ):
identifier[data] = identifier[data] . identifier[decode] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] %( identifier[alias] , identifier[uri] , identifier[data] , identifier[headers] , identifier[redir] ))
keyword[return] identifier[response] | def delete_request(self, alias, uri, data=None, json=None, params=None, headers=None, allow_redirects=None, timeout=None):
""" Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(session, uri, data, json, params, headers, redir, timeout)
if isinstance(data, bytes):
data = data.decode('utf-8') # depends on [control=['if'], data=[]]
logger.info('Delete Request using : alias=%s, uri=%s, data=%s, headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response |
def _GetNumericProjectId(self):
"""Get the numeric project ID for this VM.
Returns:
string, the numeric project ID if one is found.
"""
project_id = 'project/numeric-project-id'
return self.watcher.GetMetadata(metadata_key=project_id, recursive=False) | def function[_GetNumericProjectId, parameter[self]]:
constant[Get the numeric project ID for this VM.
Returns:
string, the numeric project ID if one is found.
]
variable[project_id] assign[=] constant[project/numeric-project-id]
return[call[name[self].watcher.GetMetadata, parameter[]]] | keyword[def] identifier[_GetNumericProjectId] ( identifier[self] ):
literal[string]
identifier[project_id] = literal[string]
keyword[return] identifier[self] . identifier[watcher] . identifier[GetMetadata] ( identifier[metadata_key] = identifier[project_id] , identifier[recursive] = keyword[False] ) | def _GetNumericProjectId(self):
"""Get the numeric project ID for this VM.
Returns:
string, the numeric project ID if one is found.
"""
project_id = 'project/numeric-project-id'
return self.watcher.GetMetadata(metadata_key=project_id, recursive=False) |
def run(model_specification, results_directory, verbose, log, with_debugger):
"""Run a simulation from the command line.
The simulation itself is defined by the given MODEL_SPECIFICATION yaml file.
Within the results directory, which defaults to ~/vivarium_results if none
is provided, a subdirectory will be created with the same name as the
MODEL_SPECIFICATION if one does not exist. Results will be written to a
further subdirectory named after the start time of the simulation run."""
log_level = logging.DEBUG if verbose else logging.ERROR
logging.basicConfig(filename=log, level=log_level)
try:
run_simulation(model_specification, results_directory)
except (BdbQuit, KeyboardInterrupt):
raise
except Exception as e:
if with_debugger:
import pdb
import traceback
traceback.print_exc()
pdb.post_mortem()
else:
logging.exception("Uncaught exception {}".format(e))
raise | def function[run, parameter[model_specification, results_directory, verbose, log, with_debugger]]:
constant[Run a simulation from the command line.
The simulation itself is defined by the given MODEL_SPECIFICATION yaml file.
Within the results directory, which defaults to ~/vivarium_results if none
is provided, a subdirectory will be created with the same name as the
MODEL_SPECIFICATION if one does not exist. Results will be written to a
further subdirectory named after the start time of the simulation run.]
variable[log_level] assign[=] <ast.IfExp object at 0x7da1b04d5d50>
call[name[logging].basicConfig, parameter[]]
<ast.Try object at 0x7da1b04d7a60> | keyword[def] identifier[run] ( identifier[model_specification] , identifier[results_directory] , identifier[verbose] , identifier[log] , identifier[with_debugger] ):
literal[string]
identifier[log_level] = identifier[logging] . identifier[DEBUG] keyword[if] identifier[verbose] keyword[else] identifier[logging] . identifier[ERROR]
identifier[logging] . identifier[basicConfig] ( identifier[filename] = identifier[log] , identifier[level] = identifier[log_level] )
keyword[try] :
identifier[run_simulation] ( identifier[model_specification] , identifier[results_directory] )
keyword[except] ( identifier[BdbQuit] , identifier[KeyboardInterrupt] ):
keyword[raise]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] identifier[with_debugger] :
keyword[import] identifier[pdb]
keyword[import] identifier[traceback]
identifier[traceback] . identifier[print_exc] ()
identifier[pdb] . identifier[post_mortem] ()
keyword[else] :
identifier[logging] . identifier[exception] ( literal[string] . identifier[format] ( identifier[e] ))
keyword[raise] | def run(model_specification, results_directory, verbose, log, with_debugger):
"""Run a simulation from the command line.
The simulation itself is defined by the given MODEL_SPECIFICATION yaml file.
Within the results directory, which defaults to ~/vivarium_results if none
is provided, a subdirectory will be created with the same name as the
MODEL_SPECIFICATION if one does not exist. Results will be written to a
further subdirectory named after the start time of the simulation run."""
log_level = logging.DEBUG if verbose else logging.ERROR
logging.basicConfig(filename=log, level=log_level)
try:
run_simulation(model_specification, results_directory) # depends on [control=['try'], data=[]]
except (BdbQuit, KeyboardInterrupt):
raise # depends on [control=['except'], data=[]]
except Exception as e:
if with_debugger:
import pdb
import traceback
traceback.print_exc()
pdb.post_mortem() # depends on [control=['if'], data=[]]
else:
logging.exception('Uncaught exception {}'.format(e))
raise # depends on [control=['except'], data=['e']] |
def update_check(self, existing, new):
""" Check device state to see if we need to fire the callback.
True if either state is 'Playing'
False if both states are: 'Paused', 'Idle', or 'Off'
True on any state transition.
"""
old_state = existing.state
if 'NowPlayingItem' in existing.session_raw:
try:
old_theme = existing.session_raw['NowPlayingItem']['IsThemeMedia']
except KeyError:
old_theme = False
else:
old_theme = False
if 'NowPlayingItem' in new:
if new['PlayState']['IsPaused']:
new_state = STATE_PAUSED
else:
new_state = STATE_PLAYING
try:
new_theme = new['NowPlayingItem']['IsThemeMedia']
except KeyError:
new_theme = False
else:
new_state = STATE_IDLE
new_theme = False
if old_theme or new_theme:
return False
elif old_state == STATE_PLAYING or new_state == STATE_PLAYING:
return True
elif old_state != new_state:
return True
else:
return False | def function[update_check, parameter[self, existing, new]]:
constant[ Check device state to see if we need to fire the callback.
True if either state is 'Playing'
False if both states are: 'Paused', 'Idle', or 'Off'
True on any state transition.
]
variable[old_state] assign[=] name[existing].state
if compare[constant[NowPlayingItem] in name[existing].session_raw] begin[:]
<ast.Try object at 0x7da1b25d81f0>
if compare[constant[NowPlayingItem] in name[new]] begin[:]
if call[call[name[new]][constant[PlayState]]][constant[IsPaused]] begin[:]
variable[new_state] assign[=] name[STATE_PAUSED]
<ast.Try object at 0x7da1b25dbb80>
if <ast.BoolOp object at 0x7da1b259f760> begin[:]
return[constant[False]] | keyword[def] identifier[update_check] ( identifier[self] , identifier[existing] , identifier[new] ):
literal[string]
identifier[old_state] = identifier[existing] . identifier[state]
keyword[if] literal[string] keyword[in] identifier[existing] . identifier[session_raw] :
keyword[try] :
identifier[old_theme] = identifier[existing] . identifier[session_raw] [ literal[string] ][ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[old_theme] = keyword[False]
keyword[else] :
identifier[old_theme] = keyword[False]
keyword[if] literal[string] keyword[in] identifier[new] :
keyword[if] identifier[new] [ literal[string] ][ literal[string] ]:
identifier[new_state] = identifier[STATE_PAUSED]
keyword[else] :
identifier[new_state] = identifier[STATE_PLAYING]
keyword[try] :
identifier[new_theme] = identifier[new] [ literal[string] ][ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[new_theme] = keyword[False]
keyword[else] :
identifier[new_state] = identifier[STATE_IDLE]
identifier[new_theme] = keyword[False]
keyword[if] identifier[old_theme] keyword[or] identifier[new_theme] :
keyword[return] keyword[False]
keyword[elif] identifier[old_state] == identifier[STATE_PLAYING] keyword[or] identifier[new_state] == identifier[STATE_PLAYING] :
keyword[return] keyword[True]
keyword[elif] identifier[old_state] != identifier[new_state] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def update_check(self, existing, new):
""" Check device state to see if we need to fire the callback.
True if either state is 'Playing'
False if both states are: 'Paused', 'Idle', or 'Off'
True on any state transition.
"""
old_state = existing.state
if 'NowPlayingItem' in existing.session_raw:
try:
old_theme = existing.session_raw['NowPlayingItem']['IsThemeMedia'] # depends on [control=['try'], data=[]]
except KeyError:
old_theme = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
old_theme = False
if 'NowPlayingItem' in new:
if new['PlayState']['IsPaused']:
new_state = STATE_PAUSED # depends on [control=['if'], data=[]]
else:
new_state = STATE_PLAYING
try:
new_theme = new['NowPlayingItem']['IsThemeMedia'] # depends on [control=['try'], data=[]]
except KeyError:
new_theme = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['new']]
else:
new_state = STATE_IDLE
new_theme = False
if old_theme or new_theme:
return False # depends on [control=['if'], data=[]]
elif old_state == STATE_PLAYING or new_state == STATE_PLAYING:
return True # depends on [control=['if'], data=[]]
elif old_state != new_state:
return True # depends on [control=['if'], data=[]]
else:
return False |
def caa_rec(rdatas):
'''
Validate and parse DNS record data for a CAA record
:param rdata: DNS record data
:return: dict w/fields
'''
rschema = OrderedDict((
('flags', lambda flag: ['critical'] if int(flag) > 0 else []),
('tag', RFC.CAA_TAGS),
('value', lambda val: val.strip('\',"'))
))
res = _data2rec_group(rschema, rdatas, 'tag')
for tag in ('issue', 'issuewild'):
tag_res = res.get(tag, False)
if not tag_res:
continue
for idx, val in enumerate(tag_res):
if ';' not in val:
continue
val, params = val.split(';', 1)
params = dict(param.split('=') for param in shlex.split(params))
tag_res[idx] = {val: params}
return res | def function[caa_rec, parameter[rdatas]]:
constant[
Validate and parse DNS record data for a CAA record
:param rdata: DNS record data
:return: dict w/fields
]
variable[rschema] assign[=] call[name[OrderedDict], parameter[tuple[[<ast.Tuple object at 0x7da1b1fe7bb0>, <ast.Tuple object at 0x7da1b1fe4640>, <ast.Tuple object at 0x7da1b1fe4490>]]]]
variable[res] assign[=] call[name[_data2rec_group], parameter[name[rschema], name[rdatas], constant[tag]]]
for taget[name[tag]] in starred[tuple[[<ast.Constant object at 0x7da1b1fe7460>, <ast.Constant object at 0x7da1b1fe6ef0>]]] begin[:]
variable[tag_res] assign[=] call[name[res].get, parameter[name[tag], constant[False]]]
if <ast.UnaryOp object at 0x7da1b1fe4670> begin[:]
continue
for taget[tuple[[<ast.Name object at 0x7da1b1fe5ea0>, <ast.Name object at 0x7da1b1fe6410>]]] in starred[call[name[enumerate], parameter[name[tag_res]]]] begin[:]
if compare[constant[;] <ast.NotIn object at 0x7da2590d7190> name[val]] begin[:]
continue
<ast.Tuple object at 0x7da1b1fe4ca0> assign[=] call[name[val].split, parameter[constant[;], constant[1]]]
variable[params] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b1fe56c0>]]
call[name[tag_res]][name[idx]] assign[=] dictionary[[<ast.Name object at 0x7da1b1fe6a40>], [<ast.Name object at 0x7da1b1fe6950>]]
return[name[res]] | keyword[def] identifier[caa_rec] ( identifier[rdatas] ):
literal[string]
identifier[rschema] = identifier[OrderedDict] ((
( literal[string] , keyword[lambda] identifier[flag] :[ literal[string] ] keyword[if] identifier[int] ( identifier[flag] )> literal[int] keyword[else] []),
( literal[string] , identifier[RFC] . identifier[CAA_TAGS] ),
( literal[string] , keyword[lambda] identifier[val] : identifier[val] . identifier[strip] ( literal[string] ))
))
identifier[res] = identifier[_data2rec_group] ( identifier[rschema] , identifier[rdatas] , literal[string] )
keyword[for] identifier[tag] keyword[in] ( literal[string] , literal[string] ):
identifier[tag_res] = identifier[res] . identifier[get] ( identifier[tag] , keyword[False] )
keyword[if] keyword[not] identifier[tag_res] :
keyword[continue]
keyword[for] identifier[idx] , identifier[val] keyword[in] identifier[enumerate] ( identifier[tag_res] ):
keyword[if] literal[string] keyword[not] keyword[in] identifier[val] :
keyword[continue]
identifier[val] , identifier[params] = identifier[val] . identifier[split] ( literal[string] , literal[int] )
identifier[params] = identifier[dict] ( identifier[param] . identifier[split] ( literal[string] ) keyword[for] identifier[param] keyword[in] identifier[shlex] . identifier[split] ( identifier[params] ))
identifier[tag_res] [ identifier[idx] ]={ identifier[val] : identifier[params] }
keyword[return] identifier[res] | def caa_rec(rdatas):
"""
Validate and parse DNS record data for a CAA record
:param rdata: DNS record data
:return: dict w/fields
"""
rschema = OrderedDict((('flags', lambda flag: ['critical'] if int(flag) > 0 else []), ('tag', RFC.CAA_TAGS), ('value', lambda val: val.strip('\',"'))))
res = _data2rec_group(rschema, rdatas, 'tag')
for tag in ('issue', 'issuewild'):
tag_res = res.get(tag, False)
if not tag_res:
continue # depends on [control=['if'], data=[]]
for (idx, val) in enumerate(tag_res):
if ';' not in val:
continue # depends on [control=['if'], data=[]]
(val, params) = val.split(';', 1)
params = dict((param.split('=') for param in shlex.split(params)))
tag_res[idx] = {val: params} # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['tag']]
return res |
def make_network_graph(compact, expression_names, lookup_names):
"""
Make a network graph, represented as of nodes and a set of edges.
The nodes are represented as tuples: (name: string, input_dim: Dim, label: string, output_dim: Dim, children: set[name], features: string)
# The edges are represented as dict of children to sets of parents: (child: string) -> [(parent: string, features: string)]
"""
nodes = set()
# edges = defaultdict(set) # parent -> (child, extra)
var_name_dict = dict()
if expression_names:
for e in graphviz_items: # e: Expression
if e in expression_names:
var_name_dict[e.vindex] = expression_names[e]
rnn_bldr_name = defaultdict(lambda: chr(len(rnn_bldr_name)+ord('A')))
def vidx2str(vidx): return '%s%s' % ('N', vidx)
for e in graphviz_items: # e: Expression
vidx = e.vindex
f_name = e.name
args = e.args
output_dim = e.dim
input_dim = None # basically just RNNStates use this since everything else has input_dim==output_dim
children = set()
node_type = '2_regular'
if f_name == 'vecInput':
[_dim] = args
arg_strs = []
elif f_name == 'inputVector':
[_v] = args
arg_strs = []
elif f_name == 'matInput':
[_d1, _d2] = args
arg_strs = []
elif f_name == 'inputMatrix':
[_v, _d] = args
arg_strs = []
elif f_name == 'parameters':
[_dim] = args
arg_strs = []
if compact:
if vidx in var_name_dict:
f_name = var_name_dict[vidx]
node_type = '1_param'
elif f_name == 'lookup_parameters':
[_dim] = args
arg_strs = []
if compact:
if vidx in var_name_dict:
f_name = var_name_dict[vidx]
node_type = '1_param'
elif f_name == 'lookup':
[p, idx, update] = args
[_dim] = p.args
if vidx in var_name_dict:
name = var_name_dict[vidx]
else:
name = None
item_name = None
if lookup_names and p in expression_names:
param_name = expression_names[p]
if param_name in lookup_names:
item_name = '\\"%s\\"' % (lookup_names[param_name][idx],)
if compact:
if item_name is not None:
f_name = item_name
elif name is not None:
f_name = '%s[%s]' % (name, idx)
else:
f_name = 'lookup(%s)' % (idx)
arg_strs = []
else:
arg_strs = [var_name_dict.get(p.vindex, 'v%d' % (p.vindex))]
if item_name is not None:
arg_strs.append(item_name)
vocab_size = _dim[0]
arg_strs.extend(['%s' % (idx), '%s' % (vocab_size), 'update' if update else 'fixed'])
#children.add(vidx2str(p.vindex))
#node_type = '1_param'
elif f_name == 'RNNState':
[arg, input_dim, bldr_type, bldr_num, state_idx] = args # arg==input_e
rnn_name = rnn_bldr_name[bldr_num]
if bldr_type.endswith('Builder'):
bldr_type[:-len('Builder')]
f_name = '%s-%s-%s' % (bldr_type, rnn_name, state_idx)
if not compact:
i = arg.vindex
s = var_name_dict.get(i, 'v%d' % (i))
arg_strs = [s]
else:
arg_strs = []
children.add(vidx2str(arg.vindex))
node_type = '3_rnn_state'
else:
arg_strs = []
for arg in args:
if isinstance(arg, Expression):
if not compact:
i = arg.vindex
s = var_name_dict.get(i, 'v%d' % (i))
arg_strs.append(s)
children.add(vidx2str(arg.vindex))
elif isinstance(arg, float) and compact:
s = re.sub('0+$', '', '%.3f' % (arg))
if s == '0.':
s = str(arg)
arg_strs.append(s)
else:
arg_strs.append(str(arg))
# f_name = { ,
# }.get(f_name, f_name)
if compact:
f_name = { 'add': '+',
'sub': '-',
'mul': '*',
'div': '/',
'cadd': '+',
'cmul': '*',
'cdiv': '/',
'scalarsub': '-',
'concatenate': 'cat',
'esum': 'sum',
'emax': 'max',
'emin': 'min',
}.get(f_name, f_name)
if arg_strs:
str_repr = '%s(%s)' % (f_name, ', '.join(arg_strs))
else:
str_repr = f_name
elif f_name == 'add':
[a,b] = arg_strs
str_repr = '%s + %s' % (a,b)
elif f_name == 'sub':
[a,b] = arg_strs
str_repr = '%s - %s' % (a,b)
elif f_name == 'mul':
[a,b] = arg_strs
str_repr = '%s * %s' % (a,b)
elif f_name == 'div':
[a,b] = arg_strs
str_repr = '%s / %s' % (a,b)
elif f_name == 'neg':
[a,] = arg_strs
str_repr = '-%s' % (a)
elif f_name == 'affine_transform':
str_repr = arg_strs[0]
for i in xrange(1, len(arg_strs), 2):
str_repr += ' + %s*%s' % tuple(arg_strs[i:i+2])
else:
if arg_strs is not None:
str_repr = '%s(%s)' % (f_name, ', '.join(arg_strs))
else:
str_repr = f_name
name = vidx2str(vidx)
var_name = '%s' % (var_name_dict.get(vidx, 'v%d' % (vidx))) if not compact else ''
# if show_dims:
# str_repr = '%s\\n%s' % (shape_str(e.dim), str_repr)
label = str_repr
if not compact:
label = '%s = %s' % (var_name, label)
features = ''
# if output_dim.invalid():
# features += " [color=red,style=filled,fillcolor=red]"
# node_def_lines.append(' %s [label="%s%s"] %s;' % (vidx2str(vidx), label_prefix, str_repr, ''))
expr_name = expression_names[e] if compact and expression_names and (e in expression_names) and (expression_names[e] != f_name) else None
nodes.add(GVNode(name, input_dim, label, output_dim, frozenset(children), features, node_type, expr_name))
return nodes | def function[make_network_graph, parameter[compact, expression_names, lookup_names]]:
constant[
Make a network graph, represented as of nodes and a set of edges.
The nodes are represented as tuples: (name: string, input_dim: Dim, label: string, output_dim: Dim, children: set[name], features: string)
# The edges are represented as dict of children to sets of parents: (child: string) -> [(parent: string, features: string)]
]
variable[nodes] assign[=] call[name[set], parameter[]]
variable[var_name_dict] assign[=] call[name[dict], parameter[]]
if name[expression_names] begin[:]
for taget[name[e]] in starred[name[graphviz_items]] begin[:]
if compare[name[e] in name[expression_names]] begin[:]
call[name[var_name_dict]][name[e].vindex] assign[=] call[name[expression_names]][name[e]]
variable[rnn_bldr_name] assign[=] call[name[defaultdict], parameter[<ast.Lambda object at 0x7da1b000efb0>]]
def function[vidx2str, parameter[vidx]]:
return[binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da204346200>, <ast.Name object at 0x7da204346e90>]]]]
for taget[name[e]] in starred[name[graphviz_items]] begin[:]
variable[vidx] assign[=] name[e].vindex
variable[f_name] assign[=] name[e].name
variable[args] assign[=] name[e].args
variable[output_dim] assign[=] name[e].dim
variable[input_dim] assign[=] constant[None]
variable[children] assign[=] call[name[set], parameter[]]
variable[node_type] assign[=] constant[2_regular]
if compare[name[f_name] equal[==] constant[vecInput]] begin[:]
<ast.List object at 0x7da1b000f940> assign[=] name[args]
variable[arg_strs] assign[=] list[[]]
if name[compact] begin[:]
variable[f_name] assign[=] call[dictionary[[<ast.Constant object at 0x7da1b0086ef0>, <ast.Constant object at 0x7da1b0086ec0>, <ast.Constant object at 0x7da1b0086e90>, <ast.Constant object at 0x7da1b0086e60>, <ast.Constant object at 0x7da1b0086e30>, <ast.Constant object at 0x7da1b0086e00>, <ast.Constant object at 0x7da1b0086dd0>, <ast.Constant object at 0x7da1b0086da0>, <ast.Constant object at 0x7da1b0086d70>, <ast.Constant object at 0x7da1b0086d40>, <ast.Constant object at 0x7da1b0086d10>, <ast.Constant object at 0x7da1b0086ce0>], [<ast.Constant object at 0x7da1b0086cb0>, <ast.Constant object at 0x7da1b0086c80>, <ast.Constant object at 0x7da1b0086c50>, <ast.Constant object at 0x7da1b0086c20>, <ast.Constant object at 0x7da1b0086bf0>, <ast.Constant object at 0x7da1b0086bc0>, <ast.Constant object at 0x7da1b0086b90>, <ast.Constant object at 0x7da1b0086b60>, <ast.Constant object at 0x7da1b0086b30>, <ast.Constant object at 0x7da1b0086b00>, <ast.Constant object at 0x7da1b0086ad0>, <ast.Constant object at 0x7da1b0086aa0>]].get, parameter[name[f_name], name[f_name]]]
if name[arg_strs] begin[:]
variable[str_repr] assign[=] binary_operation[constant[%s(%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b00868c0>, <ast.Call object at 0x7da1b0086890>]]]
variable[name] assign[=] call[name[vidx2str], parameter[name[vidx]]]
variable[var_name] assign[=] <ast.IfExp object at 0x7da1b00856f0>
variable[label] assign[=] name[str_repr]
if <ast.UnaryOp object at 0x7da1b0085a20> begin[:]
variable[label] assign[=] binary_operation[constant[%s = %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0085b70>, <ast.Name object at 0x7da1b0085ba0>]]]
variable[features] assign[=] constant[]
variable[expr_name] assign[=] <ast.IfExp object at 0x7da1b0085cc0>
call[name[nodes].add, parameter[call[name[GVNode], parameter[name[name], name[input_dim], name[label], name[output_dim], call[name[frozenset], parameter[name[children]]], name[features], name[node_type], name[expr_name]]]]]
return[name[nodes]] | keyword[def] identifier[make_network_graph] ( identifier[compact] , identifier[expression_names] , identifier[lookup_names] ):
literal[string]
identifier[nodes] = identifier[set] ()
identifier[var_name_dict] = identifier[dict] ()
keyword[if] identifier[expression_names] :
keyword[for] identifier[e] keyword[in] identifier[graphviz_items] :
keyword[if] identifier[e] keyword[in] identifier[expression_names] :
identifier[var_name_dict] [ identifier[e] . identifier[vindex] ]= identifier[expression_names] [ identifier[e] ]
identifier[rnn_bldr_name] = identifier[defaultdict] ( keyword[lambda] : identifier[chr] ( identifier[len] ( identifier[rnn_bldr_name] )+ identifier[ord] ( literal[string] )))
keyword[def] identifier[vidx2str] ( identifier[vidx] ): keyword[return] literal[string] %( literal[string] , identifier[vidx] )
keyword[for] identifier[e] keyword[in] identifier[graphviz_items] :
identifier[vidx] = identifier[e] . identifier[vindex]
identifier[f_name] = identifier[e] . identifier[name]
identifier[args] = identifier[e] . identifier[args]
identifier[output_dim] = identifier[e] . identifier[dim]
identifier[input_dim] = keyword[None]
identifier[children] = identifier[set] ()
identifier[node_type] = literal[string]
keyword[if] identifier[f_name] == literal[string] :
[ identifier[_dim] ]= identifier[args]
identifier[arg_strs] =[]
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[_v] ]= identifier[args]
identifier[arg_strs] =[]
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[_d1] , identifier[_d2] ]= identifier[args]
identifier[arg_strs] =[]
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[_v] , identifier[_d] ]= identifier[args]
identifier[arg_strs] =[]
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[_dim] ]= identifier[args]
identifier[arg_strs] =[]
keyword[if] identifier[compact] :
keyword[if] identifier[vidx] keyword[in] identifier[var_name_dict] :
identifier[f_name] = identifier[var_name_dict] [ identifier[vidx] ]
identifier[node_type] = literal[string]
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[_dim] ]= identifier[args]
identifier[arg_strs] =[]
keyword[if] identifier[compact] :
keyword[if] identifier[vidx] keyword[in] identifier[var_name_dict] :
identifier[f_name] = identifier[var_name_dict] [ identifier[vidx] ]
identifier[node_type] = literal[string]
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[p] , identifier[idx] , identifier[update] ]= identifier[args]
[ identifier[_dim] ]= identifier[p] . identifier[args]
keyword[if] identifier[vidx] keyword[in] identifier[var_name_dict] :
identifier[name] = identifier[var_name_dict] [ identifier[vidx] ]
keyword[else] :
identifier[name] = keyword[None]
identifier[item_name] = keyword[None]
keyword[if] identifier[lookup_names] keyword[and] identifier[p] keyword[in] identifier[expression_names] :
identifier[param_name] = identifier[expression_names] [ identifier[p] ]
keyword[if] identifier[param_name] keyword[in] identifier[lookup_names] :
identifier[item_name] = literal[string] %( identifier[lookup_names] [ identifier[param_name] ][ identifier[idx] ],)
keyword[if] identifier[compact] :
keyword[if] identifier[item_name] keyword[is] keyword[not] keyword[None] :
identifier[f_name] = identifier[item_name]
keyword[elif] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[f_name] = literal[string] %( identifier[name] , identifier[idx] )
keyword[else] :
identifier[f_name] = literal[string] %( identifier[idx] )
identifier[arg_strs] =[]
keyword[else] :
identifier[arg_strs] =[ identifier[var_name_dict] . identifier[get] ( identifier[p] . identifier[vindex] , literal[string] %( identifier[p] . identifier[vindex] ))]
keyword[if] identifier[item_name] keyword[is] keyword[not] keyword[None] :
identifier[arg_strs] . identifier[append] ( identifier[item_name] )
identifier[vocab_size] = identifier[_dim] [ literal[int] ]
identifier[arg_strs] . identifier[extend] ([ literal[string] %( identifier[idx] ), literal[string] %( identifier[vocab_size] ), literal[string] keyword[if] identifier[update] keyword[else] literal[string] ])
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[arg] , identifier[input_dim] , identifier[bldr_type] , identifier[bldr_num] , identifier[state_idx] ]= identifier[args]
identifier[rnn_name] = identifier[rnn_bldr_name] [ identifier[bldr_num] ]
keyword[if] identifier[bldr_type] . identifier[endswith] ( literal[string] ):
identifier[bldr_type] [:- identifier[len] ( literal[string] )]
identifier[f_name] = literal[string] %( identifier[bldr_type] , identifier[rnn_name] , identifier[state_idx] )
keyword[if] keyword[not] identifier[compact] :
identifier[i] = identifier[arg] . identifier[vindex]
identifier[s] = identifier[var_name_dict] . identifier[get] ( identifier[i] , literal[string] %( identifier[i] ))
identifier[arg_strs] =[ identifier[s] ]
keyword[else] :
identifier[arg_strs] =[]
identifier[children] . identifier[add] ( identifier[vidx2str] ( identifier[arg] . identifier[vindex] ))
identifier[node_type] = literal[string]
keyword[else] :
identifier[arg_strs] =[]
keyword[for] identifier[arg] keyword[in] identifier[args] :
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[Expression] ):
keyword[if] keyword[not] identifier[compact] :
identifier[i] = identifier[arg] . identifier[vindex]
identifier[s] = identifier[var_name_dict] . identifier[get] ( identifier[i] , literal[string] %( identifier[i] ))
identifier[arg_strs] . identifier[append] ( identifier[s] )
identifier[children] . identifier[add] ( identifier[vidx2str] ( identifier[arg] . identifier[vindex] ))
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[float] ) keyword[and] identifier[compact] :
identifier[s] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , literal[string] %( identifier[arg] ))
keyword[if] identifier[s] == literal[string] :
identifier[s] = identifier[str] ( identifier[arg] )
identifier[arg_strs] . identifier[append] ( identifier[s] )
keyword[else] :
identifier[arg_strs] . identifier[append] ( identifier[str] ( identifier[arg] ))
keyword[if] identifier[compact] :
identifier[f_name] ={ literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}. identifier[get] ( identifier[f_name] , identifier[f_name] )
keyword[if] identifier[arg_strs] :
identifier[str_repr] = literal[string] %( identifier[f_name] , literal[string] . identifier[join] ( identifier[arg_strs] ))
keyword[else] :
identifier[str_repr] = identifier[f_name]
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[a] , identifier[b] ]= identifier[arg_strs]
identifier[str_repr] = literal[string] %( identifier[a] , identifier[b] )
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[a] , identifier[b] ]= identifier[arg_strs]
identifier[str_repr] = literal[string] %( identifier[a] , identifier[b] )
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[a] , identifier[b] ]= identifier[arg_strs]
identifier[str_repr] = literal[string] %( identifier[a] , identifier[b] )
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[a] , identifier[b] ]= identifier[arg_strs]
identifier[str_repr] = literal[string] %( identifier[a] , identifier[b] )
keyword[elif] identifier[f_name] == literal[string] :
[ identifier[a] ,]= identifier[arg_strs]
identifier[str_repr] = literal[string] %( identifier[a] )
keyword[elif] identifier[f_name] == literal[string] :
identifier[str_repr] = identifier[arg_strs] [ literal[int] ]
keyword[for] identifier[i] keyword[in] identifier[xrange] ( literal[int] , identifier[len] ( identifier[arg_strs] ), literal[int] ):
identifier[str_repr] += literal[string] % identifier[tuple] ( identifier[arg_strs] [ identifier[i] : identifier[i] + literal[int] ])
keyword[else] :
keyword[if] identifier[arg_strs] keyword[is] keyword[not] keyword[None] :
identifier[str_repr] = literal[string] %( identifier[f_name] , literal[string] . identifier[join] ( identifier[arg_strs] ))
keyword[else] :
identifier[str_repr] = identifier[f_name]
identifier[name] = identifier[vidx2str] ( identifier[vidx] )
identifier[var_name] = literal[string] %( identifier[var_name_dict] . identifier[get] ( identifier[vidx] , literal[string] %( identifier[vidx] ))) keyword[if] keyword[not] identifier[compact] keyword[else] literal[string]
identifier[label] = identifier[str_repr]
keyword[if] keyword[not] identifier[compact] :
identifier[label] = literal[string] %( identifier[var_name] , identifier[label] )
identifier[features] = literal[string]
identifier[expr_name] = identifier[expression_names] [ identifier[e] ] keyword[if] identifier[compact] keyword[and] identifier[expression_names] keyword[and] ( identifier[e] keyword[in] identifier[expression_names] ) keyword[and] ( identifier[expression_names] [ identifier[e] ]!= identifier[f_name] ) keyword[else] keyword[None]
identifier[nodes] . identifier[add] ( identifier[GVNode] ( identifier[name] , identifier[input_dim] , identifier[label] , identifier[output_dim] , identifier[frozenset] ( identifier[children] ), identifier[features] , identifier[node_type] , identifier[expr_name] ))
keyword[return] identifier[nodes] | def make_network_graph(compact, expression_names, lookup_names):
"""
Make a network graph, represented as of nodes and a set of edges.
The nodes are represented as tuples: (name: string, input_dim: Dim, label: string, output_dim: Dim, children: set[name], features: string)
# The edges are represented as dict of children to sets of parents: (child: string) -> [(parent: string, features: string)]
"""
nodes = set()
# edges = defaultdict(set) # parent -> (child, extra)
var_name_dict = dict()
if expression_names:
for e in graphviz_items: # e: Expression
if e in expression_names:
var_name_dict[e.vindex] = expression_names[e] # depends on [control=['if'], data=['e', 'expression_names']] # depends on [control=['for'], data=['e']] # depends on [control=['if'], data=[]]
rnn_bldr_name = defaultdict(lambda : chr(len(rnn_bldr_name) + ord('A')))
def vidx2str(vidx):
return '%s%s' % ('N', vidx)
for e in graphviz_items: # e: Expression
vidx = e.vindex
f_name = e.name
args = e.args
output_dim = e.dim
input_dim = None # basically just RNNStates use this since everything else has input_dim==output_dim
children = set()
node_type = '2_regular'
if f_name == 'vecInput':
[_dim] = args
arg_strs = [] # depends on [control=['if'], data=[]]
elif f_name == 'inputVector':
[_v] = args
arg_strs = [] # depends on [control=['if'], data=[]]
elif f_name == 'matInput':
[_d1, _d2] = args
arg_strs = [] # depends on [control=['if'], data=[]]
elif f_name == 'inputMatrix':
[_v, _d] = args
arg_strs = [] # depends on [control=['if'], data=[]]
elif f_name == 'parameters':
[_dim] = args
arg_strs = []
if compact:
if vidx in var_name_dict:
f_name = var_name_dict[vidx] # depends on [control=['if'], data=['vidx', 'var_name_dict']] # depends on [control=['if'], data=[]]
node_type = '1_param' # depends on [control=['if'], data=['f_name']]
elif f_name == 'lookup_parameters':
[_dim] = args
arg_strs = []
if compact:
if vidx in var_name_dict:
f_name = var_name_dict[vidx] # depends on [control=['if'], data=['vidx', 'var_name_dict']] # depends on [control=['if'], data=[]]
node_type = '1_param' # depends on [control=['if'], data=['f_name']]
elif f_name == 'lookup':
[p, idx, update] = args
[_dim] = p.args
if vidx in var_name_dict:
name = var_name_dict[vidx] # depends on [control=['if'], data=['vidx', 'var_name_dict']]
else:
name = None
item_name = None
if lookup_names and p in expression_names:
param_name = expression_names[p]
if param_name in lookup_names:
item_name = '\\"%s\\"' % (lookup_names[param_name][idx],) # depends on [control=['if'], data=['param_name', 'lookup_names']] # depends on [control=['if'], data=[]]
if compact:
if item_name is not None:
f_name = item_name # depends on [control=['if'], data=['item_name']]
elif name is not None:
f_name = '%s[%s]' % (name, idx) # depends on [control=['if'], data=['name']]
else:
f_name = 'lookup(%s)' % idx
arg_strs = [] # depends on [control=['if'], data=[]]
else:
arg_strs = [var_name_dict.get(p.vindex, 'v%d' % p.vindex)]
if item_name is not None:
arg_strs.append(item_name) # depends on [control=['if'], data=['item_name']]
vocab_size = _dim[0]
arg_strs.extend(['%s' % idx, '%s' % vocab_size, 'update' if update else 'fixed']) # depends on [control=['if'], data=['f_name']]
#children.add(vidx2str(p.vindex))
#node_type = '1_param'
elif f_name == 'RNNState':
[arg, input_dim, bldr_type, bldr_num, state_idx] = args # arg==input_e
rnn_name = rnn_bldr_name[bldr_num]
if bldr_type.endswith('Builder'):
bldr_type[:-len('Builder')] # depends on [control=['if'], data=[]]
f_name = '%s-%s-%s' % (bldr_type, rnn_name, state_idx)
if not compact:
i = arg.vindex
s = var_name_dict.get(i, 'v%d' % i)
arg_strs = [s] # depends on [control=['if'], data=[]]
else:
arg_strs = []
children.add(vidx2str(arg.vindex))
node_type = '3_rnn_state' # depends on [control=['if'], data=['f_name']]
else:
arg_strs = []
for arg in args:
if isinstance(arg, Expression):
if not compact:
i = arg.vindex
s = var_name_dict.get(i, 'v%d' % i)
arg_strs.append(s) # depends on [control=['if'], data=[]]
children.add(vidx2str(arg.vindex)) # depends on [control=['if'], data=[]]
elif isinstance(arg, float) and compact:
s = re.sub('0+$', '', '%.3f' % arg)
if s == '0.':
s = str(arg) # depends on [control=['if'], data=['s']]
arg_strs.append(s) # depends on [control=['if'], data=[]]
else:
arg_strs.append(str(arg)) # depends on [control=['for'], data=['arg']]
# f_name = { ,
# }.get(f_name, f_name)
if compact:
f_name = {'add': '+', 'sub': '-', 'mul': '*', 'div': '/', 'cadd': '+', 'cmul': '*', 'cdiv': '/', 'scalarsub': '-', 'concatenate': 'cat', 'esum': 'sum', 'emax': 'max', 'emin': 'min'}.get(f_name, f_name)
if arg_strs:
str_repr = '%s(%s)' % (f_name, ', '.join(arg_strs)) # depends on [control=['if'], data=[]]
else:
str_repr = f_name # depends on [control=['if'], data=[]]
elif f_name == 'add':
[a, b] = arg_strs
str_repr = '%s + %s' % (a, b) # depends on [control=['if'], data=[]]
elif f_name == 'sub':
[a, b] = arg_strs
str_repr = '%s - %s' % (a, b) # depends on [control=['if'], data=[]]
elif f_name == 'mul':
[a, b] = arg_strs
str_repr = '%s * %s' % (a, b) # depends on [control=['if'], data=[]]
elif f_name == 'div':
[a, b] = arg_strs
str_repr = '%s / %s' % (a, b) # depends on [control=['if'], data=[]]
elif f_name == 'neg':
[a] = arg_strs
str_repr = '-%s' % a # depends on [control=['if'], data=[]]
elif f_name == 'affine_transform':
str_repr = arg_strs[0]
for i in xrange(1, len(arg_strs), 2):
str_repr += ' + %s*%s' % tuple(arg_strs[i:i + 2]) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
elif arg_strs is not None:
str_repr = '%s(%s)' % (f_name, ', '.join(arg_strs)) # depends on [control=['if'], data=['arg_strs']]
else:
str_repr = f_name
name = vidx2str(vidx)
var_name = '%s' % var_name_dict.get(vidx, 'v%d' % vidx) if not compact else ''
# if show_dims:
# str_repr = '%s\\n%s' % (shape_str(e.dim), str_repr)
label = str_repr
if not compact:
label = '%s = %s' % (var_name, label) # depends on [control=['if'], data=[]]
features = ''
# if output_dim.invalid():
# features += " [color=red,style=filled,fillcolor=red]"
# node_def_lines.append(' %s [label="%s%s"] %s;' % (vidx2str(vidx), label_prefix, str_repr, ''))
expr_name = expression_names[e] if compact and expression_names and (e in expression_names) and (expression_names[e] != f_name) else None
nodes.add(GVNode(name, input_dim, label, output_dim, frozenset(children), features, node_type, expr_name)) # depends on [control=['for'], data=['e']]
return nodes |
def parse_int(value, base_unit=None):
"""
>>> parse_int('1') == 1
True
>>> parse_int(' 0x400 MB ', '16384kB') == 64
True
>>> parse_int('1MB', 'kB') == 1024
True
>>> parse_int('1000 ms', 's') == 1
True
>>> parse_int('1GB', 'MB') is None
True
>>> parse_int(0) == 0
True
"""
convert = {
'kB': {'kB': 1, 'MB': 1024, 'GB': 1024 * 1024, 'TB': 1024 * 1024 * 1024},
'ms': {'ms': 1, 's': 1000, 'min': 1000 * 60, 'h': 1000 * 60 * 60, 'd': 1000 * 60 * 60 * 24},
's': {'ms': -1000, 's': 1, 'min': 60, 'h': 60 * 60, 'd': 60 * 60 * 24},
'min': {'ms': -1000 * 60, 's': -60, 'min': 1, 'h': 60, 'd': 60 * 24}
}
value, unit = strtol(value)
if value is not None:
if not unit:
return value
if base_unit and base_unit not in convert:
base_value, base_unit = strtol(base_unit, False)
else:
base_value = 1
if base_unit in convert and unit in convert[base_unit]:
multiplier = convert[base_unit][unit]
if multiplier < 0:
value /= -multiplier
else:
value *= multiplier
return int(value/base_value) | def function[parse_int, parameter[value, base_unit]]:
constant[
>>> parse_int('1') == 1
True
>>> parse_int(' 0x400 MB ', '16384kB') == 64
True
>>> parse_int('1MB', 'kB') == 1024
True
>>> parse_int('1000 ms', 's') == 1
True
>>> parse_int('1GB', 'MB') is None
True
>>> parse_int(0) == 0
True
]
variable[convert] assign[=] dictionary[[<ast.Constant object at 0x7da18eb56cb0>, <ast.Constant object at 0x7da18eb57400>, <ast.Constant object at 0x7da18eb54640>, <ast.Constant object at 0x7da18eb548e0>], [<ast.Dict object at 0x7da18eb54f70>, <ast.Dict object at 0x7da18eb556c0>, <ast.Dict object at 0x7da18eb54730>, <ast.Dict object at 0x7da18eb56a10>]]
<ast.Tuple object at 0x7da18eb56770> assign[=] call[name[strtol], parameter[name[value]]]
if compare[name[value] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da18eb54250> begin[:]
return[name[value]]
if <ast.BoolOp object at 0x7da18eb551e0> begin[:]
<ast.Tuple object at 0x7da18eb55930> assign[=] call[name[strtol], parameter[name[base_unit], constant[False]]]
if <ast.BoolOp object at 0x7da1b21e08b0> begin[:]
variable[multiplier] assign[=] call[call[name[convert]][name[base_unit]]][name[unit]]
if compare[name[multiplier] less[<] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b21e3490>
return[call[name[int], parameter[binary_operation[name[value] / name[base_value]]]]] | keyword[def] identifier[parse_int] ( identifier[value] , identifier[base_unit] = keyword[None] ):
literal[string]
identifier[convert] ={
literal[string] :{ literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] * literal[int] , literal[string] : literal[int] * literal[int] * literal[int] },
literal[string] :{ literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] * literal[int] , literal[string] : literal[int] * literal[int] * literal[int] , literal[string] : literal[int] * literal[int] * literal[int] * literal[int] },
literal[string] :{ literal[string] :- literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] * literal[int] , literal[string] : literal[int] * literal[int] * literal[int] },
literal[string] :{ literal[string] :- literal[int] * literal[int] , literal[string] :- literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] * literal[int] }
}
identifier[value] , identifier[unit] = identifier[strtol] ( identifier[value] )
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[unit] :
keyword[return] identifier[value]
keyword[if] identifier[base_unit] keyword[and] identifier[base_unit] keyword[not] keyword[in] identifier[convert] :
identifier[base_value] , identifier[base_unit] = identifier[strtol] ( identifier[base_unit] , keyword[False] )
keyword[else] :
identifier[base_value] = literal[int]
keyword[if] identifier[base_unit] keyword[in] identifier[convert] keyword[and] identifier[unit] keyword[in] identifier[convert] [ identifier[base_unit] ]:
identifier[multiplier] = identifier[convert] [ identifier[base_unit] ][ identifier[unit] ]
keyword[if] identifier[multiplier] < literal[int] :
identifier[value] /=- identifier[multiplier]
keyword[else] :
identifier[value] *= identifier[multiplier]
keyword[return] identifier[int] ( identifier[value] / identifier[base_value] ) | def parse_int(value, base_unit=None):
"""
>>> parse_int('1') == 1
True
>>> parse_int(' 0x400 MB ', '16384kB') == 64
True
>>> parse_int('1MB', 'kB') == 1024
True
>>> parse_int('1000 ms', 's') == 1
True
>>> parse_int('1GB', 'MB') is None
True
>>> parse_int(0) == 0
True
"""
convert = {'kB': {'kB': 1, 'MB': 1024, 'GB': 1024 * 1024, 'TB': 1024 * 1024 * 1024}, 'ms': {'ms': 1, 's': 1000, 'min': 1000 * 60, 'h': 1000 * 60 * 60, 'd': 1000 * 60 * 60 * 24}, 's': {'ms': -1000, 's': 1, 'min': 60, 'h': 60 * 60, 'd': 60 * 60 * 24}, 'min': {'ms': -1000 * 60, 's': -60, 'min': 1, 'h': 60, 'd': 60 * 24}}
(value, unit) = strtol(value)
if value is not None:
if not unit:
return value # depends on [control=['if'], data=[]]
if base_unit and base_unit not in convert:
(base_value, base_unit) = strtol(base_unit, False) # depends on [control=['if'], data=[]]
else:
base_value = 1
if base_unit in convert and unit in convert[base_unit]:
multiplier = convert[base_unit][unit]
if multiplier < 0:
value /= -multiplier # depends on [control=['if'], data=['multiplier']]
else:
value *= multiplier
return int(value / base_value) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']] |
def negative_volume_index(close_data, volume):
"""
Negative Volume Index (NVI).
Formula:
NVI0 = 1
IF Vt < Vt-1
NVIt = NVIt-1 + (CLOSEt - CLOSEt-1 / CLOSEt-1 * NVIt-1)
ELSE:
NVIt = NVIt-1
"""
catch_errors.check_for_input_len_diff(close_data, volume)
nvi = np.zeros(len(volume))
nvi[0] = 1
for idx in range(1, len(volume)):
if volume[idx] < volume[idx-1]:
nvi[idx] = volume_index_helper(nvi, idx, close_data)
else:
nvi[idx] = nvi[idx-1]
return nvi | def function[negative_volume_index, parameter[close_data, volume]]:
constant[
Negative Volume Index (NVI).
Formula:
NVI0 = 1
IF Vt < Vt-1
NVIt = NVIt-1 + (CLOSEt - CLOSEt-1 / CLOSEt-1 * NVIt-1)
ELSE:
NVIt = NVIt-1
]
call[name[catch_errors].check_for_input_len_diff, parameter[name[close_data], name[volume]]]
variable[nvi] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[volume]]]]]
call[name[nvi]][constant[0]] assign[=] constant[1]
for taget[name[idx]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[volume]]]]]] begin[:]
if compare[call[name[volume]][name[idx]] less[<] call[name[volume]][binary_operation[name[idx] - constant[1]]]] begin[:]
call[name[nvi]][name[idx]] assign[=] call[name[volume_index_helper], parameter[name[nvi], name[idx], name[close_data]]]
return[name[nvi]] | keyword[def] identifier[negative_volume_index] ( identifier[close_data] , identifier[volume] ):
literal[string]
identifier[catch_errors] . identifier[check_for_input_len_diff] ( identifier[close_data] , identifier[volume] )
identifier[nvi] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[volume] ))
identifier[nvi] [ literal[int] ]= literal[int]
keyword[for] identifier[idx] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[volume] )):
keyword[if] identifier[volume] [ identifier[idx] ]< identifier[volume] [ identifier[idx] - literal[int] ]:
identifier[nvi] [ identifier[idx] ]= identifier[volume_index_helper] ( identifier[nvi] , identifier[idx] , identifier[close_data] )
keyword[else] :
identifier[nvi] [ identifier[idx] ]= identifier[nvi] [ identifier[idx] - literal[int] ]
keyword[return] identifier[nvi] | def negative_volume_index(close_data, volume):
"""
Negative Volume Index (NVI).
Formula:
NVI0 = 1
IF Vt < Vt-1
NVIt = NVIt-1 + (CLOSEt - CLOSEt-1 / CLOSEt-1 * NVIt-1)
ELSE:
NVIt = NVIt-1
"""
catch_errors.check_for_input_len_diff(close_data, volume)
nvi = np.zeros(len(volume))
nvi[0] = 1
for idx in range(1, len(volume)):
if volume[idx] < volume[idx - 1]:
nvi[idx] = volume_index_helper(nvi, idx, close_data) # depends on [control=['if'], data=[]]
else:
nvi[idx] = nvi[idx - 1] # depends on [control=['for'], data=['idx']]
return nvi |
def pop(self):
"""Remove and return an arbitrary set element.
:raises KeyError: if the set is empty.
"""
member = self.client.spop(self.name)
if member is not None:
return member
raise KeyError() | def function[pop, parameter[self]]:
constant[Remove and return an arbitrary set element.
:raises KeyError: if the set is empty.
]
variable[member] assign[=] call[name[self].client.spop, parameter[name[self].name]]
if compare[name[member] is_not constant[None]] begin[:]
return[name[member]]
<ast.Raise object at 0x7da18ede59c0> | keyword[def] identifier[pop] ( identifier[self] ):
literal[string]
identifier[member] = identifier[self] . identifier[client] . identifier[spop] ( identifier[self] . identifier[name] )
keyword[if] identifier[member] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[member]
keyword[raise] identifier[KeyError] () | def pop(self):
"""Remove and return an arbitrary set element.
:raises KeyError: if the set is empty.
"""
member = self.client.spop(self.name)
if member is not None:
return member # depends on [control=['if'], data=['member']]
raise KeyError() |
def field(self, name, fieldType="C", size="50", decimal=0):
"""Adds a dbf field descriptor to the shapefile."""
if fieldType == "D":
size = "8"
decimal = 0
elif fieldType == "L":
size = "1"
decimal = 0
if len(self.fields) >= 2046:
raise ShapefileException(
"Shapefile Writer reached maximum number of fields: 2046.")
self.fields.append((name, fieldType, size, decimal)) | def function[field, parameter[self, name, fieldType, size, decimal]]:
constant[Adds a dbf field descriptor to the shapefile.]
if compare[name[fieldType] equal[==] constant[D]] begin[:]
variable[size] assign[=] constant[8]
variable[decimal] assign[=] constant[0]
if compare[call[name[len], parameter[name[self].fields]] greater_or_equal[>=] constant[2046]] begin[:]
<ast.Raise object at 0x7da18ede4af0>
call[name[self].fields.append, parameter[tuple[[<ast.Name object at 0x7da18ede6890>, <ast.Name object at 0x7da18ede69e0>, <ast.Name object at 0x7da18ede6c50>, <ast.Name object at 0x7da18ede50f0>]]]] | keyword[def] identifier[field] ( identifier[self] , identifier[name] , identifier[fieldType] = literal[string] , identifier[size] = literal[string] , identifier[decimal] = literal[int] ):
literal[string]
keyword[if] identifier[fieldType] == literal[string] :
identifier[size] = literal[string]
identifier[decimal] = literal[int]
keyword[elif] identifier[fieldType] == literal[string] :
identifier[size] = literal[string]
identifier[decimal] = literal[int]
keyword[if] identifier[len] ( identifier[self] . identifier[fields] )>= literal[int] :
keyword[raise] identifier[ShapefileException] (
literal[string] )
identifier[self] . identifier[fields] . identifier[append] (( identifier[name] , identifier[fieldType] , identifier[size] , identifier[decimal] )) | def field(self, name, fieldType='C', size='50', decimal=0):
"""Adds a dbf field descriptor to the shapefile."""
if fieldType == 'D':
size = '8'
decimal = 0 # depends on [control=['if'], data=[]]
elif fieldType == 'L':
size = '1'
decimal = 0 # depends on [control=['if'], data=[]]
if len(self.fields) >= 2046:
raise ShapefileException('Shapefile Writer reached maximum number of fields: 2046.') # depends on [control=['if'], data=[]]
self.fields.append((name, fieldType, size, decimal)) |
def print_benchmark(n_train, n_test, n_dim, res):
""" Print the benchmarks
Parameters
----------
n_train : int
number of points in the training set
n_test : int
number of points in the test set
n_dim : int
number of dimensions (default=2)
res : dict
a dictionary with the timing results
"""
print('='*80)
print(' '*10, 'N_dim={}, N_train={}, N_test={}'.format(n_dim,
n_train, n_test))
print('='*80)
print('\n', '# Training the model', '\n')
print('|'.join(['{:>11} '.format(el) for el in ['t_train (s)'] +
VARIOGRAM_MODELS]))
print('-' * (11 + 2) * (len(VARIOGRAM_MODELS) + 1))
print('|'.join(['{:>11} '.format('Training')] +
['{:>11.2} '.format(el) for el in
[res['t_train_{}'.format(mod)]
for mod in VARIOGRAM_MODELS]]))
print('\n', '# Predicting kriging points', '\n')
print('|'.join(['{:>11} '.format(el) for el in ['t_test (s)'] + BACKENDS]))
print('-' * (11 + 2) * (len(BACKENDS) + 1))
for n_closest_points in N_MOVING_WINDOW:
timing_results = [res.get(
't_test_{}_{}'.format(mod, n_closest_points), '')
for mod in BACKENDS]
print('|'.join(['{:>11} '.format('N_nn=' + str(n_closest_points))] +
['{:>11.2} '.format(el) for el in timing_results])) | def function[print_benchmark, parameter[n_train, n_test, n_dim, res]]:
constant[ Print the benchmarks
Parameters
----------
n_train : int
number of points in the training set
n_test : int
number of points in the test set
n_dim : int
number of dimensions (default=2)
res : dict
a dictionary with the timing results
]
call[name[print], parameter[binary_operation[constant[=] * constant[80]]]]
call[name[print], parameter[binary_operation[constant[ ] * constant[10]], call[constant[N_dim={}, N_train={}, N_test={}].format, parameter[name[n_dim], name[n_train], name[n_test]]]]]
call[name[print], parameter[binary_operation[constant[=] * constant[80]]]]
call[name[print], parameter[constant[
], constant[# Training the model], constant[
]]]
call[name[print], parameter[call[constant[|].join, parameter[<ast.ListComp object at 0x7da1b1110c40>]]]]
call[name[print], parameter[binary_operation[binary_operation[constant[-] * binary_operation[constant[11] + constant[2]]] * binary_operation[call[name[len], parameter[name[VARIOGRAM_MODELS]]] + constant[1]]]]]
call[name[print], parameter[call[constant[|].join, parameter[binary_operation[list[[<ast.Call object at 0x7da1b11126b0>]] + <ast.ListComp object at 0x7da1b1113790>]]]]]
call[name[print], parameter[constant[
], constant[# Predicting kriging points], constant[
]]]
call[name[print], parameter[call[constant[|].join, parameter[<ast.ListComp object at 0x7da20c7964d0>]]]]
call[name[print], parameter[binary_operation[binary_operation[constant[-] * binary_operation[constant[11] + constant[2]]] * binary_operation[call[name[len], parameter[name[BACKENDS]]] + constant[1]]]]]
for taget[name[n_closest_points]] in starred[name[N_MOVING_WINDOW]] begin[:]
variable[timing_results] assign[=] <ast.ListComp object at 0x7da1b1111c30>
call[name[print], parameter[call[constant[|].join, parameter[binary_operation[list[[<ast.Call object at 0x7da18c4cf130>]] + <ast.ListComp object at 0x7da18c4cecb0>]]]]] | keyword[def] identifier[print_benchmark] ( identifier[n_train] , identifier[n_test] , identifier[n_dim] , identifier[res] ):
literal[string]
identifier[print] ( literal[string] * literal[int] )
identifier[print] ( literal[string] * literal[int] , literal[string] . identifier[format] ( identifier[n_dim] ,
identifier[n_train] , identifier[n_test] ))
identifier[print] ( literal[string] * literal[int] )
identifier[print] ( literal[string] , literal[string] , literal[string] )
identifier[print] ( literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[el] ) keyword[for] identifier[el] keyword[in] [ literal[string] ]+
identifier[VARIOGRAM_MODELS] ]))
identifier[print] ( literal[string] *( literal[int] + literal[int] )*( identifier[len] ( identifier[VARIOGRAM_MODELS] )+ literal[int] ))
identifier[print] ( literal[string] . identifier[join] ([ literal[string] . identifier[format] ( literal[string] )]+
[ literal[string] . identifier[format] ( identifier[el] ) keyword[for] identifier[el] keyword[in]
[ identifier[res] [ literal[string] . identifier[format] ( identifier[mod] )]
keyword[for] identifier[mod] keyword[in] identifier[VARIOGRAM_MODELS] ]]))
identifier[print] ( literal[string] , literal[string] , literal[string] )
identifier[print] ( literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[el] ) keyword[for] identifier[el] keyword[in] [ literal[string] ]+ identifier[BACKENDS] ]))
identifier[print] ( literal[string] *( literal[int] + literal[int] )*( identifier[len] ( identifier[BACKENDS] )+ literal[int] ))
keyword[for] identifier[n_closest_points] keyword[in] identifier[N_MOVING_WINDOW] :
identifier[timing_results] =[ identifier[res] . identifier[get] (
literal[string] . identifier[format] ( identifier[mod] , identifier[n_closest_points] ), literal[string] )
keyword[for] identifier[mod] keyword[in] identifier[BACKENDS] ]
identifier[print] ( literal[string] . identifier[join] ([ literal[string] . identifier[format] ( literal[string] + identifier[str] ( identifier[n_closest_points] ))]+
[ literal[string] . identifier[format] ( identifier[el] ) keyword[for] identifier[el] keyword[in] identifier[timing_results] ])) | def print_benchmark(n_train, n_test, n_dim, res):
""" Print the benchmarks
Parameters
----------
n_train : int
number of points in the training set
n_test : int
number of points in the test set
n_dim : int
number of dimensions (default=2)
res : dict
a dictionary with the timing results
"""
print('=' * 80)
print(' ' * 10, 'N_dim={}, N_train={}, N_test={}'.format(n_dim, n_train, n_test))
print('=' * 80)
print('\n', '# Training the model', '\n')
print('|'.join(['{:>11} '.format(el) for el in ['t_train (s)'] + VARIOGRAM_MODELS]))
print('-' * (11 + 2) * (len(VARIOGRAM_MODELS) + 1))
print('|'.join(['{:>11} '.format('Training')] + ['{:>11.2} '.format(el) for el in [res['t_train_{}'.format(mod)] for mod in VARIOGRAM_MODELS]]))
print('\n', '# Predicting kriging points', '\n')
print('|'.join(['{:>11} '.format(el) for el in ['t_test (s)'] + BACKENDS]))
print('-' * (11 + 2) * (len(BACKENDS) + 1))
for n_closest_points in N_MOVING_WINDOW:
timing_results = [res.get('t_test_{}_{}'.format(mod, n_closest_points), '') for mod in BACKENDS]
print('|'.join(['{:>11} '.format('N_nn=' + str(n_closest_points))] + ['{:>11.2} '.format(el) for el in timing_results])) # depends on [control=['for'], data=['n_closest_points']] |
def endpoint(url, method="GET", expected_status=200):
"""endpoint - decorator to manipulate the REST-service endpoint.
The endpoint decorator sets the endpoint and the method for the class
to access the REST-service.
"""
def dec(obj):
obj.ENDPOINT = url
obj.METHOD = method
obj.EXPECTED_STATUS = expected_status
return obj
return dec | def function[endpoint, parameter[url, method, expected_status]]:
constant[endpoint - decorator to manipulate the REST-service endpoint.
The endpoint decorator sets the endpoint and the method for the class
to access the REST-service.
]
def function[dec, parameter[obj]]:
name[obj].ENDPOINT assign[=] name[url]
name[obj].METHOD assign[=] name[method]
name[obj].EXPECTED_STATUS assign[=] name[expected_status]
return[name[obj]]
return[name[dec]] | keyword[def] identifier[endpoint] ( identifier[url] , identifier[method] = literal[string] , identifier[expected_status] = literal[int] ):
literal[string]
keyword[def] identifier[dec] ( identifier[obj] ):
identifier[obj] . identifier[ENDPOINT] = identifier[url]
identifier[obj] . identifier[METHOD] = identifier[method]
identifier[obj] . identifier[EXPECTED_STATUS] = identifier[expected_status]
keyword[return] identifier[obj]
keyword[return] identifier[dec] | def endpoint(url, method='GET', expected_status=200):
"""endpoint - decorator to manipulate the REST-service endpoint.
The endpoint decorator sets the endpoint and the method for the class
to access the REST-service.
"""
def dec(obj):
obj.ENDPOINT = url
obj.METHOD = method
obj.EXPECTED_STATUS = expected_status
return obj
return dec |
def facts(self):
"""Iterate over the asserted Facts."""
fact = lib.EnvGetNextFact(self._env, ffi.NULL)
while fact != ffi.NULL:
yield new_fact(self._env, fact)
fact = lib.EnvGetNextFact(self._env, fact) | def function[facts, parameter[self]]:
constant[Iterate over the asserted Facts.]
variable[fact] assign[=] call[name[lib].EnvGetNextFact, parameter[name[self]._env, name[ffi].NULL]]
while compare[name[fact] not_equal[!=] name[ffi].NULL] begin[:]
<ast.Yield object at 0x7da18bccbd90>
variable[fact] assign[=] call[name[lib].EnvGetNextFact, parameter[name[self]._env, name[fact]]] | keyword[def] identifier[facts] ( identifier[self] ):
literal[string]
identifier[fact] = identifier[lib] . identifier[EnvGetNextFact] ( identifier[self] . identifier[_env] , identifier[ffi] . identifier[NULL] )
keyword[while] identifier[fact] != identifier[ffi] . identifier[NULL] :
keyword[yield] identifier[new_fact] ( identifier[self] . identifier[_env] , identifier[fact] )
identifier[fact] = identifier[lib] . identifier[EnvGetNextFact] ( identifier[self] . identifier[_env] , identifier[fact] ) | def facts(self):
"""Iterate over the asserted Facts."""
fact = lib.EnvGetNextFact(self._env, ffi.NULL)
while fact != ffi.NULL:
yield new_fact(self._env, fact)
fact = lib.EnvGetNextFact(self._env, fact) # depends on [control=['while'], data=['fact']] |
def _construct_location_stack_entry(location, num_traverses):
"""Return a LocationStackEntry namedtuple with the specified parameters."""
if not isinstance(num_traverses, int) or num_traverses < 0:
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "num_traverses" {}. This is not allowed.'
.format(num_traverses))
if not isinstance(location, Location):
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "location" {}. This is not allowed.'
.format(location))
return LocationStackEntry(location=location, num_traverses=num_traverses) | def function[_construct_location_stack_entry, parameter[location, num_traverses]]:
constant[Return a LocationStackEntry namedtuple with the specified parameters.]
if <ast.BoolOp object at 0x7da1b16479d0> begin[:]
<ast.Raise object at 0x7da1b1646e00>
if <ast.UnaryOp object at 0x7da1b1644ca0> begin[:]
<ast.Raise object at 0x7da1b1647190>
return[call[name[LocationStackEntry], parameter[]]] | keyword[def] identifier[_construct_location_stack_entry] ( identifier[location] , identifier[num_traverses] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[num_traverses] , identifier[int] ) keyword[or] identifier[num_traverses] < literal[int] :
keyword[raise] identifier[AssertionError] ( literal[string]
literal[string]
. identifier[format] ( identifier[num_traverses] ))
keyword[if] keyword[not] identifier[isinstance] ( identifier[location] , identifier[Location] ):
keyword[raise] identifier[AssertionError] ( literal[string]
literal[string]
. identifier[format] ( identifier[location] ))
keyword[return] identifier[LocationStackEntry] ( identifier[location] = identifier[location] , identifier[num_traverses] = identifier[num_traverses] ) | def _construct_location_stack_entry(location, num_traverses):
"""Return a LocationStackEntry namedtuple with the specified parameters."""
if not isinstance(num_traverses, int) or num_traverses < 0:
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid value for "num_traverses" {}. This is not allowed.'.format(num_traverses)) # depends on [control=['if'], data=[]]
if not isinstance(location, Location):
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid value for "location" {}. This is not allowed.'.format(location)) # depends on [control=['if'], data=[]]
return LocationStackEntry(location=location, num_traverses=num_traverses) |
def connect(host, default_protocol='telnet', **kwargs):
"""
Like :class:`prepare()`, but also connects to the host by calling
:class:`Protocol.connect()`. If the URL or host contain any login info, this
function also logs into the host using :class:`Protocol.login()`.
:type host: str or Host
:param host: A URL-formatted hostname or a :class:`Exscript.Host` object.
:type default_protocol: str
:param default_protocol: Protocol that is used if the URL specifies none.
:type kwargs: dict
:param kwargs: Passed to the protocol constructor.
:rtype: Protocol
:return: An instance of the protocol.
"""
host = to_host(host)
conn = prepare(host, default_protocol, **kwargs)
account = host.get_account()
conn.connect(host.get_address(), host.get_tcp_port())
if account is not None:
conn.login(account)
return conn | def function[connect, parameter[host, default_protocol]]:
constant[
Like :class:`prepare()`, but also connects to the host by calling
:class:`Protocol.connect()`. If the URL or host contain any login info, this
function also logs into the host using :class:`Protocol.login()`.
:type host: str or Host
:param host: A URL-formatted hostname or a :class:`Exscript.Host` object.
:type default_protocol: str
:param default_protocol: Protocol that is used if the URL specifies none.
:type kwargs: dict
:param kwargs: Passed to the protocol constructor.
:rtype: Protocol
:return: An instance of the protocol.
]
variable[host] assign[=] call[name[to_host], parameter[name[host]]]
variable[conn] assign[=] call[name[prepare], parameter[name[host], name[default_protocol]]]
variable[account] assign[=] call[name[host].get_account, parameter[]]
call[name[conn].connect, parameter[call[name[host].get_address, parameter[]], call[name[host].get_tcp_port, parameter[]]]]
if compare[name[account] is_not constant[None]] begin[:]
call[name[conn].login, parameter[name[account]]]
return[name[conn]] | keyword[def] identifier[connect] ( identifier[host] , identifier[default_protocol] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[host] = identifier[to_host] ( identifier[host] )
identifier[conn] = identifier[prepare] ( identifier[host] , identifier[default_protocol] ,** identifier[kwargs] )
identifier[account] = identifier[host] . identifier[get_account] ()
identifier[conn] . identifier[connect] ( identifier[host] . identifier[get_address] (), identifier[host] . identifier[get_tcp_port] ())
keyword[if] identifier[account] keyword[is] keyword[not] keyword[None] :
identifier[conn] . identifier[login] ( identifier[account] )
keyword[return] identifier[conn] | def connect(host, default_protocol='telnet', **kwargs):
"""
Like :class:`prepare()`, but also connects to the host by calling
:class:`Protocol.connect()`. If the URL or host contain any login info, this
function also logs into the host using :class:`Protocol.login()`.
:type host: str or Host
:param host: A URL-formatted hostname or a :class:`Exscript.Host` object.
:type default_protocol: str
:param default_protocol: Protocol that is used if the URL specifies none.
:type kwargs: dict
:param kwargs: Passed to the protocol constructor.
:rtype: Protocol
:return: An instance of the protocol.
"""
host = to_host(host)
conn = prepare(host, default_protocol, **kwargs)
account = host.get_account()
conn.connect(host.get_address(), host.get_tcp_port())
if account is not None:
conn.login(account) # depends on [control=['if'], data=['account']]
return conn |
def pan_delta(self, dx_px, dy_px):
"""
This causes the scene to appear to translate right and up
(i.e., what really happens is the camera is translated left and down).
This is also called "panning" in some software packages.
Passing in negative delta values causes the opposite motion.
"""
direction = self.target - self.position
distance_from_target = direction.length()
direction = direction.normalized()
speed_per_radius = self.get_translation_speed(distance_from_target)
px_per_unit = self.vport_radius_px / speed_per_radius
right = direction ^ self.up
translation = (right * (-dx_px / px_per_unit) +
self.up * (-dy_px / px_per_unit))
self.position = self.position + translation
self.target = self.target + translation | def function[pan_delta, parameter[self, dx_px, dy_px]]:
constant[
This causes the scene to appear to translate right and up
(i.e., what really happens is the camera is translated left and down).
This is also called "panning" in some software packages.
Passing in negative delta values causes the opposite motion.
]
variable[direction] assign[=] binary_operation[name[self].target - name[self].position]
variable[distance_from_target] assign[=] call[name[direction].length, parameter[]]
variable[direction] assign[=] call[name[direction].normalized, parameter[]]
variable[speed_per_radius] assign[=] call[name[self].get_translation_speed, parameter[name[distance_from_target]]]
variable[px_per_unit] assign[=] binary_operation[name[self].vport_radius_px / name[speed_per_radius]]
variable[right] assign[=] binary_operation[name[direction] <ast.BitXor object at 0x7da2590d6b00> name[self].up]
variable[translation] assign[=] binary_operation[binary_operation[name[right] * binary_operation[<ast.UnaryOp object at 0x7da1b0d1b2e0> / name[px_per_unit]]] + binary_operation[name[self].up * binary_operation[<ast.UnaryOp object at 0x7da1b0d18c10> / name[px_per_unit]]]]
name[self].position assign[=] binary_operation[name[self].position + name[translation]]
name[self].target assign[=] binary_operation[name[self].target + name[translation]] | keyword[def] identifier[pan_delta] ( identifier[self] , identifier[dx_px] , identifier[dy_px] ):
literal[string]
identifier[direction] = identifier[self] . identifier[target] - identifier[self] . identifier[position]
identifier[distance_from_target] = identifier[direction] . identifier[length] ()
identifier[direction] = identifier[direction] . identifier[normalized] ()
identifier[speed_per_radius] = identifier[self] . identifier[get_translation_speed] ( identifier[distance_from_target] )
identifier[px_per_unit] = identifier[self] . identifier[vport_radius_px] / identifier[speed_per_radius]
identifier[right] = identifier[direction] ^ identifier[self] . identifier[up]
identifier[translation] =( identifier[right] *(- identifier[dx_px] / identifier[px_per_unit] )+
identifier[self] . identifier[up] *(- identifier[dy_px] / identifier[px_per_unit] ))
identifier[self] . identifier[position] = identifier[self] . identifier[position] + identifier[translation]
identifier[self] . identifier[target] = identifier[self] . identifier[target] + identifier[translation] | def pan_delta(self, dx_px, dy_px):
"""
This causes the scene to appear to translate right and up
(i.e., what really happens is the camera is translated left and down).
This is also called "panning" in some software packages.
Passing in negative delta values causes the opposite motion.
"""
direction = self.target - self.position
distance_from_target = direction.length()
direction = direction.normalized()
speed_per_radius = self.get_translation_speed(distance_from_target)
px_per_unit = self.vport_radius_px / speed_per_radius
right = direction ^ self.up
translation = right * (-dx_px / px_per_unit) + self.up * (-dy_px / px_per_unit)
self.position = self.position + translation
self.target = self.target + translation |
def _prepare_connection(func):
"""
A decorator that unpacks the host and connection from the job argument
and passes them as separate arguments to the wrapped function.
"""
def _wrapped(job, *args, **kwargs):
job_id = id(job)
to_parent = job.data['pipe']
host = job.data['host']
# Create a protocol adapter.
mkaccount = partial(_account_factory, to_parent, host)
pargs = {'account_factory': mkaccount,
'stdout': job.data['stdout']}
pargs.update(host.get_options())
conn = prepare(host, **pargs)
# Connect and run the function.
log_options = get_label(func, 'log_to')
if log_options is not None:
# Enable logging.
proxy = LoggerProxy(to_parent, log_options['logger_id'])
log_cb = partial(proxy.log, job_id)
proxy.add_log(job_id, job.name, job.failures + 1)
conn.data_received_event.listen(log_cb)
try:
conn.connect(host.get_address(), host.get_tcp_port())
result = func(job, host, conn, *args, **kwargs)
conn.close(force=True)
except:
proxy.log_aborted(job_id, serializeable_sys_exc_info())
raise
else:
proxy.log_succeeded(job_id)
finally:
conn.data_received_event.disconnect(log_cb)
else:
conn.connect(host.get_address(), host.get_tcp_port())
result = func(job, host, conn, *args, **kwargs)
conn.close(force=True)
return result
return _wrapped | def function[_prepare_connection, parameter[func]]:
constant[
A decorator that unpacks the host and connection from the job argument
and passes them as separate arguments to the wrapped function.
]
def function[_wrapped, parameter[job]]:
variable[job_id] assign[=] call[name[id], parameter[name[job]]]
variable[to_parent] assign[=] call[name[job].data][constant[pipe]]
variable[host] assign[=] call[name[job].data][constant[host]]
variable[mkaccount] assign[=] call[name[partial], parameter[name[_account_factory], name[to_parent], name[host]]]
variable[pargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b0652c80>, <ast.Constant object at 0x7da1b0651990>], [<ast.Name object at 0x7da1b0651c00>, <ast.Subscript object at 0x7da1b0650820>]]
call[name[pargs].update, parameter[call[name[host].get_options, parameter[]]]]
variable[conn] assign[=] call[name[prepare], parameter[name[host]]]
variable[log_options] assign[=] call[name[get_label], parameter[name[func], constant[log_to]]]
if compare[name[log_options] is_not constant[None]] begin[:]
variable[proxy] assign[=] call[name[LoggerProxy], parameter[name[to_parent], call[name[log_options]][constant[logger_id]]]]
variable[log_cb] assign[=] call[name[partial], parameter[name[proxy].log, name[job_id]]]
call[name[proxy].add_log, parameter[name[job_id], name[job].name, binary_operation[name[job].failures + constant[1]]]]
call[name[conn].data_received_event.listen, parameter[name[log_cb]]]
<ast.Try object at 0x7da1b0651e70>
return[name[result]]
return[name[_wrapped]] | keyword[def] identifier[_prepare_connection] ( identifier[func] ):
literal[string]
keyword[def] identifier[_wrapped] ( identifier[job] ,* identifier[args] ,** identifier[kwargs] ):
identifier[job_id] = identifier[id] ( identifier[job] )
identifier[to_parent] = identifier[job] . identifier[data] [ literal[string] ]
identifier[host] = identifier[job] . identifier[data] [ literal[string] ]
identifier[mkaccount] = identifier[partial] ( identifier[_account_factory] , identifier[to_parent] , identifier[host] )
identifier[pargs] ={ literal[string] : identifier[mkaccount] ,
literal[string] : identifier[job] . identifier[data] [ literal[string] ]}
identifier[pargs] . identifier[update] ( identifier[host] . identifier[get_options] ())
identifier[conn] = identifier[prepare] ( identifier[host] ,** identifier[pargs] )
identifier[log_options] = identifier[get_label] ( identifier[func] , literal[string] )
keyword[if] identifier[log_options] keyword[is] keyword[not] keyword[None] :
identifier[proxy] = identifier[LoggerProxy] ( identifier[to_parent] , identifier[log_options] [ literal[string] ])
identifier[log_cb] = identifier[partial] ( identifier[proxy] . identifier[log] , identifier[job_id] )
identifier[proxy] . identifier[add_log] ( identifier[job_id] , identifier[job] . identifier[name] , identifier[job] . identifier[failures] + literal[int] )
identifier[conn] . identifier[data_received_event] . identifier[listen] ( identifier[log_cb] )
keyword[try] :
identifier[conn] . identifier[connect] ( identifier[host] . identifier[get_address] (), identifier[host] . identifier[get_tcp_port] ())
identifier[result] = identifier[func] ( identifier[job] , identifier[host] , identifier[conn] ,* identifier[args] ,** identifier[kwargs] )
identifier[conn] . identifier[close] ( identifier[force] = keyword[True] )
keyword[except] :
identifier[proxy] . identifier[log_aborted] ( identifier[job_id] , identifier[serializeable_sys_exc_info] ())
keyword[raise]
keyword[else] :
identifier[proxy] . identifier[log_succeeded] ( identifier[job_id] )
keyword[finally] :
identifier[conn] . identifier[data_received_event] . identifier[disconnect] ( identifier[log_cb] )
keyword[else] :
identifier[conn] . identifier[connect] ( identifier[host] . identifier[get_address] (), identifier[host] . identifier[get_tcp_port] ())
identifier[result] = identifier[func] ( identifier[job] , identifier[host] , identifier[conn] ,* identifier[args] ,** identifier[kwargs] )
identifier[conn] . identifier[close] ( identifier[force] = keyword[True] )
keyword[return] identifier[result]
keyword[return] identifier[_wrapped] | def _prepare_connection(func):
"""
A decorator that unpacks the host and connection from the job argument
and passes them as separate arguments to the wrapped function.
"""
def _wrapped(job, *args, **kwargs):
job_id = id(job)
to_parent = job.data['pipe']
host = job.data['host']
# Create a protocol adapter.
mkaccount = partial(_account_factory, to_parent, host)
pargs = {'account_factory': mkaccount, 'stdout': job.data['stdout']}
pargs.update(host.get_options())
conn = prepare(host, **pargs)
# Connect and run the function.
log_options = get_label(func, 'log_to')
if log_options is not None:
# Enable logging.
proxy = LoggerProxy(to_parent, log_options['logger_id'])
log_cb = partial(proxy.log, job_id)
proxy.add_log(job_id, job.name, job.failures + 1)
conn.data_received_event.listen(log_cb)
try:
conn.connect(host.get_address(), host.get_tcp_port())
result = func(job, host, conn, *args, **kwargs)
conn.close(force=True) # depends on [control=['try'], data=[]]
except:
proxy.log_aborted(job_id, serializeable_sys_exc_info())
raise # depends on [control=['except'], data=[]]
else:
proxy.log_succeeded(job_id)
finally:
conn.data_received_event.disconnect(log_cb) # depends on [control=['if'], data=['log_options']]
else:
conn.connect(host.get_address(), host.get_tcp_port())
result = func(job, host, conn, *args, **kwargs)
conn.close(force=True)
return result
return _wrapped |
def _query(self, method, path, data=None, page=False, retry=0):
"""
Fetch an object from the Graph API and parse the output, returning a tuple where the first item
is the object yielded by the Graph API and the second is the URL for the next page of results, or
``None`` if results have been exhausted.
:param method: A string describing the HTTP method.
:param path: A string describing the object in the Graph API.
:param data: A dictionary of HTTP GET parameters (for GET requests) or POST data (for POST requests).
:param page: A boolean describing whether to return an iterator that iterates over each page of results.
:param retry: An integer describing how many times the request may be retried.
"""
if(data):
data = dict(
(k.replace('_sqbro_', '['), v) for k, v in data.items())
data = dict(
(k.replace('_sqbrc_', ']'), v) for k, v in data.items())
data = dict(
(k.replace('__', ':'), v) for k, v in data.items())
data = data or {}
def load(method, url, data):
for key in data:
value = data[key]
if isinstance(value, (list, dict, set)):
data[key] = json.dumps(value)
try:
if method in ['GET', 'DELETE']:
response = self.session.request(
method, url, params=data, allow_redirects=True,
verify=self.verify_ssl_certificate, timeout=self.timeout
)
if method in ['POST', 'PUT']:
files = {}
for key in data:
if hasattr(data[key], 'read'):
files[key] = data[key]
for key in files:
data.pop(key)
response = self.session.request(
method, url, data=data, files=files,
verify=self.verify_ssl_certificate, timeout=self.timeout
)
if 500 <= response.status_code < 600:
# Facebook 5XX errors usually come with helpful messages
# as a JSON object describing the problem with the request.
# If this is the case, an error will be raised and we just
# need to re-raise it. This is most likely to happen
# with the Ads API.
# This will raise an exception if a JSON-like error object
# comes in the response.
self._parse(response.content)
# If Facebook does not provide any JSON-formatted error
# but just a plain-text, useless error, we'll just inform
# about a Facebook Internal errror occurred.
raise FacebookError(
'Internal Facebook error occurred',
response.status_code
)
except requests.RequestException as exception:
raise HTTPError(exception)
result = self._parse(response.content)
if isinstance(result, dict):
result['headers'] = response.headers
try:
next_url = result['paging']['next']
except (KeyError, TypeError):
next_url = None
return result, next_url
def load_with_retry(method, url, data):
remaining_retries = retry
while True:
try:
return load(method, url, data)
except FacepyError as e:
log.warn("Exception on %s: %s, retries remaining: %s",
url,
e,
remaining_retries,
)
if remaining_retries > 0:
remaining_retries -= 1
else:
raise
def paginate(method, url, data):
while url:
result, url = load_with_retry(method, url, data)
# Reset pagination parameters.
for key in ['offset', 'until', 'since']:
if key in data:
del data[key]
yield result
# Convert option lists to comma-separated values.
for key in data:
if isinstance(data[key], (list, set, tuple)) and all([isinstance(item, six.string_types) for item in data[key]]):
data[key] = ','.join(data[key])
# Support absolute paths too
if not path.startswith('/'):
if six.PY2:
path = '/' + six.text_type(path.decode('utf-8'))
else:
path = '/' + path
url = self._get_url(path)
if self.oauth_token:
data['access_token'] = self.oauth_token
if self.appsecret and self.oauth_token:
data['appsecret_proof'] = self._generate_appsecret_proof()
if page:
return paginate(method, url, data)
else:
return load_with_retry(method, url, data)[0] | def function[_query, parameter[self, method, path, data, page, retry]]:
constant[
Fetch an object from the Graph API and parse the output, returning a tuple where the first item
is the object yielded by the Graph API and the second is the URL for the next page of results, or
``None`` if results have been exhausted.
:param method: A string describing the HTTP method.
:param path: A string describing the object in the Graph API.
:param data: A dictionary of HTTP GET parameters (for GET requests) or POST data (for POST requests).
:param page: A boolean describing whether to return an iterator that iterates over each page of results.
:param retry: An integer describing how many times the request may be retried.
]
if name[data] begin[:]
variable[data] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da18bccba90>]]
variable[data] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da18bcc9f90>]]
variable[data] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da18bcc9a20>]]
variable[data] assign[=] <ast.BoolOp object at 0x7da18bccad40>
def function[load, parameter[method, url, data]]:
for taget[name[key]] in starred[name[data]] begin[:]
variable[value] assign[=] call[name[data]][name[key]]
if call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da18bcc8b50>, <ast.Name object at 0x7da18bcc8f40>, <ast.Name object at 0x7da18bcc9450>]]]] begin[:]
call[name[data]][name[key]] assign[=] call[name[json].dumps, parameter[name[value]]]
<ast.Try object at 0x7da18bccb520>
variable[result] assign[=] call[name[self]._parse, parameter[name[response].content]]
if call[name[isinstance], parameter[name[result], name[dict]]] begin[:]
call[name[result]][constant[headers]] assign[=] name[response].headers
<ast.Try object at 0x7da18bccb280>
return[tuple[[<ast.Name object at 0x7da18f722f50>, <ast.Name object at 0x7da18f720730>]]]
def function[load_with_retry, parameter[method, url, data]]:
variable[remaining_retries] assign[=] name[retry]
while constant[True] begin[:]
<ast.Try object at 0x7da18f7227d0>
def function[paginate, parameter[method, url, data]]:
while name[url] begin[:]
<ast.Tuple object at 0x7da18f720d90> assign[=] call[name[load_with_retry], parameter[name[method], name[url], name[data]]]
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da18f7212a0>, <ast.Constant object at 0x7da18f7224d0>, <ast.Constant object at 0x7da18f7225c0>]]] begin[:]
if compare[name[key] in name[data]] begin[:]
<ast.Delete object at 0x7da18f722740>
<ast.Yield object at 0x7da18f720b20>
for taget[name[key]] in starred[name[data]] begin[:]
if <ast.BoolOp object at 0x7da18f721ab0> begin[:]
call[name[data]][name[key]] assign[=] call[constant[,].join, parameter[call[name[data]][name[key]]]]
if <ast.UnaryOp object at 0x7da18f722e90> begin[:]
if name[six].PY2 begin[:]
variable[path] assign[=] binary_operation[constant[/] + call[name[six].text_type, parameter[call[name[path].decode, parameter[constant[utf-8]]]]]]
variable[url] assign[=] call[name[self]._get_url, parameter[name[path]]]
if name[self].oauth_token begin[:]
call[name[data]][constant[access_token]] assign[=] name[self].oauth_token
if <ast.BoolOp object at 0x7da18f721570> begin[:]
call[name[data]][constant[appsecret_proof]] assign[=] call[name[self]._generate_appsecret_proof, parameter[]]
if name[page] begin[:]
return[call[name[paginate], parameter[name[method], name[url], name[data]]]] | keyword[def] identifier[_query] ( identifier[self] , identifier[method] , identifier[path] , identifier[data] = keyword[None] , identifier[page] = keyword[False] , identifier[retry] = literal[int] ):
literal[string]
keyword[if] ( identifier[data] ):
identifier[data] = identifier[dict] (
( identifier[k] . identifier[replace] ( literal[string] , literal[string] ), identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[data] . identifier[items] ())
identifier[data] = identifier[dict] (
( identifier[k] . identifier[replace] ( literal[string] , literal[string] ), identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[data] . identifier[items] ())
identifier[data] = identifier[dict] (
( identifier[k] . identifier[replace] ( literal[string] , literal[string] ), identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[data] . identifier[items] ())
identifier[data] = identifier[data] keyword[or] {}
keyword[def] identifier[load] ( identifier[method] , identifier[url] , identifier[data] ):
keyword[for] identifier[key] keyword[in] identifier[data] :
identifier[value] = identifier[data] [ identifier[key] ]
keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[dict] , identifier[set] )):
identifier[data] [ identifier[key] ]= identifier[json] . identifier[dumps] ( identifier[value] )
keyword[try] :
keyword[if] identifier[method] keyword[in] [ literal[string] , literal[string] ]:
identifier[response] = identifier[self] . identifier[session] . identifier[request] (
identifier[method] , identifier[url] , identifier[params] = identifier[data] , identifier[allow_redirects] = keyword[True] ,
identifier[verify] = identifier[self] . identifier[verify_ssl_certificate] , identifier[timeout] = identifier[self] . identifier[timeout]
)
keyword[if] identifier[method] keyword[in] [ literal[string] , literal[string] ]:
identifier[files] ={}
keyword[for] identifier[key] keyword[in] identifier[data] :
keyword[if] identifier[hasattr] ( identifier[data] [ identifier[key] ], literal[string] ):
identifier[files] [ identifier[key] ]= identifier[data] [ identifier[key] ]
keyword[for] identifier[key] keyword[in] identifier[files] :
identifier[data] . identifier[pop] ( identifier[key] )
identifier[response] = identifier[self] . identifier[session] . identifier[request] (
identifier[method] , identifier[url] , identifier[data] = identifier[data] , identifier[files] = identifier[files] ,
identifier[verify] = identifier[self] . identifier[verify_ssl_certificate] , identifier[timeout] = identifier[self] . identifier[timeout]
)
keyword[if] literal[int] <= identifier[response] . identifier[status_code] < literal[int] :
identifier[self] . identifier[_parse] ( identifier[response] . identifier[content] )
keyword[raise] identifier[FacebookError] (
literal[string] ,
identifier[response] . identifier[status_code]
)
keyword[except] identifier[requests] . identifier[RequestException] keyword[as] identifier[exception] :
keyword[raise] identifier[HTTPError] ( identifier[exception] )
identifier[result] = identifier[self] . identifier[_parse] ( identifier[response] . identifier[content] )
keyword[if] identifier[isinstance] ( identifier[result] , identifier[dict] ):
identifier[result] [ literal[string] ]= identifier[response] . identifier[headers]
keyword[try] :
identifier[next_url] = identifier[result] [ literal[string] ][ literal[string] ]
keyword[except] ( identifier[KeyError] , identifier[TypeError] ):
identifier[next_url] = keyword[None]
keyword[return] identifier[result] , identifier[next_url]
keyword[def] identifier[load_with_retry] ( identifier[method] , identifier[url] , identifier[data] ):
identifier[remaining_retries] = identifier[retry]
keyword[while] keyword[True] :
keyword[try] :
keyword[return] identifier[load] ( identifier[method] , identifier[url] , identifier[data] )
keyword[except] identifier[FacepyError] keyword[as] identifier[e] :
identifier[log] . identifier[warn] ( literal[string] ,
identifier[url] ,
identifier[e] ,
identifier[remaining_retries] ,
)
keyword[if] identifier[remaining_retries] > literal[int] :
identifier[remaining_retries] -= literal[int]
keyword[else] :
keyword[raise]
keyword[def] identifier[paginate] ( identifier[method] , identifier[url] , identifier[data] ):
keyword[while] identifier[url] :
identifier[result] , identifier[url] = identifier[load_with_retry] ( identifier[method] , identifier[url] , identifier[data] )
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[if] identifier[key] keyword[in] identifier[data] :
keyword[del] identifier[data] [ identifier[key] ]
keyword[yield] identifier[result]
keyword[for] identifier[key] keyword[in] identifier[data] :
keyword[if] identifier[isinstance] ( identifier[data] [ identifier[key] ],( identifier[list] , identifier[set] , identifier[tuple] )) keyword[and] identifier[all] ([ identifier[isinstance] ( identifier[item] , identifier[six] . identifier[string_types] ) keyword[for] identifier[item] keyword[in] identifier[data] [ identifier[key] ]]):
identifier[data] [ identifier[key] ]= literal[string] . identifier[join] ( identifier[data] [ identifier[key] ])
keyword[if] keyword[not] identifier[path] . identifier[startswith] ( literal[string] ):
keyword[if] identifier[six] . identifier[PY2] :
identifier[path] = literal[string] + identifier[six] . identifier[text_type] ( identifier[path] . identifier[decode] ( literal[string] ))
keyword[else] :
identifier[path] = literal[string] + identifier[path]
identifier[url] = identifier[self] . identifier[_get_url] ( identifier[path] )
keyword[if] identifier[self] . identifier[oauth_token] :
identifier[data] [ literal[string] ]= identifier[self] . identifier[oauth_token]
keyword[if] identifier[self] . identifier[appsecret] keyword[and] identifier[self] . identifier[oauth_token] :
identifier[data] [ literal[string] ]= identifier[self] . identifier[_generate_appsecret_proof] ()
keyword[if] identifier[page] :
keyword[return] identifier[paginate] ( identifier[method] , identifier[url] , identifier[data] )
keyword[else] :
keyword[return] identifier[load_with_retry] ( identifier[method] , identifier[url] , identifier[data] )[ literal[int] ] | def _query(self, method, path, data=None, page=False, retry=0):
"""
Fetch an object from the Graph API and parse the output, returning a tuple where the first item
is the object yielded by the Graph API and the second is the URL for the next page of results, or
``None`` if results have been exhausted.
:param method: A string describing the HTTP method.
:param path: A string describing the object in the Graph API.
:param data: A dictionary of HTTP GET parameters (for GET requests) or POST data (for POST requests).
:param page: A boolean describing whether to return an iterator that iterates over each page of results.
:param retry: An integer describing how many times the request may be retried.
"""
if data:
data = dict(((k.replace('_sqbro_', '['), v) for (k, v) in data.items()))
data = dict(((k.replace('_sqbrc_', ']'), v) for (k, v) in data.items()))
data = dict(((k.replace('__', ':'), v) for (k, v) in data.items())) # depends on [control=['if'], data=[]]
data = data or {}
def load(method, url, data):
for key in data:
value = data[key]
if isinstance(value, (list, dict, set)):
data[key] = json.dumps(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
try:
if method in ['GET', 'DELETE']:
response = self.session.request(method, url, params=data, allow_redirects=True, verify=self.verify_ssl_certificate, timeout=self.timeout) # depends on [control=['if'], data=['method']]
if method in ['POST', 'PUT']:
files = {}
for key in data:
if hasattr(data[key], 'read'):
files[key] = data[key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
for key in files:
data.pop(key) # depends on [control=['for'], data=['key']]
response = self.session.request(method, url, data=data, files=files, verify=self.verify_ssl_certificate, timeout=self.timeout) # depends on [control=['if'], data=['method']]
if 500 <= response.status_code < 600:
# Facebook 5XX errors usually come with helpful messages
# as a JSON object describing the problem with the request.
# If this is the case, an error will be raised and we just
# need to re-raise it. This is most likely to happen
# with the Ads API.
# This will raise an exception if a JSON-like error object
# comes in the response.
self._parse(response.content)
# If Facebook does not provide any JSON-formatted error
# but just a plain-text, useless error, we'll just inform
# about a Facebook Internal errror occurred.
raise FacebookError('Internal Facebook error occurred', response.status_code) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except requests.RequestException as exception:
raise HTTPError(exception) # depends on [control=['except'], data=['exception']]
result = self._parse(response.content)
if isinstance(result, dict):
result['headers'] = response.headers # depends on [control=['if'], data=[]]
try:
next_url = result['paging']['next'] # depends on [control=['try'], data=[]]
except (KeyError, TypeError):
next_url = None # depends on [control=['except'], data=[]]
return (result, next_url)
def load_with_retry(method, url, data):
remaining_retries = retry
while True:
try:
return load(method, url, data) # depends on [control=['try'], data=[]]
except FacepyError as e:
log.warn('Exception on %s: %s, retries remaining: %s', url, e, remaining_retries)
if remaining_retries > 0:
remaining_retries -= 1 # depends on [control=['if'], data=['remaining_retries']]
else:
raise # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]]
def paginate(method, url, data):
while url:
(result, url) = load_with_retry(method, url, data)
# Reset pagination parameters.
for key in ['offset', 'until', 'since']:
if key in data:
del data[key] # depends on [control=['if'], data=['key', 'data']] # depends on [control=['for'], data=['key']]
yield result # depends on [control=['while'], data=[]]
# Convert option lists to comma-separated values.
for key in data:
if isinstance(data[key], (list, set, tuple)) and all([isinstance(item, six.string_types) for item in data[key]]):
data[key] = ','.join(data[key]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
# Support absolute paths too
if not path.startswith('/'):
if six.PY2:
path = '/' + six.text_type(path.decode('utf-8')) # depends on [control=['if'], data=[]]
else:
path = '/' + path # depends on [control=['if'], data=[]]
url = self._get_url(path)
if self.oauth_token:
data['access_token'] = self.oauth_token # depends on [control=['if'], data=[]]
if self.appsecret and self.oauth_token:
data['appsecret_proof'] = self._generate_appsecret_proof() # depends on [control=['if'], data=[]]
if page:
return paginate(method, url, data) # depends on [control=['if'], data=[]]
else:
return load_with_retry(method, url, data)[0] |
def get_application_modules(self):
"""
Instantiate all application modules (i.e.
:class:`~admin_tools.dashboard.modules.AppList`,
:class:`~fluent_dashboard.modules.AppIconList` and
:class:`~fluent_dashboard.modules.CmsAppIconList`)
for use in the dashboard.
"""
modules = []
appgroups = get_application_groups()
for title, kwargs in appgroups:
AppListClass = get_class(kwargs.pop('module')) # e.g. CmsAppIconlist, AppIconlist, Applist
modules.append(AppListClass(title, **kwargs))
return modules | def function[get_application_modules, parameter[self]]:
constant[
Instantiate all application modules (i.e.
:class:`~admin_tools.dashboard.modules.AppList`,
:class:`~fluent_dashboard.modules.AppIconList` and
:class:`~fluent_dashboard.modules.CmsAppIconList`)
for use in the dashboard.
]
variable[modules] assign[=] list[[]]
variable[appgroups] assign[=] call[name[get_application_groups], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18f58c700>, <ast.Name object at 0x7da18f58f610>]]] in starred[name[appgroups]] begin[:]
variable[AppListClass] assign[=] call[name[get_class], parameter[call[name[kwargs].pop, parameter[constant[module]]]]]
call[name[modules].append, parameter[call[name[AppListClass], parameter[name[title]]]]]
return[name[modules]] | keyword[def] identifier[get_application_modules] ( identifier[self] ):
literal[string]
identifier[modules] =[]
identifier[appgroups] = identifier[get_application_groups] ()
keyword[for] identifier[title] , identifier[kwargs] keyword[in] identifier[appgroups] :
identifier[AppListClass] = identifier[get_class] ( identifier[kwargs] . identifier[pop] ( literal[string] ))
identifier[modules] . identifier[append] ( identifier[AppListClass] ( identifier[title] ,** identifier[kwargs] ))
keyword[return] identifier[modules] | def get_application_modules(self):
"""
Instantiate all application modules (i.e.
:class:`~admin_tools.dashboard.modules.AppList`,
:class:`~fluent_dashboard.modules.AppIconList` and
:class:`~fluent_dashboard.modules.CmsAppIconList`)
for use in the dashboard.
"""
modules = []
appgroups = get_application_groups()
for (title, kwargs) in appgroups:
AppListClass = get_class(kwargs.pop('module')) # e.g. CmsAppIconlist, AppIconlist, Applist
modules.append(AppListClass(title, **kwargs)) # depends on [control=['for'], data=[]]
return modules |
def history_list(self, **kwargs):
"""History list of alarm state."""
url_str = self.base_url + '/state-history/'
if 'dimensions' in kwargs:
dimstr = self.get_dimensions_url_string(kwargs['dimensions'])
kwargs['dimensions'] = dimstr
if kwargs:
url_str = url_str + '?%s' % parse.urlencode(kwargs, True)
resp = self.client.list(url_str)
return resp['elements'] if type(resp) is dict else resp | def function[history_list, parameter[self]]:
constant[History list of alarm state.]
variable[url_str] assign[=] binary_operation[name[self].base_url + constant[/state-history/]]
if compare[constant[dimensions] in name[kwargs]] begin[:]
variable[dimstr] assign[=] call[name[self].get_dimensions_url_string, parameter[call[name[kwargs]][constant[dimensions]]]]
call[name[kwargs]][constant[dimensions]] assign[=] name[dimstr]
if name[kwargs] begin[:]
variable[url_str] assign[=] binary_operation[name[url_str] + binary_operation[constant[?%s] <ast.Mod object at 0x7da2590d6920> call[name[parse].urlencode, parameter[name[kwargs], constant[True]]]]]
variable[resp] assign[=] call[name[self].client.list, parameter[name[url_str]]]
return[<ast.IfExp object at 0x7da18c4cc250>] | keyword[def] identifier[history_list] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[url_str] = identifier[self] . identifier[base_url] + literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[dimstr] = identifier[self] . identifier[get_dimensions_url_string] ( identifier[kwargs] [ literal[string] ])
identifier[kwargs] [ literal[string] ]= identifier[dimstr]
keyword[if] identifier[kwargs] :
identifier[url_str] = identifier[url_str] + literal[string] % identifier[parse] . identifier[urlencode] ( identifier[kwargs] , keyword[True] )
identifier[resp] = identifier[self] . identifier[client] . identifier[list] ( identifier[url_str] )
keyword[return] identifier[resp] [ literal[string] ] keyword[if] identifier[type] ( identifier[resp] ) keyword[is] identifier[dict] keyword[else] identifier[resp] | def history_list(self, **kwargs):
"""History list of alarm state."""
url_str = self.base_url + '/state-history/'
if 'dimensions' in kwargs:
dimstr = self.get_dimensions_url_string(kwargs['dimensions'])
kwargs['dimensions'] = dimstr # depends on [control=['if'], data=['kwargs']]
if kwargs:
url_str = url_str + '?%s' % parse.urlencode(kwargs, True) # depends on [control=['if'], data=[]]
resp = self.client.list(url_str)
return resp['elements'] if type(resp) is dict else resp |
def __print_async(self, frame_type, headers, body):
"""
Utility function to print a message and setup the command prompt
for the next input
"""
if self.__quit:
return
if self.verbose:
self.__sysout(frame_type)
for k, v in headers.items():
self.__sysout('%s: %s' % (k, v))
else:
if 'message-id' in headers:
self.__sysout('message-id: %s' % headers['message-id'])
if 'subscription' in headers:
self.__sysout('subscription: %s' % headers['subscription'])
if self.prompt != '':
self.__sysout('')
self.__sysout(body)
if not self.__start:
self.__sysout(self.prompt, end='')
else:
self.__start = False
self.stdout.flush() | def function[__print_async, parameter[self, frame_type, headers, body]]:
constant[
Utility function to print a message and setup the command prompt
for the next input
]
if name[self].__quit begin[:]
return[None]
if name[self].verbose begin[:]
call[name[self].__sysout, parameter[name[frame_type]]]
for taget[tuple[[<ast.Name object at 0x7da204345510>, <ast.Name object at 0x7da2043454e0>]]] in starred[call[name[headers].items, parameter[]]] begin[:]
call[name[self].__sysout, parameter[binary_operation[constant[%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2043474f0>, <ast.Name object at 0x7da2043456c0>]]]]]
if compare[name[self].prompt not_equal[!=] constant[]] begin[:]
call[name[self].__sysout, parameter[constant[]]]
call[name[self].__sysout, parameter[name[body]]]
if <ast.UnaryOp object at 0x7da204346560> begin[:]
call[name[self].__sysout, parameter[name[self].prompt]]
call[name[self].stdout.flush, parameter[]] | keyword[def] identifier[__print_async] ( identifier[self] , identifier[frame_type] , identifier[headers] , identifier[body] ):
literal[string]
keyword[if] identifier[self] . identifier[__quit] :
keyword[return]
keyword[if] identifier[self] . identifier[verbose] :
identifier[self] . identifier[__sysout] ( identifier[frame_type] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[headers] . identifier[items] ():
identifier[self] . identifier[__sysout] ( literal[string] %( identifier[k] , identifier[v] ))
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[headers] :
identifier[self] . identifier[__sysout] ( literal[string] % identifier[headers] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[headers] :
identifier[self] . identifier[__sysout] ( literal[string] % identifier[headers] [ literal[string] ])
keyword[if] identifier[self] . identifier[prompt] != literal[string] :
identifier[self] . identifier[__sysout] ( literal[string] )
identifier[self] . identifier[__sysout] ( identifier[body] )
keyword[if] keyword[not] identifier[self] . identifier[__start] :
identifier[self] . identifier[__sysout] ( identifier[self] . identifier[prompt] , identifier[end] = literal[string] )
keyword[else] :
identifier[self] . identifier[__start] = keyword[False]
identifier[self] . identifier[stdout] . identifier[flush] () | def __print_async(self, frame_type, headers, body):
"""
Utility function to print a message and setup the command prompt
for the next input
"""
if self.__quit:
return # depends on [control=['if'], data=[]]
if self.verbose:
self.__sysout(frame_type)
for (k, v) in headers.items():
self.__sysout('%s: %s' % (k, v)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
if 'message-id' in headers:
self.__sysout('message-id: %s' % headers['message-id']) # depends on [control=['if'], data=['headers']]
if 'subscription' in headers:
self.__sysout('subscription: %s' % headers['subscription']) # depends on [control=['if'], data=['headers']]
if self.prompt != '':
self.__sysout('') # depends on [control=['if'], data=[]]
self.__sysout(body)
if not self.__start:
self.__sysout(self.prompt, end='') # depends on [control=['if'], data=[]]
else:
self.__start = False
self.stdout.flush() |
def generate_ngrams(args, parser):
"""Adds n-grams data to the data store."""
store = utils.get_data_store(args)
corpus = utils.get_corpus(args)
if args.catalogue:
catalogue = utils.get_catalogue(args)
else:
catalogue = None
store.add_ngrams(corpus, args.min_size, args.max_size, catalogue) | def function[generate_ngrams, parameter[args, parser]]:
constant[Adds n-grams data to the data store.]
variable[store] assign[=] call[name[utils].get_data_store, parameter[name[args]]]
variable[corpus] assign[=] call[name[utils].get_corpus, parameter[name[args]]]
if name[args].catalogue begin[:]
variable[catalogue] assign[=] call[name[utils].get_catalogue, parameter[name[args]]]
call[name[store].add_ngrams, parameter[name[corpus], name[args].min_size, name[args].max_size, name[catalogue]]] | keyword[def] identifier[generate_ngrams] ( identifier[args] , identifier[parser] ):
literal[string]
identifier[store] = identifier[utils] . identifier[get_data_store] ( identifier[args] )
identifier[corpus] = identifier[utils] . identifier[get_corpus] ( identifier[args] )
keyword[if] identifier[args] . identifier[catalogue] :
identifier[catalogue] = identifier[utils] . identifier[get_catalogue] ( identifier[args] )
keyword[else] :
identifier[catalogue] = keyword[None]
identifier[store] . identifier[add_ngrams] ( identifier[corpus] , identifier[args] . identifier[min_size] , identifier[args] . identifier[max_size] , identifier[catalogue] ) | def generate_ngrams(args, parser):
"""Adds n-grams data to the data store."""
store = utils.get_data_store(args)
corpus = utils.get_corpus(args)
if args.catalogue:
catalogue = utils.get_catalogue(args) # depends on [control=['if'], data=[]]
else:
catalogue = None
store.add_ngrams(corpus, args.min_size, args.max_size, catalogue) |
def append_items(self, items, **kwargs):
"""
Method to append data to multiple :class:`~.Item` objects.
This method differs from the normal :meth:`append_multi` in that
each `Item`'s `value` field is updated with the appended data
upon successful completion of the operation.
:param items: The item dictionary. The value for each key should
contain a ``fragment`` field containing the object to append
to the value on the server.
:type items: :class:`~couchbase.items.ItemOptionDict`.
The rest of the options are passed verbatim to
:meth:`append_multi`
.. seealso:: :meth:`append_multi`, :meth:`append`
"""
rv = self.append_multi(items, **kwargs)
# Assume this is an 'ItemOptionDict'
for k, v in items.dict.items():
if k.success:
k.value += v['fragment']
return rv | def function[append_items, parameter[self, items]]:
constant[
Method to append data to multiple :class:`~.Item` objects.
This method differs from the normal :meth:`append_multi` in that
each `Item`'s `value` field is updated with the appended data
upon successful completion of the operation.
:param items: The item dictionary. The value for each key should
contain a ``fragment`` field containing the object to append
to the value on the server.
:type items: :class:`~couchbase.items.ItemOptionDict`.
The rest of the options are passed verbatim to
:meth:`append_multi`
.. seealso:: :meth:`append_multi`, :meth:`append`
]
variable[rv] assign[=] call[name[self].append_multi, parameter[name[items]]]
for taget[tuple[[<ast.Name object at 0x7da1b2345ff0>, <ast.Name object at 0x7da1b2345300>]]] in starred[call[name[items].dict.items, parameter[]]] begin[:]
if name[k].success begin[:]
<ast.AugAssign object at 0x7da20c6e6c50>
return[name[rv]] | keyword[def] identifier[append_items] ( identifier[self] , identifier[items] ,** identifier[kwargs] ):
literal[string]
identifier[rv] = identifier[self] . identifier[append_multi] ( identifier[items] ,** identifier[kwargs] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[items] . identifier[dict] . identifier[items] ():
keyword[if] identifier[k] . identifier[success] :
identifier[k] . identifier[value] += identifier[v] [ literal[string] ]
keyword[return] identifier[rv] | def append_items(self, items, **kwargs):
"""
Method to append data to multiple :class:`~.Item` objects.
This method differs from the normal :meth:`append_multi` in that
each `Item`'s `value` field is updated with the appended data
upon successful completion of the operation.
:param items: The item dictionary. The value for each key should
contain a ``fragment`` field containing the object to append
to the value on the server.
:type items: :class:`~couchbase.items.ItemOptionDict`.
The rest of the options are passed verbatim to
:meth:`append_multi`
.. seealso:: :meth:`append_multi`, :meth:`append`
"""
rv = self.append_multi(items, **kwargs)
# Assume this is an 'ItemOptionDict'
for (k, v) in items.dict.items():
if k.success:
k.value += v['fragment'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return rv |
def update_homed_flags(self, flags=None):
'''
Returns Smoothieware's current homing-status, which is a dictionary
of boolean values for each axis (XYZABC). If an axis is False, then it
still needs to be homed, and it's coordinate cannot be trusted.
Smoothieware sets it's internal homing flags for all axes to False when
it has yet to home since booting/restarting, or an endstop/homing error
returns: dict
{
'X': False,
'Y': True,
'Z': False,
'A': True,
'B': False,
'C': True
}
'''
if flags and isinstance(flags, dict):
self.homed_flags.update(flags)
elif self.simulating:
self.homed_flags.update({ax: False for ax in AXES})
elif self.is_connected():
def _recursive_update_homed_flags(retries):
try:
res = self._send_command(GCODES['HOMING_STATUS'])
flags = _parse_homing_status_values(res)
self.homed_flags.update(flags)
except ParseError as e:
retries -= 1
if retries <= 0:
raise e
if not self.simulating:
sleep(DEFAULT_STABILIZE_DELAY)
return _recursive_update_homed_flags(retries)
_recursive_update_homed_flags(DEFAULT_COMMAND_RETRIES) | def function[update_homed_flags, parameter[self, flags]]:
constant[
Returns Smoothieware's current homing-status, which is a dictionary
of boolean values for each axis (XYZABC). If an axis is False, then it
still needs to be homed, and it's coordinate cannot be trusted.
Smoothieware sets it's internal homing flags for all axes to False when
it has yet to home since booting/restarting, or an endstop/homing error
returns: dict
{
'X': False,
'Y': True,
'Z': False,
'A': True,
'B': False,
'C': True
}
]
if <ast.BoolOp object at 0x7da1b26ac190> begin[:]
call[name[self].homed_flags.update, parameter[name[flags]]] | keyword[def] identifier[update_homed_flags] ( identifier[self] , identifier[flags] = keyword[None] ):
literal[string]
keyword[if] identifier[flags] keyword[and] identifier[isinstance] ( identifier[flags] , identifier[dict] ):
identifier[self] . identifier[homed_flags] . identifier[update] ( identifier[flags] )
keyword[elif] identifier[self] . identifier[simulating] :
identifier[self] . identifier[homed_flags] . identifier[update] ({ identifier[ax] : keyword[False] keyword[for] identifier[ax] keyword[in] identifier[AXES] })
keyword[elif] identifier[self] . identifier[is_connected] ():
keyword[def] identifier[_recursive_update_homed_flags] ( identifier[retries] ):
keyword[try] :
identifier[res] = identifier[self] . identifier[_send_command] ( identifier[GCODES] [ literal[string] ])
identifier[flags] = identifier[_parse_homing_status_values] ( identifier[res] )
identifier[self] . identifier[homed_flags] . identifier[update] ( identifier[flags] )
keyword[except] identifier[ParseError] keyword[as] identifier[e] :
identifier[retries] -= literal[int]
keyword[if] identifier[retries] <= literal[int] :
keyword[raise] identifier[e]
keyword[if] keyword[not] identifier[self] . identifier[simulating] :
identifier[sleep] ( identifier[DEFAULT_STABILIZE_DELAY] )
keyword[return] identifier[_recursive_update_homed_flags] ( identifier[retries] )
identifier[_recursive_update_homed_flags] ( identifier[DEFAULT_COMMAND_RETRIES] ) | def update_homed_flags(self, flags=None):
"""
Returns Smoothieware's current homing-status, which is a dictionary
of boolean values for each axis (XYZABC). If an axis is False, then it
still needs to be homed, and it's coordinate cannot be trusted.
Smoothieware sets it's internal homing flags for all axes to False when
it has yet to home since booting/restarting, or an endstop/homing error
returns: dict
{
'X': False,
'Y': True,
'Z': False,
'A': True,
'B': False,
'C': True
}
"""
if flags and isinstance(flags, dict):
self.homed_flags.update(flags) # depends on [control=['if'], data=[]]
elif self.simulating:
self.homed_flags.update({ax: False for ax in AXES}) # depends on [control=['if'], data=[]]
elif self.is_connected():
def _recursive_update_homed_flags(retries):
try:
res = self._send_command(GCODES['HOMING_STATUS'])
flags = _parse_homing_status_values(res)
self.homed_flags.update(flags) # depends on [control=['try'], data=[]]
except ParseError as e:
retries -= 1
if retries <= 0:
raise e # depends on [control=['if'], data=[]]
if not self.simulating:
sleep(DEFAULT_STABILIZE_DELAY) # depends on [control=['if'], data=[]]
return _recursive_update_homed_flags(retries) # depends on [control=['except'], data=['e']]
_recursive_update_homed_flags(DEFAULT_COMMAND_RETRIES) # depends on [control=['if'], data=[]] |
def credentials_loader(self, in_credentials: str = "client_secrets.json") -> dict:
"""Loads API credentials from a file, JSON or INI.
:param str in_credentials: path to the credentials file. By default,
look for a client_secrets.json file.
"""
accepted_extensions = (".ini", ".json")
# checks
if not path.isfile(in_credentials):
raise IOError("Credentials file doesn't exist: {}".format(in_credentials))
else:
in_credentials = path.normpath(in_credentials)
if path.splitext(in_credentials)[1] not in accepted_extensions:
raise ValueError(
"Extension of credentials file must be one of {}".format(
accepted_extensions
)
)
else:
kind = path.splitext(in_credentials)[1]
# load, check and set
if kind == ".json":
with open(in_credentials, "r") as f:
in_auth = json.loads(f.read())
# check structure
heads = ("installed", "web")
if not set(in_auth).intersection(set(heads)):
raise ValueError(
"Input JSON structure is not as expected."
" First key must be one of: {}".format(heads)
)
# set
if "web" in in_auth:
# json structure for group application
auth_settings = in_auth.get("web")
out_auth = {
"auth_mode": "group",
"client_id": auth_settings.get("client_id"),
"client_secret": auth_settings.get("client_secret"),
# if not specified, must be a former file then set classic scope
"scopes": auth_settings.get("scopes", ["resources:read"]),
"uri_auth": auth_settings.get("auth_uri"),
"uri_token": auth_settings.get("token_uri"),
"uri_base": self.get_url_base_from_url_token(
auth_settings.get("token_uri")
),
"uri_redirect": None,
}
else:
# assuming in_auth == 'installed'
auth_settings = in_auth.get("installed")
out_auth = {
"auth_mode": "user",
"client_id": auth_settings.get("client_id"),
"client_secret": auth_settings.get("client_secret"),
# if not specified, must be a former file then set classic scope
"scopes": auth_settings.get("scopes", ["resources:read"]),
"uri_auth": auth_settings.get("auth_uri"),
"uri_token": auth_settings.get("token_uri"),
"uri_base": self.get_url_base_from_url_token(
auth_settings.get("token_uri")
),
"uri_redirect": auth_settings.get("redirect_uris", None),
}
else:
# assuming file is an .ini
ini_parser = ConfigParser()
ini_parser.read(in_credentials)
# check structure
if "auth" in ini_parser._sections:
auth_settings = ini_parser["auth"]
else:
raise ValueError(
"Input INI structure is not as expected."
" Section of credentials must be named: auth"
)
# set
out_auth = {
"auth_mode": auth_settings.get("CLIENT_TYPE"),
"client_id": auth_settings.get("CLIENT_ID"),
"client_secret": auth_settings.get("CLIENT_SECRET"),
"uri_auth": auth_settings.get("URI_AUTH"),
"uri_token": auth_settings.get("URI_TOKEN"),
"uri_base": self.get_url_base_from_url_token(
auth_settings.get("URI_TOKEN")
),
"uri_redirect": auth_settings.get("URI_REDIRECT"),
}
# method ending
return out_auth | def function[credentials_loader, parameter[self, in_credentials]]:
constant[Loads API credentials from a file, JSON or INI.
:param str in_credentials: path to the credentials file. By default,
look for a client_secrets.json file.
]
variable[accepted_extensions] assign[=] tuple[[<ast.Constant object at 0x7da1b10c1b10>, <ast.Constant object at 0x7da1b10c0910>]]
if <ast.UnaryOp object at 0x7da1b10c0610> begin[:]
<ast.Raise object at 0x7da1b10c0b20>
if compare[call[call[name[path].splitext, parameter[name[in_credentials]]]][constant[1]] <ast.NotIn object at 0x7da2590d7190> name[accepted_extensions]] begin[:]
<ast.Raise object at 0x7da1b10c3a90>
if compare[name[kind] equal[==] constant[.json]] begin[:]
with call[name[open], parameter[name[in_credentials], constant[r]]] begin[:]
variable[in_auth] assign[=] call[name[json].loads, parameter[call[name[f].read, parameter[]]]]
variable[heads] assign[=] tuple[[<ast.Constant object at 0x7da1b10c0100>, <ast.Constant object at 0x7da1b10c2050>]]
if <ast.UnaryOp object at 0x7da1b10c1030> begin[:]
<ast.Raise object at 0x7da1b10c3310>
if compare[constant[web] in name[in_auth]] begin[:]
variable[auth_settings] assign[=] call[name[in_auth].get, parameter[constant[web]]]
variable[out_auth] assign[=] dictionary[[<ast.Constant object at 0x7da1b10c0ee0>, <ast.Constant object at 0x7da1b10c3430>, <ast.Constant object at 0x7da1b10c1f90>, <ast.Constant object at 0x7da1b10c3160>, <ast.Constant object at 0x7da1b10c1210>, <ast.Constant object at 0x7da1b10c1c30>, <ast.Constant object at 0x7da1b10c09a0>, <ast.Constant object at 0x7da1b10c1000>], [<ast.Constant object at 0x7da1b10c25f0>, <ast.Call object at 0x7da1b10c2cb0>, <ast.Call object at 0x7da1b10c3b50>, <ast.Call object at 0x7da1b10c39a0>, <ast.Call object at 0x7da1b10c1180>, <ast.Call object at 0x7da1b10c2650>, <ast.Call object at 0x7da1b10c07f0>, <ast.Constant object at 0x7da1b10c2b60>]]
return[name[out_auth]] | keyword[def] identifier[credentials_loader] ( identifier[self] , identifier[in_credentials] : identifier[str] = literal[string] )-> identifier[dict] :
literal[string]
identifier[accepted_extensions] =( literal[string] , literal[string] )
keyword[if] keyword[not] identifier[path] . identifier[isfile] ( identifier[in_credentials] ):
keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[in_credentials] ))
keyword[else] :
identifier[in_credentials] = identifier[path] . identifier[normpath] ( identifier[in_credentials] )
keyword[if] identifier[path] . identifier[splitext] ( identifier[in_credentials] )[ literal[int] ] keyword[not] keyword[in] identifier[accepted_extensions] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] (
identifier[accepted_extensions]
)
)
keyword[else] :
identifier[kind] = identifier[path] . identifier[splitext] ( identifier[in_credentials] )[ literal[int] ]
keyword[if] identifier[kind] == literal[string] :
keyword[with] identifier[open] ( identifier[in_credentials] , literal[string] ) keyword[as] identifier[f] :
identifier[in_auth] = identifier[json] . identifier[loads] ( identifier[f] . identifier[read] ())
identifier[heads] =( literal[string] , literal[string] )
keyword[if] keyword[not] identifier[set] ( identifier[in_auth] ). identifier[intersection] ( identifier[set] ( identifier[heads] )):
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] . identifier[format] ( identifier[heads] )
)
keyword[if] literal[string] keyword[in] identifier[in_auth] :
identifier[auth_settings] = identifier[in_auth] . identifier[get] ( literal[string] )
identifier[out_auth] ={
literal[string] : literal[string] ,
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ,[ literal[string] ]),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[self] . identifier[get_url_base_from_url_token] (
identifier[auth_settings] . identifier[get] ( literal[string] )
),
literal[string] : keyword[None] ,
}
keyword[else] :
identifier[auth_settings] = identifier[in_auth] . identifier[get] ( literal[string] )
identifier[out_auth] ={
literal[string] : literal[string] ,
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ,[ literal[string] ]),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[self] . identifier[get_url_base_from_url_token] (
identifier[auth_settings] . identifier[get] ( literal[string] )
),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] , keyword[None] ),
}
keyword[else] :
identifier[ini_parser] = identifier[ConfigParser] ()
identifier[ini_parser] . identifier[read] ( identifier[in_credentials] )
keyword[if] literal[string] keyword[in] identifier[ini_parser] . identifier[_sections] :
identifier[auth_settings] = identifier[ini_parser] [ literal[string] ]
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
)
identifier[out_auth] ={
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
literal[string] : identifier[self] . identifier[get_url_base_from_url_token] (
identifier[auth_settings] . identifier[get] ( literal[string] )
),
literal[string] : identifier[auth_settings] . identifier[get] ( literal[string] ),
}
keyword[return] identifier[out_auth] | def credentials_loader(self, in_credentials: str='client_secrets.json') -> dict:
"""Loads API credentials from a file, JSON or INI.
:param str in_credentials: path to the credentials file. By default,
look for a client_secrets.json file.
"""
accepted_extensions = ('.ini', '.json')
# checks
if not path.isfile(in_credentials):
raise IOError("Credentials file doesn't exist: {}".format(in_credentials)) # depends on [control=['if'], data=[]]
else:
in_credentials = path.normpath(in_credentials)
if path.splitext(in_credentials)[1] not in accepted_extensions:
raise ValueError('Extension of credentials file must be one of {}'.format(accepted_extensions)) # depends on [control=['if'], data=['accepted_extensions']]
else:
kind = path.splitext(in_credentials)[1]
# load, check and set
if kind == '.json':
with open(in_credentials, 'r') as f:
in_auth = json.loads(f.read()) # depends on [control=['with'], data=['f']]
# check structure
heads = ('installed', 'web')
if not set(in_auth).intersection(set(heads)):
raise ValueError('Input JSON structure is not as expected. First key must be one of: {}'.format(heads)) # depends on [control=['if'], data=[]]
# set
if 'web' in in_auth:
# json structure for group application
auth_settings = in_auth.get('web')
# if not specified, must be a former file then set classic scope
out_auth = {'auth_mode': 'group', 'client_id': auth_settings.get('client_id'), 'client_secret': auth_settings.get('client_secret'), 'scopes': auth_settings.get('scopes', ['resources:read']), 'uri_auth': auth_settings.get('auth_uri'), 'uri_token': auth_settings.get('token_uri'), 'uri_base': self.get_url_base_from_url_token(auth_settings.get('token_uri')), 'uri_redirect': None} # depends on [control=['if'], data=['in_auth']]
else:
# assuming in_auth == 'installed'
auth_settings = in_auth.get('installed')
# if not specified, must be a former file then set classic scope
out_auth = {'auth_mode': 'user', 'client_id': auth_settings.get('client_id'), 'client_secret': auth_settings.get('client_secret'), 'scopes': auth_settings.get('scopes', ['resources:read']), 'uri_auth': auth_settings.get('auth_uri'), 'uri_token': auth_settings.get('token_uri'), 'uri_base': self.get_url_base_from_url_token(auth_settings.get('token_uri')), 'uri_redirect': auth_settings.get('redirect_uris', None)} # depends on [control=['if'], data=[]]
else:
# assuming file is an .ini
ini_parser = ConfigParser()
ini_parser.read(in_credentials)
# check structure
if 'auth' in ini_parser._sections:
auth_settings = ini_parser['auth'] # depends on [control=['if'], data=[]]
else:
raise ValueError('Input INI structure is not as expected. Section of credentials must be named: auth')
# set
out_auth = {'auth_mode': auth_settings.get('CLIENT_TYPE'), 'client_id': auth_settings.get('CLIENT_ID'), 'client_secret': auth_settings.get('CLIENT_SECRET'), 'uri_auth': auth_settings.get('URI_AUTH'), 'uri_token': auth_settings.get('URI_TOKEN'), 'uri_base': self.get_url_base_from_url_token(auth_settings.get('URI_TOKEN')), 'uri_redirect': auth_settings.get('URI_REDIRECT')}
# method ending
return out_auth |
def call_method(self, name, args=None, kwargs=None, dyn_args=None,
dyn_kwargs=None, lineno=None):
"""Call a method of the extension. This is a shortcut for
:meth:`attr` + :class:`jinja2.nodes.Call`.
"""
if args is None:
args = []
if kwargs is None:
kwargs = []
return nodes.Call(self.attr(name, lineno=lineno), args, kwargs,
dyn_args, dyn_kwargs, lineno=lineno) | def function[call_method, parameter[self, name, args, kwargs, dyn_args, dyn_kwargs, lineno]]:
constant[Call a method of the extension. This is a shortcut for
:meth:`attr` + :class:`jinja2.nodes.Call`.
]
if compare[name[args] is constant[None]] begin[:]
variable[args] assign[=] list[[]]
if compare[name[kwargs] is constant[None]] begin[:]
variable[kwargs] assign[=] list[[]]
return[call[name[nodes].Call, parameter[call[name[self].attr, parameter[name[name]]], name[args], name[kwargs], name[dyn_args], name[dyn_kwargs]]]] | keyword[def] identifier[call_method] ( identifier[self] , identifier[name] , identifier[args] = keyword[None] , identifier[kwargs] = keyword[None] , identifier[dyn_args] = keyword[None] ,
identifier[dyn_kwargs] = keyword[None] , identifier[lineno] = keyword[None] ):
literal[string]
keyword[if] identifier[args] keyword[is] keyword[None] :
identifier[args] =[]
keyword[if] identifier[kwargs] keyword[is] keyword[None] :
identifier[kwargs] =[]
keyword[return] identifier[nodes] . identifier[Call] ( identifier[self] . identifier[attr] ( identifier[name] , identifier[lineno] = identifier[lineno] ), identifier[args] , identifier[kwargs] ,
identifier[dyn_args] , identifier[dyn_kwargs] , identifier[lineno] = identifier[lineno] ) | def call_method(self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None):
"""Call a method of the extension. This is a shortcut for
:meth:`attr` + :class:`jinja2.nodes.Call`.
"""
if args is None:
args = [] # depends on [control=['if'], data=['args']]
if kwargs is None:
kwargs = [] # depends on [control=['if'], data=['kwargs']]
return nodes.Call(self.attr(name, lineno=lineno), args, kwargs, dyn_args, dyn_kwargs, lineno=lineno) |
def private_keys(self):
"""A subset of the :attr:`entries` dictionary, filtered down to only
those entries of type :class:`PrivateKeyEntry`."""
return dict([(a, e) for a, e in self.entries.items()
if isinstance(e, PrivateKeyEntry)]) | def function[private_keys, parameter[self]]:
constant[A subset of the :attr:`entries` dictionary, filtered down to only
those entries of type :class:`PrivateKeyEntry`.]
return[call[name[dict], parameter[<ast.ListComp object at 0x7da1b063c2e0>]]] | keyword[def] identifier[private_keys] ( identifier[self] ):
literal[string]
keyword[return] identifier[dict] ([( identifier[a] , identifier[e] ) keyword[for] identifier[a] , identifier[e] keyword[in] identifier[self] . identifier[entries] . identifier[items] ()
keyword[if] identifier[isinstance] ( identifier[e] , identifier[PrivateKeyEntry] )]) | def private_keys(self):
"""A subset of the :attr:`entries` dictionary, filtered down to only
those entries of type :class:`PrivateKeyEntry`."""
return dict([(a, e) for (a, e) in self.entries.items() if isinstance(e, PrivateKeyEntry)]) |
def read_envvar_dir(envvar, name, extension):
"""
Read values from a file located in a directory specified by a particular
environment file. ``read_envvar_dir('HOME', 'example', 'yaml')`` would
look for a file at ``/home/user/example.yaml``. When the environment
variable isn't set or the file does not exist, `NotConfigured` will be
returned.
:param envvar: the environment variable to interpret as a directory
:param name: application or configuration set name
:param extension: file extension to look for
:return: a `.Configuration`, possibly `.NotConfigured`
"""
config_dir = environ.get(envvar)
if not config_dir:
return NotConfigured
# envvar is set, construct full file path, expanding user to allow the envvar containing a value like ~/config
config_path = path.join(path.expanduser(config_dir), '{name}.{extension}'.format(name=name, extension=extension))
return loadf(config_path, default=NotConfigured) | def function[read_envvar_dir, parameter[envvar, name, extension]]:
constant[
Read values from a file located in a directory specified by a particular
environment file. ``read_envvar_dir('HOME', 'example', 'yaml')`` would
look for a file at ``/home/user/example.yaml``. When the environment
variable isn't set or the file does not exist, `NotConfigured` will be
returned.
:param envvar: the environment variable to interpret as a directory
:param name: application or configuration set name
:param extension: file extension to look for
:return: a `.Configuration`, possibly `.NotConfigured`
]
variable[config_dir] assign[=] call[name[environ].get, parameter[name[envvar]]]
if <ast.UnaryOp object at 0x7da1b1712530> begin[:]
return[name[NotConfigured]]
variable[config_path] assign[=] call[name[path].join, parameter[call[name[path].expanduser, parameter[name[config_dir]]], call[constant[{name}.{extension}].format, parameter[]]]]
return[call[name[loadf], parameter[name[config_path]]]] | keyword[def] identifier[read_envvar_dir] ( identifier[envvar] , identifier[name] , identifier[extension] ):
literal[string]
identifier[config_dir] = identifier[environ] . identifier[get] ( identifier[envvar] )
keyword[if] keyword[not] identifier[config_dir] :
keyword[return] identifier[NotConfigured]
identifier[config_path] = identifier[path] . identifier[join] ( identifier[path] . identifier[expanduser] ( identifier[config_dir] ), literal[string] . identifier[format] ( identifier[name] = identifier[name] , identifier[extension] = identifier[extension] ))
keyword[return] identifier[loadf] ( identifier[config_path] , identifier[default] = identifier[NotConfigured] ) | def read_envvar_dir(envvar, name, extension):
"""
Read values from a file located in a directory specified by a particular
environment file. ``read_envvar_dir('HOME', 'example', 'yaml')`` would
look for a file at ``/home/user/example.yaml``. When the environment
variable isn't set or the file does not exist, `NotConfigured` will be
returned.
:param envvar: the environment variable to interpret as a directory
:param name: application or configuration set name
:param extension: file extension to look for
:return: a `.Configuration`, possibly `.NotConfigured`
"""
config_dir = environ.get(envvar)
if not config_dir:
return NotConfigured # depends on [control=['if'], data=[]]
# envvar is set, construct full file path, expanding user to allow the envvar containing a value like ~/config
config_path = path.join(path.expanduser(config_dir), '{name}.{extension}'.format(name=name, extension=extension))
return loadf(config_path, default=NotConfigured) |
def getGaTermByName(self, name):
"""
Returns a GA4GH OntologyTerm object by name.
:param name: name of the ontology term, ex. "gene".
:return: GA4GH OntologyTerm object.
"""
# TODO what is the correct value when we have no mapping??
termIds = self.getTermIds(name)
if len(termIds) == 0:
termId = ""
# TODO add logging for missed term translation.
else:
# TODO what is the correct behaviour here when we have multiple
# IDs matching a given name?
termId = termIds[0]
term = protocol.OntologyTerm()
term.term = name
term.term_id = termId
return term | def function[getGaTermByName, parameter[self, name]]:
constant[
Returns a GA4GH OntologyTerm object by name.
:param name: name of the ontology term, ex. "gene".
:return: GA4GH OntologyTerm object.
]
variable[termIds] assign[=] call[name[self].getTermIds, parameter[name[name]]]
if compare[call[name[len], parameter[name[termIds]]] equal[==] constant[0]] begin[:]
variable[termId] assign[=] constant[]
variable[term] assign[=] call[name[protocol].OntologyTerm, parameter[]]
name[term].term assign[=] name[name]
name[term].term_id assign[=] name[termId]
return[name[term]] | keyword[def] identifier[getGaTermByName] ( identifier[self] , identifier[name] ):
literal[string]
identifier[termIds] = identifier[self] . identifier[getTermIds] ( identifier[name] )
keyword[if] identifier[len] ( identifier[termIds] )== literal[int] :
identifier[termId] = literal[string]
keyword[else] :
identifier[termId] = identifier[termIds] [ literal[int] ]
identifier[term] = identifier[protocol] . identifier[OntologyTerm] ()
identifier[term] . identifier[term] = identifier[name]
identifier[term] . identifier[term_id] = identifier[termId]
keyword[return] identifier[term] | def getGaTermByName(self, name):
"""
Returns a GA4GH OntologyTerm object by name.
:param name: name of the ontology term, ex. "gene".
:return: GA4GH OntologyTerm object.
"""
# TODO what is the correct value when we have no mapping??
termIds = self.getTermIds(name)
if len(termIds) == 0:
termId = '' # depends on [control=['if'], data=[]]
else:
# TODO add logging for missed term translation.
# TODO what is the correct behaviour here when we have multiple
# IDs matching a given name?
termId = termIds[0]
term = protocol.OntologyTerm()
term.term = name
term.term_id = termId
return term |
def update_dtype(self, resvar=None):
"""Updates the dtype attribute of the function. This is required because
fortran functions can have their types declared either as a modifier on
the function *or* as a member inside the function.
:arg resvar: the name of the variable declared using the result(var)
construct after the function signature.
"""
if self.dtype is None:
#search the members of this function for one that has the same name
#as the function. If it gets found, overwrite the dtype, kind and
#modifiers attributes so the rest of the code works.
for m in self.members:
if m == self.name.lower() or m == resvar:
member = self.members[m]
self.dtype = member.dtype
self.modifiers = member.modifiers
self.kind = member.kind
self.default = member.default
self.dimension = member.dimension
del self.members[m]
break | def function[update_dtype, parameter[self, resvar]]:
constant[Updates the dtype attribute of the function. This is required because
fortran functions can have their types declared either as a modifier on
the function *or* as a member inside the function.
:arg resvar: the name of the variable declared using the result(var)
construct after the function signature.
]
if compare[name[self].dtype is constant[None]] begin[:]
for taget[name[m]] in starred[name[self].members] begin[:]
if <ast.BoolOp object at 0x7da1b26ecb80> begin[:]
variable[member] assign[=] call[name[self].members][name[m]]
name[self].dtype assign[=] name[member].dtype
name[self].modifiers assign[=] name[member].modifiers
name[self].kind assign[=] name[member].kind
name[self].default assign[=] name[member].default
name[self].dimension assign[=] name[member].dimension
<ast.Delete object at 0x7da1b26ece80>
break | keyword[def] identifier[update_dtype] ( identifier[self] , identifier[resvar] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[dtype] keyword[is] keyword[None] :
keyword[for] identifier[m] keyword[in] identifier[self] . identifier[members] :
keyword[if] identifier[m] == identifier[self] . identifier[name] . identifier[lower] () keyword[or] identifier[m] == identifier[resvar] :
identifier[member] = identifier[self] . identifier[members] [ identifier[m] ]
identifier[self] . identifier[dtype] = identifier[member] . identifier[dtype]
identifier[self] . identifier[modifiers] = identifier[member] . identifier[modifiers]
identifier[self] . identifier[kind] = identifier[member] . identifier[kind]
identifier[self] . identifier[default] = identifier[member] . identifier[default]
identifier[self] . identifier[dimension] = identifier[member] . identifier[dimension]
keyword[del] identifier[self] . identifier[members] [ identifier[m] ]
keyword[break] | def update_dtype(self, resvar=None):
"""Updates the dtype attribute of the function. This is required because
fortran functions can have their types declared either as a modifier on
the function *or* as a member inside the function.
:arg resvar: the name of the variable declared using the result(var)
construct after the function signature.
"""
if self.dtype is None:
#search the members of this function for one that has the same name
#as the function. If it gets found, overwrite the dtype, kind and
#modifiers attributes so the rest of the code works.
for m in self.members:
if m == self.name.lower() or m == resvar:
member = self.members[m]
self.dtype = member.dtype
self.modifiers = member.modifiers
self.kind = member.kind
self.default = member.default
self.dimension = member.dimension
del self.members[m]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']] # depends on [control=['if'], data=[]] |
def regrid(self, axes, coordsys, outname="", overwrite=True,
outshape=(), interpolation="linear",
decimate=10, replicate=False,
refchange=True, forceregrid=False):
"""Regrid the image to a new image object.
Regrid the image on the given axes to the given coordinate system.
The output is stored in the given file; it no file name is given a
temporary image is made.
If the output shape is empty, the old shape is used.
`replicate=True` means replication rather than regridding.
"""
return image(self._regrid(self._adaptAxes(axes),
outname, overwrite,
outshape, coordsys.dict(),
interpolation, decimate, replicate,
refchange, forceregrid)) | def function[regrid, parameter[self, axes, coordsys, outname, overwrite, outshape, interpolation, decimate, replicate, refchange, forceregrid]]:
constant[Regrid the image to a new image object.
Regrid the image on the given axes to the given coordinate system.
The output is stored in the given file; it no file name is given a
temporary image is made.
If the output shape is empty, the old shape is used.
`replicate=True` means replication rather than regridding.
]
return[call[name[image], parameter[call[name[self]._regrid, parameter[call[name[self]._adaptAxes, parameter[name[axes]]], name[outname], name[overwrite], name[outshape], call[name[coordsys].dict, parameter[]], name[interpolation], name[decimate], name[replicate], name[refchange], name[forceregrid]]]]]] | keyword[def] identifier[regrid] ( identifier[self] , identifier[axes] , identifier[coordsys] , identifier[outname] = literal[string] , identifier[overwrite] = keyword[True] ,
identifier[outshape] =(), identifier[interpolation] = literal[string] ,
identifier[decimate] = literal[int] , identifier[replicate] = keyword[False] ,
identifier[refchange] = keyword[True] , identifier[forceregrid] = keyword[False] ):
literal[string]
keyword[return] identifier[image] ( identifier[self] . identifier[_regrid] ( identifier[self] . identifier[_adaptAxes] ( identifier[axes] ),
identifier[outname] , identifier[overwrite] ,
identifier[outshape] , identifier[coordsys] . identifier[dict] (),
identifier[interpolation] , identifier[decimate] , identifier[replicate] ,
identifier[refchange] , identifier[forceregrid] )) | def regrid(self, axes, coordsys, outname='', overwrite=True, outshape=(), interpolation='linear', decimate=10, replicate=False, refchange=True, forceregrid=False):
"""Regrid the image to a new image object.
Regrid the image on the given axes to the given coordinate system.
The output is stored in the given file; it no file name is given a
temporary image is made.
If the output shape is empty, the old shape is used.
`replicate=True` means replication rather than regridding.
"""
return image(self._regrid(self._adaptAxes(axes), outname, overwrite, outshape, coordsys.dict(), interpolation, decimate, replicate, refchange, forceregrid)) |
def _init_attr(self, bitstring_map: Dict[str, str]):
"""
Acts instead of __init__ method to instantiate the necessary Deutsch-Jozsa state.
:param Dict[String, String] bitstring_map: truth-table of the input bitstring map in
dictionary format, used to construct the oracle in the Deutsch-Jozsa algorithm.
:return: None
:rtype: NoneType
"""
self.bit_map = bitstring_map
self.n_qubits = len(list(bitstring_map.keys())[0])
# We use one extra qubit for making the oracle,
# and one for storing the answer of the oracle.
self.n_ancillas = 2
self._qubits = list(range(self.n_qubits + self.n_ancillas))
self.computational_qubits = self._qubits[:self.n_qubits]
self.ancillas = self._qubits[self.n_qubits:]
self.unitary_matrix = self.unitary_function(bitstring_map)
self.deutsch_jozsa_circuit = self._construct_deutsch_jozsa_circuit() | def function[_init_attr, parameter[self, bitstring_map]]:
constant[
Acts instead of __init__ method to instantiate the necessary Deutsch-Jozsa state.
:param Dict[String, String] bitstring_map: truth-table of the input bitstring map in
dictionary format, used to construct the oracle in the Deutsch-Jozsa algorithm.
:return: None
:rtype: NoneType
]
name[self].bit_map assign[=] name[bitstring_map]
name[self].n_qubits assign[=] call[name[len], parameter[call[call[name[list], parameter[call[name[bitstring_map].keys, parameter[]]]]][constant[0]]]]
name[self].n_ancillas assign[=] constant[2]
name[self]._qubits assign[=] call[name[list], parameter[call[name[range], parameter[binary_operation[name[self].n_qubits + name[self].n_ancillas]]]]]
name[self].computational_qubits assign[=] call[name[self]._qubits][<ast.Slice object at 0x7da20c76e680>]
name[self].ancillas assign[=] call[name[self]._qubits][<ast.Slice object at 0x7da20c76d960>]
name[self].unitary_matrix assign[=] call[name[self].unitary_function, parameter[name[bitstring_map]]]
name[self].deutsch_jozsa_circuit assign[=] call[name[self]._construct_deutsch_jozsa_circuit, parameter[]] | keyword[def] identifier[_init_attr] ( identifier[self] , identifier[bitstring_map] : identifier[Dict] [ identifier[str] , identifier[str] ]):
literal[string]
identifier[self] . identifier[bit_map] = identifier[bitstring_map]
identifier[self] . identifier[n_qubits] = identifier[len] ( identifier[list] ( identifier[bitstring_map] . identifier[keys] ())[ literal[int] ])
identifier[self] . identifier[n_ancillas] = literal[int]
identifier[self] . identifier[_qubits] = identifier[list] ( identifier[range] ( identifier[self] . identifier[n_qubits] + identifier[self] . identifier[n_ancillas] ))
identifier[self] . identifier[computational_qubits] = identifier[self] . identifier[_qubits] [: identifier[self] . identifier[n_qubits] ]
identifier[self] . identifier[ancillas] = identifier[self] . identifier[_qubits] [ identifier[self] . identifier[n_qubits] :]
identifier[self] . identifier[unitary_matrix] = identifier[self] . identifier[unitary_function] ( identifier[bitstring_map] )
identifier[self] . identifier[deutsch_jozsa_circuit] = identifier[self] . identifier[_construct_deutsch_jozsa_circuit] () | def _init_attr(self, bitstring_map: Dict[str, str]):
"""
Acts instead of __init__ method to instantiate the necessary Deutsch-Jozsa state.
:param Dict[String, String] bitstring_map: truth-table of the input bitstring map in
dictionary format, used to construct the oracle in the Deutsch-Jozsa algorithm.
:return: None
:rtype: NoneType
"""
self.bit_map = bitstring_map
self.n_qubits = len(list(bitstring_map.keys())[0])
# We use one extra qubit for making the oracle,
# and one for storing the answer of the oracle.
self.n_ancillas = 2
self._qubits = list(range(self.n_qubits + self.n_ancillas))
self.computational_qubits = self._qubits[:self.n_qubits]
self.ancillas = self._qubits[self.n_qubits:]
self.unitary_matrix = self.unitary_function(bitstring_map)
self.deutsch_jozsa_circuit = self._construct_deutsch_jozsa_circuit() |
def add_thermodynamic(self, em=1000):
"""Apply thermodynamic constraints to the model.
Adding these constraints restricts the solution space to only
contain solutions that have no internal loops [Schilling00]_. This is
solved as a MILP problem as described in [Muller13]_. The time to solve
a problem with thermodynamic constraints is usually much longer than a
normal FBA problem.
The ``em`` parameter is the upper bound on the delta mu reaction
variables. This parameter has to be balanced based on the model size
since setting the value too low can result in the correct solutions
being infeasible and setting the value too high can result in
numerical instability which again makes the correct solutions
infeasible. The default value should work in all cases as long as the
model is not unusually large.
"""
internal = set(r for r in self._model.reactions
if not self._model.is_exchange(r))
# Reaction fluxes
v = self._v
# Indicator variable
alpha = self._prob.namespace(internal, types=lp.VariableType.Binary)
# Delta mu is the stoichiometrically weighted sum of the compound mus.
dmu = self._prob.namespace(internal)
for reaction_id in self._model.reactions:
if not self._model.is_exchange(reaction_id):
flux = v(reaction_id)
alpha_r = alpha(reaction_id)
dmu_r = dmu(reaction_id)
lower, upper = self._model.limits[reaction_id]
# Constrain the reaction to a direction determined by alpha
# and contrain the delta mu to a value in [-em; -1] if
# alpha is one, otherwise in [1; em].
self._prob.add_linear_constraints(
flux >= lower * (1 - alpha_r),
flux <= upper * alpha_r,
dmu_r >= -em * alpha_r + (1 - alpha_r),
dmu_r <= em * (1 - alpha_r) - alpha_r)
# Define mu variables
mu = self._prob.namespace(self._model.compounds)
tdbalance_lhs = {reaction_id: 0
for reaction_id in self._model.reactions}
for spec, value in iteritems(self._model.matrix):
compound, reaction_id = spec
if not self._model.is_exchange(reaction_id):
tdbalance_lhs[reaction_id] += mu(compound) * value
for reaction_id, lhs in iteritems(tdbalance_lhs):
if not self._model.is_exchange(reaction_id):
self._prob.add_linear_constraints(lhs == dmu(reaction_id)) | def function[add_thermodynamic, parameter[self, em]]:
constant[Apply thermodynamic constraints to the model.
Adding these constraints restricts the solution space to only
contain solutions that have no internal loops [Schilling00]_. This is
solved as a MILP problem as described in [Muller13]_. The time to solve
a problem with thermodynamic constraints is usually much longer than a
normal FBA problem.
The ``em`` parameter is the upper bound on the delta mu reaction
variables. This parameter has to be balanced based on the model size
since setting the value too low can result in the correct solutions
being infeasible and setting the value too high can result in
numerical instability which again makes the correct solutions
infeasible. The default value should work in all cases as long as the
model is not unusually large.
]
variable[internal] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da20c76ecb0>]]
variable[v] assign[=] name[self]._v
variable[alpha] assign[=] call[name[self]._prob.namespace, parameter[name[internal]]]
variable[dmu] assign[=] call[name[self]._prob.namespace, parameter[name[internal]]]
for taget[name[reaction_id]] in starred[name[self]._model.reactions] begin[:]
if <ast.UnaryOp object at 0x7da20c76ee00> begin[:]
variable[flux] assign[=] call[name[v], parameter[name[reaction_id]]]
variable[alpha_r] assign[=] call[name[alpha], parameter[name[reaction_id]]]
variable[dmu_r] assign[=] call[name[dmu], parameter[name[reaction_id]]]
<ast.Tuple object at 0x7da20c76f340> assign[=] call[name[self]._model.limits][name[reaction_id]]
call[name[self]._prob.add_linear_constraints, parameter[compare[name[flux] greater_or_equal[>=] binary_operation[name[lower] * binary_operation[constant[1] - name[alpha_r]]]], compare[name[flux] less_or_equal[<=] binary_operation[name[upper] * name[alpha_r]]], compare[name[dmu_r] greater_or_equal[>=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c76ce80> * name[alpha_r]] + binary_operation[constant[1] - name[alpha_r]]]], compare[name[dmu_r] less_or_equal[<=] binary_operation[binary_operation[name[em] * binary_operation[constant[1] - name[alpha_r]]] - name[alpha_r]]]]]
variable[mu] assign[=] call[name[self]._prob.namespace, parameter[name[self]._model.compounds]]
variable[tdbalance_lhs] assign[=] <ast.DictComp object at 0x7da18bc73af0>
for taget[tuple[[<ast.Name object at 0x7da18bc72ef0>, <ast.Name object at 0x7da18bc73cd0>]]] in starred[call[name[iteritems], parameter[name[self]._model.matrix]]] begin[:]
<ast.Tuple object at 0x7da18bc71600> assign[=] name[spec]
if <ast.UnaryOp object at 0x7da18bc700a0> begin[:]
<ast.AugAssign object at 0x7da18bc73550>
for taget[tuple[[<ast.Name object at 0x7da18bc70670>, <ast.Name object at 0x7da18bc73640>]]] in starred[call[name[iteritems], parameter[name[tdbalance_lhs]]]] begin[:]
if <ast.UnaryOp object at 0x7da18bc72110> begin[:]
call[name[self]._prob.add_linear_constraints, parameter[compare[name[lhs] equal[==] call[name[dmu], parameter[name[reaction_id]]]]]] | keyword[def] identifier[add_thermodynamic] ( identifier[self] , identifier[em] = literal[int] ):
literal[string]
identifier[internal] = identifier[set] ( identifier[r] keyword[for] identifier[r] keyword[in] identifier[self] . identifier[_model] . identifier[reactions]
keyword[if] keyword[not] identifier[self] . identifier[_model] . identifier[is_exchange] ( identifier[r] ))
identifier[v] = identifier[self] . identifier[_v]
identifier[alpha] = identifier[self] . identifier[_prob] . identifier[namespace] ( identifier[internal] , identifier[types] = identifier[lp] . identifier[VariableType] . identifier[Binary] )
identifier[dmu] = identifier[self] . identifier[_prob] . identifier[namespace] ( identifier[internal] )
keyword[for] identifier[reaction_id] keyword[in] identifier[self] . identifier[_model] . identifier[reactions] :
keyword[if] keyword[not] identifier[self] . identifier[_model] . identifier[is_exchange] ( identifier[reaction_id] ):
identifier[flux] = identifier[v] ( identifier[reaction_id] )
identifier[alpha_r] = identifier[alpha] ( identifier[reaction_id] )
identifier[dmu_r] = identifier[dmu] ( identifier[reaction_id] )
identifier[lower] , identifier[upper] = identifier[self] . identifier[_model] . identifier[limits] [ identifier[reaction_id] ]
identifier[self] . identifier[_prob] . identifier[add_linear_constraints] (
identifier[flux] >= identifier[lower] *( literal[int] - identifier[alpha_r] ),
identifier[flux] <= identifier[upper] * identifier[alpha_r] ,
identifier[dmu_r] >=- identifier[em] * identifier[alpha_r] +( literal[int] - identifier[alpha_r] ),
identifier[dmu_r] <= identifier[em] *( literal[int] - identifier[alpha_r] )- identifier[alpha_r] )
identifier[mu] = identifier[self] . identifier[_prob] . identifier[namespace] ( identifier[self] . identifier[_model] . identifier[compounds] )
identifier[tdbalance_lhs] ={ identifier[reaction_id] : literal[int]
keyword[for] identifier[reaction_id] keyword[in] identifier[self] . identifier[_model] . identifier[reactions] }
keyword[for] identifier[spec] , identifier[value] keyword[in] identifier[iteritems] ( identifier[self] . identifier[_model] . identifier[matrix] ):
identifier[compound] , identifier[reaction_id] = identifier[spec]
keyword[if] keyword[not] identifier[self] . identifier[_model] . identifier[is_exchange] ( identifier[reaction_id] ):
identifier[tdbalance_lhs] [ identifier[reaction_id] ]+= identifier[mu] ( identifier[compound] )* identifier[value]
keyword[for] identifier[reaction_id] , identifier[lhs] keyword[in] identifier[iteritems] ( identifier[tdbalance_lhs] ):
keyword[if] keyword[not] identifier[self] . identifier[_model] . identifier[is_exchange] ( identifier[reaction_id] ):
identifier[self] . identifier[_prob] . identifier[add_linear_constraints] ( identifier[lhs] == identifier[dmu] ( identifier[reaction_id] )) | def add_thermodynamic(self, em=1000):
"""Apply thermodynamic constraints to the model.
Adding these constraints restricts the solution space to only
contain solutions that have no internal loops [Schilling00]_. This is
solved as a MILP problem as described in [Muller13]_. The time to solve
a problem with thermodynamic constraints is usually much longer than a
normal FBA problem.
The ``em`` parameter is the upper bound on the delta mu reaction
variables. This parameter has to be balanced based on the model size
since setting the value too low can result in the correct solutions
being infeasible and setting the value too high can result in
numerical instability which again makes the correct solutions
infeasible. The default value should work in all cases as long as the
model is not unusually large.
"""
internal = set((r for r in self._model.reactions if not self._model.is_exchange(r)))
# Reaction fluxes
v = self._v
# Indicator variable
alpha = self._prob.namespace(internal, types=lp.VariableType.Binary)
# Delta mu is the stoichiometrically weighted sum of the compound mus.
dmu = self._prob.namespace(internal)
for reaction_id in self._model.reactions:
if not self._model.is_exchange(reaction_id):
flux = v(reaction_id)
alpha_r = alpha(reaction_id)
dmu_r = dmu(reaction_id)
(lower, upper) = self._model.limits[reaction_id]
# Constrain the reaction to a direction determined by alpha
# and contrain the delta mu to a value in [-em; -1] if
# alpha is one, otherwise in [1; em].
self._prob.add_linear_constraints(flux >= lower * (1 - alpha_r), flux <= upper * alpha_r, dmu_r >= -em * alpha_r + (1 - alpha_r), dmu_r <= em * (1 - alpha_r) - alpha_r) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['reaction_id']]
# Define mu variables
mu = self._prob.namespace(self._model.compounds)
tdbalance_lhs = {reaction_id: 0 for reaction_id in self._model.reactions}
for (spec, value) in iteritems(self._model.matrix):
(compound, reaction_id) = spec
if not self._model.is_exchange(reaction_id):
tdbalance_lhs[reaction_id] += mu(compound) * value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for (reaction_id, lhs) in iteritems(tdbalance_lhs):
if not self._model.is_exchange(reaction_id):
self._prob.add_linear_constraints(lhs == dmu(reaction_id)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def set_path(self, path):
"""Set the path of the file."""
if os.path.isabs(path):
path = os.path.normpath(os.path.join(self.cwd, path))
self.path = path
self.relative = os.path.relpath(self.path, self.base) | def function[set_path, parameter[self, path]]:
constant[Set the path of the file.]
if call[name[os].path.isabs, parameter[name[path]]] begin[:]
variable[path] assign[=] call[name[os].path.normpath, parameter[call[name[os].path.join, parameter[name[self].cwd, name[path]]]]]
name[self].path assign[=] name[path]
name[self].relative assign[=] call[name[os].path.relpath, parameter[name[self].path, name[self].base]] | keyword[def] identifier[set_path] ( identifier[self] , identifier[path] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isabs] ( identifier[path] ):
identifier[path] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[cwd] , identifier[path] ))
identifier[self] . identifier[path] = identifier[path]
identifier[self] . identifier[relative] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[self] . identifier[path] , identifier[self] . identifier[base] ) | def set_path(self, path):
"""Set the path of the file."""
if os.path.isabs(path):
path = os.path.normpath(os.path.join(self.cwd, path)) # depends on [control=['if'], data=[]]
self.path = path
self.relative = os.path.relpath(self.path, self.base) |
def load_cz2010_hourly_temp_data(
self, start, end, read_from_cache=True, write_to_cache=True
):
""" Load hourly CZ2010 temperature data from start date to end date (inclusive).
This is the primary convenience method for loading hourly CZ2010 temperature data.
Parameters
----------
start : datetime.datetime
The earliest date from which to load data.
end : datetime.datetime
The latest date until which to load data.
read_from_cache : bool
Whether or not to load data from cache.
write_to_cache : bool
Whether or not to write newly loaded data to cache.
"""
return load_cz2010_hourly_temp_data(
self.usaf_id,
start,
end,
read_from_cache=read_from_cache,
write_to_cache=write_to_cache,
) | def function[load_cz2010_hourly_temp_data, parameter[self, start, end, read_from_cache, write_to_cache]]:
constant[ Load hourly CZ2010 temperature data from start date to end date (inclusive).
This is the primary convenience method for loading hourly CZ2010 temperature data.
Parameters
----------
start : datetime.datetime
The earliest date from which to load data.
end : datetime.datetime
The latest date until which to load data.
read_from_cache : bool
Whether or not to load data from cache.
write_to_cache : bool
Whether or not to write newly loaded data to cache.
]
return[call[name[load_cz2010_hourly_temp_data], parameter[name[self].usaf_id, name[start], name[end]]]] | keyword[def] identifier[load_cz2010_hourly_temp_data] (
identifier[self] , identifier[start] , identifier[end] , identifier[read_from_cache] = keyword[True] , identifier[write_to_cache] = keyword[True]
):
literal[string]
keyword[return] identifier[load_cz2010_hourly_temp_data] (
identifier[self] . identifier[usaf_id] ,
identifier[start] ,
identifier[end] ,
identifier[read_from_cache] = identifier[read_from_cache] ,
identifier[write_to_cache] = identifier[write_to_cache] ,
) | def load_cz2010_hourly_temp_data(self, start, end, read_from_cache=True, write_to_cache=True):
""" Load hourly CZ2010 temperature data from start date to end date (inclusive).
This is the primary convenience method for loading hourly CZ2010 temperature data.
Parameters
----------
start : datetime.datetime
The earliest date from which to load data.
end : datetime.datetime
The latest date until which to load data.
read_from_cache : bool
Whether or not to load data from cache.
write_to_cache : bool
Whether or not to write newly loaded data to cache.
"""
return load_cz2010_hourly_temp_data(self.usaf_id, start, end, read_from_cache=read_from_cache, write_to_cache=write_to_cache) |
def reset(self):
'''
Resets this agent type to prepare it for a new simulation run. This
includes resetting the random number generator and initializing the style
of each agent of this type.
'''
self.resetRNG()
sNow = np.zeros(self.pop_size)
Shk = self.RNG.rand(self.pop_size)
sNow[Shk < self.p_init] = 1
self.sNow = sNow | def function[reset, parameter[self]]:
constant[
Resets this agent type to prepare it for a new simulation run. This
includes resetting the random number generator and initializing the style
of each agent of this type.
]
call[name[self].resetRNG, parameter[]]
variable[sNow] assign[=] call[name[np].zeros, parameter[name[self].pop_size]]
variable[Shk] assign[=] call[name[self].RNG.rand, parameter[name[self].pop_size]]
call[name[sNow]][compare[name[Shk] less[<] name[self].p_init]] assign[=] constant[1]
name[self].sNow assign[=] name[sNow] | keyword[def] identifier[reset] ( identifier[self] ):
literal[string]
identifier[self] . identifier[resetRNG] ()
identifier[sNow] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[pop_size] )
identifier[Shk] = identifier[self] . identifier[RNG] . identifier[rand] ( identifier[self] . identifier[pop_size] )
identifier[sNow] [ identifier[Shk] < identifier[self] . identifier[p_init] ]= literal[int]
identifier[self] . identifier[sNow] = identifier[sNow] | def reset(self):
"""
Resets this agent type to prepare it for a new simulation run. This
includes resetting the random number generator and initializing the style
of each agent of this type.
"""
self.resetRNG()
sNow = np.zeros(self.pop_size)
Shk = self.RNG.rand(self.pop_size)
sNow[Shk < self.p_init] = 1
self.sNow = sNow |
def sun_rise_set_transit_ephem(times, latitude, longitude,
next_or_previous='next',
altitude=0,
pressure=101325,
temperature=12, horizon='0:00'):
"""
Calculate the next sunrise and sunset times using the PyEphem package.
Parameters
----------
time : pandas.DatetimeIndex
Must be localized
latitude : float
Latitude in degrees, positive north of equator, negative to south
longitude : float
Longitude in degrees, positive east of prime meridian, negative to west
next_or_previous : str
'next' or 'previous' sunrise and sunset relative to time
altitude : float, default 0
distance above sea level in meters.
pressure : int or float, optional, default 101325
air pressure in Pascals.
temperature : int or float, optional, default 12
air temperature in degrees C.
horizon : string, format +/-X:YY
arc degrees:arc minutes from geometrical horizon for sunrise and
sunset, e.g., horizon='+0:00' to use sun center crossing the
geometrical horizon to define sunrise and sunset,
horizon='-0:34' for when the sun's upper edge crosses the
geometrical horizon
Returns
-------
pandas.DataFrame
index is the same as input `time` argument
columns are 'sunrise', 'sunset', and 'transit'
See also
--------
pyephem
"""
try:
import ephem
except ImportError:
raise ImportError('PyEphem must be installed')
# times must be localized
if times.tz:
tzinfo = times.tz
else:
raise ValueError('times must be localized')
obs, sun = _ephem_setup(latitude, longitude, altitude,
pressure, temperature, horizon)
# create lists of sunrise and sunset time localized to time.tz
if next_or_previous.lower() == 'next':
rising = obs.next_rising
setting = obs.next_setting
transit = obs.next_transit
elif next_or_previous.lower() == 'previous':
rising = obs.previous_rising
setting = obs.previous_setting
transit = obs.previous_transit
else:
raise ValueError("next_or_previous must be either 'next' or" +
" 'previous'")
sunrise = []
sunset = []
trans = []
for thetime in times:
thetime = thetime.to_pydatetime()
# pyephem drops timezone when converting to its internal datetime
# format, so handle timezone explicitly here
obs.date = ephem.Date(thetime - thetime.utcoffset())
sunrise.append(_ephem_to_timezone(rising(sun), tzinfo))
sunset.append(_ephem_to_timezone(setting(sun), tzinfo))
trans.append(_ephem_to_timezone(transit(sun), tzinfo))
return pd.DataFrame(index=times, data={'sunrise': sunrise,
'sunset': sunset,
'transit': trans}) | def function[sun_rise_set_transit_ephem, parameter[times, latitude, longitude, next_or_previous, altitude, pressure, temperature, horizon]]:
constant[
Calculate the next sunrise and sunset times using the PyEphem package.
Parameters
----------
time : pandas.DatetimeIndex
Must be localized
latitude : float
Latitude in degrees, positive north of equator, negative to south
longitude : float
Longitude in degrees, positive east of prime meridian, negative to west
next_or_previous : str
'next' or 'previous' sunrise and sunset relative to time
altitude : float, default 0
distance above sea level in meters.
pressure : int or float, optional, default 101325
air pressure in Pascals.
temperature : int or float, optional, default 12
air temperature in degrees C.
horizon : string, format +/-X:YY
arc degrees:arc minutes from geometrical horizon for sunrise and
sunset, e.g., horizon='+0:00' to use sun center crossing the
geometrical horizon to define sunrise and sunset,
horizon='-0:34' for when the sun's upper edge crosses the
geometrical horizon
Returns
-------
pandas.DataFrame
index is the same as input `time` argument
columns are 'sunrise', 'sunset', and 'transit'
See also
--------
pyephem
]
<ast.Try object at 0x7da1b1a77c10>
if name[times].tz begin[:]
variable[tzinfo] assign[=] name[times].tz
<ast.Tuple object at 0x7da1b1a777f0> assign[=] call[name[_ephem_setup], parameter[name[latitude], name[longitude], name[altitude], name[pressure], name[temperature], name[horizon]]]
if compare[call[name[next_or_previous].lower, parameter[]] equal[==] constant[next]] begin[:]
variable[rising] assign[=] name[obs].next_rising
variable[setting] assign[=] name[obs].next_setting
variable[transit] assign[=] name[obs].next_transit
variable[sunrise] assign[=] list[[]]
variable[sunset] assign[=] list[[]]
variable[trans] assign[=] list[[]]
for taget[name[thetime]] in starred[name[times]] begin[:]
variable[thetime] assign[=] call[name[thetime].to_pydatetime, parameter[]]
name[obs].date assign[=] call[name[ephem].Date, parameter[binary_operation[name[thetime] - call[name[thetime].utcoffset, parameter[]]]]]
call[name[sunrise].append, parameter[call[name[_ephem_to_timezone], parameter[call[name[rising], parameter[name[sun]]], name[tzinfo]]]]]
call[name[sunset].append, parameter[call[name[_ephem_to_timezone], parameter[call[name[setting], parameter[name[sun]]], name[tzinfo]]]]]
call[name[trans].append, parameter[call[name[_ephem_to_timezone], parameter[call[name[transit], parameter[name[sun]]], name[tzinfo]]]]]
return[call[name[pd].DataFrame, parameter[]]] | keyword[def] identifier[sun_rise_set_transit_ephem] ( identifier[times] , identifier[latitude] , identifier[longitude] ,
identifier[next_or_previous] = literal[string] ,
identifier[altitude] = literal[int] ,
identifier[pressure] = literal[int] ,
identifier[temperature] = literal[int] , identifier[horizon] = literal[string] ):
literal[string]
keyword[try] :
keyword[import] identifier[ephem]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[ImportError] ( literal[string] )
keyword[if] identifier[times] . identifier[tz] :
identifier[tzinfo] = identifier[times] . identifier[tz]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[obs] , identifier[sun] = identifier[_ephem_setup] ( identifier[latitude] , identifier[longitude] , identifier[altitude] ,
identifier[pressure] , identifier[temperature] , identifier[horizon] )
keyword[if] identifier[next_or_previous] . identifier[lower] ()== literal[string] :
identifier[rising] = identifier[obs] . identifier[next_rising]
identifier[setting] = identifier[obs] . identifier[next_setting]
identifier[transit] = identifier[obs] . identifier[next_transit]
keyword[elif] identifier[next_or_previous] . identifier[lower] ()== literal[string] :
identifier[rising] = identifier[obs] . identifier[previous_rising]
identifier[setting] = identifier[obs] . identifier[previous_setting]
identifier[transit] = identifier[obs] . identifier[previous_transit]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] +
literal[string] )
identifier[sunrise] =[]
identifier[sunset] =[]
identifier[trans] =[]
keyword[for] identifier[thetime] keyword[in] identifier[times] :
identifier[thetime] = identifier[thetime] . identifier[to_pydatetime] ()
identifier[obs] . identifier[date] = identifier[ephem] . identifier[Date] ( identifier[thetime] - identifier[thetime] . identifier[utcoffset] ())
identifier[sunrise] . identifier[append] ( identifier[_ephem_to_timezone] ( identifier[rising] ( identifier[sun] ), identifier[tzinfo] ))
identifier[sunset] . identifier[append] ( identifier[_ephem_to_timezone] ( identifier[setting] ( identifier[sun] ), identifier[tzinfo] ))
identifier[trans] . identifier[append] ( identifier[_ephem_to_timezone] ( identifier[transit] ( identifier[sun] ), identifier[tzinfo] ))
keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[index] = identifier[times] , identifier[data] ={ literal[string] : identifier[sunrise] ,
literal[string] : identifier[sunset] ,
literal[string] : identifier[trans] }) | def sun_rise_set_transit_ephem(times, latitude, longitude, next_or_previous='next', altitude=0, pressure=101325, temperature=12, horizon='0:00'):
"""
Calculate the next sunrise and sunset times using the PyEphem package.
Parameters
----------
time : pandas.DatetimeIndex
Must be localized
latitude : float
Latitude in degrees, positive north of equator, negative to south
longitude : float
Longitude in degrees, positive east of prime meridian, negative to west
next_or_previous : str
'next' or 'previous' sunrise and sunset relative to time
altitude : float, default 0
distance above sea level in meters.
pressure : int or float, optional, default 101325
air pressure in Pascals.
temperature : int or float, optional, default 12
air temperature in degrees C.
horizon : string, format +/-X:YY
arc degrees:arc minutes from geometrical horizon for sunrise and
sunset, e.g., horizon='+0:00' to use sun center crossing the
geometrical horizon to define sunrise and sunset,
horizon='-0:34' for when the sun's upper edge crosses the
geometrical horizon
Returns
-------
pandas.DataFrame
index is the same as input `time` argument
columns are 'sunrise', 'sunset', and 'transit'
See also
--------
pyephem
"""
try:
import ephem # depends on [control=['try'], data=[]]
except ImportError:
raise ImportError('PyEphem must be installed') # depends on [control=['except'], data=[]]
# times must be localized
if times.tz:
tzinfo = times.tz # depends on [control=['if'], data=[]]
else:
raise ValueError('times must be localized')
(obs, sun) = _ephem_setup(latitude, longitude, altitude, pressure, temperature, horizon)
# create lists of sunrise and sunset time localized to time.tz
if next_or_previous.lower() == 'next':
rising = obs.next_rising
setting = obs.next_setting
transit = obs.next_transit # depends on [control=['if'], data=[]]
elif next_or_previous.lower() == 'previous':
rising = obs.previous_rising
setting = obs.previous_setting
transit = obs.previous_transit # depends on [control=['if'], data=[]]
else:
raise ValueError("next_or_previous must be either 'next' or" + " 'previous'")
sunrise = []
sunset = []
trans = []
for thetime in times:
thetime = thetime.to_pydatetime()
# pyephem drops timezone when converting to its internal datetime
# format, so handle timezone explicitly here
obs.date = ephem.Date(thetime - thetime.utcoffset())
sunrise.append(_ephem_to_timezone(rising(sun), tzinfo))
sunset.append(_ephem_to_timezone(setting(sun), tzinfo))
trans.append(_ephem_to_timezone(transit(sun), tzinfo)) # depends on [control=['for'], data=['thetime']]
return pd.DataFrame(index=times, data={'sunrise': sunrise, 'sunset': sunset, 'transit': trans}) |
def assert_reset(self, asserted):
"""! @brief Assert or de-assert target reset line"""
self._link.drive_nreset(asserted)
self._nreset_state = asserted | def function[assert_reset, parameter[self, asserted]]:
constant[! @brief Assert or de-assert target reset line]
call[name[self]._link.drive_nreset, parameter[name[asserted]]]
name[self]._nreset_state assign[=] name[asserted] | keyword[def] identifier[assert_reset] ( identifier[self] , identifier[asserted] ):
literal[string]
identifier[self] . identifier[_link] . identifier[drive_nreset] ( identifier[asserted] )
identifier[self] . identifier[_nreset_state] = identifier[asserted] | def assert_reset(self, asserted):
"""! @brief Assert or de-assert target reset line"""
self._link.drive_nreset(asserted)
self._nreset_state = asserted |
def slicedIterator(sourceList, sliceSize):
"""
:param: sourceList: list which need to be sliced
:type: list
:param: sliceSize: size of the slice
:type: int
:return: iterator of the sliced list
"""
start = 0
end = 0
while len(sourceList) > end:
end = start + sliceSize
yield sourceList[start: end]
start = end | def function[slicedIterator, parameter[sourceList, sliceSize]]:
constant[
:param: sourceList: list which need to be sliced
:type: list
:param: sliceSize: size of the slice
:type: int
:return: iterator of the sliced list
]
variable[start] assign[=] constant[0]
variable[end] assign[=] constant[0]
while compare[call[name[len], parameter[name[sourceList]]] greater[>] name[end]] begin[:]
variable[end] assign[=] binary_operation[name[start] + name[sliceSize]]
<ast.Yield object at 0x7da204565f60>
variable[start] assign[=] name[end] | keyword[def] identifier[slicedIterator] ( identifier[sourceList] , identifier[sliceSize] ):
literal[string]
identifier[start] = literal[int]
identifier[end] = literal[int]
keyword[while] identifier[len] ( identifier[sourceList] )> identifier[end] :
identifier[end] = identifier[start] + identifier[sliceSize]
keyword[yield] identifier[sourceList] [ identifier[start] : identifier[end] ]
identifier[start] = identifier[end] | def slicedIterator(sourceList, sliceSize):
"""
:param: sourceList: list which need to be sliced
:type: list
:param: sliceSize: size of the slice
:type: int
:return: iterator of the sliced list
"""
start = 0
end = 0
while len(sourceList) > end:
end = start + sliceSize
yield sourceList[start:end]
start = end # depends on [control=['while'], data=['end']] |
async def pair(self):
"""Pair pyatv as a remote control with an Apple TV."""
# Connect using the specified protocol
# TODO: config should be stored elsewhere so that API is same for both
protocol = self.atv.service.protocol
if protocol == const.PROTOCOL_DMAP:
await self.atv.pairing.start(zeroconf=Zeroconf(),
name=self.args.remote_name,
pairing_guid=self.args.pairing_guid)
elif protocol == const.PROTOCOL_MRP:
await self.atv.pairing.start()
# Ask for PIN if present or just wait for pairing to end
if self.atv.pairing.device_provides_pin:
pin = await _read_input(self.loop, 'Enter PIN on screen: ')
self.atv.pairing.pin(pin)
else:
self.atv.pairing.pin(self.args.pin_code)
print('Use {0} to pair with "{1}" (press ENTER to stop)'.format(
self.args.pin_code, self.args.remote_name))
if self.args.pin_code is None:
print('Use any pin to pair with "{}" (press ENTER to stop)'.format(
self.args.remote_name))
else:
print('Use pin {} to pair with "{}" (press ENTER to stop)'.format(
self.args.pin_code, self.args.remote_name))
await self.loop.run_in_executor(None, sys.stdin.readline)
await self.atv.pairing.stop()
# Give some feedback to the user
if self.atv.pairing.has_paired:
print('Pairing seems to have succeeded, yey!')
print('You may now use these credentials: {0}'.format(
self.atv.pairing.credentials))
else:
print('Pairing failed!')
return 1
return 0 | <ast.AsyncFunctionDef object at 0x7da18fe93490> | keyword[async] keyword[def] identifier[pair] ( identifier[self] ):
literal[string]
identifier[protocol] = identifier[self] . identifier[atv] . identifier[service] . identifier[protocol]
keyword[if] identifier[protocol] == identifier[const] . identifier[PROTOCOL_DMAP] :
keyword[await] identifier[self] . identifier[atv] . identifier[pairing] . identifier[start] ( identifier[zeroconf] = identifier[Zeroconf] (),
identifier[name] = identifier[self] . identifier[args] . identifier[remote_name] ,
identifier[pairing_guid] = identifier[self] . identifier[args] . identifier[pairing_guid] )
keyword[elif] identifier[protocol] == identifier[const] . identifier[PROTOCOL_MRP] :
keyword[await] identifier[self] . identifier[atv] . identifier[pairing] . identifier[start] ()
keyword[if] identifier[self] . identifier[atv] . identifier[pairing] . identifier[device_provides_pin] :
identifier[pin] = keyword[await] identifier[_read_input] ( identifier[self] . identifier[loop] , literal[string] )
identifier[self] . identifier[atv] . identifier[pairing] . identifier[pin] ( identifier[pin] )
keyword[else] :
identifier[self] . identifier[atv] . identifier[pairing] . identifier[pin] ( identifier[self] . identifier[args] . identifier[pin_code] )
identifier[print] ( literal[string] . identifier[format] (
identifier[self] . identifier[args] . identifier[pin_code] , identifier[self] . identifier[args] . identifier[remote_name] ))
keyword[if] identifier[self] . identifier[args] . identifier[pin_code] keyword[is] keyword[None] :
identifier[print] ( literal[string] . identifier[format] (
identifier[self] . identifier[args] . identifier[remote_name] ))
keyword[else] :
identifier[print] ( literal[string] . identifier[format] (
identifier[self] . identifier[args] . identifier[pin_code] , identifier[self] . identifier[args] . identifier[remote_name] ))
keyword[await] identifier[self] . identifier[loop] . identifier[run_in_executor] ( keyword[None] , identifier[sys] . identifier[stdin] . identifier[readline] )
keyword[await] identifier[self] . identifier[atv] . identifier[pairing] . identifier[stop] ()
keyword[if] identifier[self] . identifier[atv] . identifier[pairing] . identifier[has_paired] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] . identifier[format] (
identifier[self] . identifier[atv] . identifier[pairing] . identifier[credentials] ))
keyword[else] :
identifier[print] ( literal[string] )
keyword[return] literal[int]
keyword[return] literal[int] | async def pair(self):
"""Pair pyatv as a remote control with an Apple TV."""
# Connect using the specified protocol
# TODO: config should be stored elsewhere so that API is same for both
protocol = self.atv.service.protocol
if protocol == const.PROTOCOL_DMAP:
await self.atv.pairing.start(zeroconf=Zeroconf(), name=self.args.remote_name, pairing_guid=self.args.pairing_guid) # depends on [control=['if'], data=[]]
elif protocol == const.PROTOCOL_MRP:
await self.atv.pairing.start() # depends on [control=['if'], data=[]]
# Ask for PIN if present or just wait for pairing to end
if self.atv.pairing.device_provides_pin:
pin = await _read_input(self.loop, 'Enter PIN on screen: ')
self.atv.pairing.pin(pin) # depends on [control=['if'], data=[]]
else:
self.atv.pairing.pin(self.args.pin_code)
print('Use {0} to pair with "{1}" (press ENTER to stop)'.format(self.args.pin_code, self.args.remote_name))
if self.args.pin_code is None:
print('Use any pin to pair with "{}" (press ENTER to stop)'.format(self.args.remote_name)) # depends on [control=['if'], data=[]]
else:
print('Use pin {} to pair with "{}" (press ENTER to stop)'.format(self.args.pin_code, self.args.remote_name))
await self.loop.run_in_executor(None, sys.stdin.readline)
await self.atv.pairing.stop()
# Give some feedback to the user
if self.atv.pairing.has_paired:
print('Pairing seems to have succeeded, yey!')
print('You may now use these credentials: {0}'.format(self.atv.pairing.credentials)) # depends on [control=['if'], data=[]]
else:
print('Pairing failed!')
return 1
return 0 |
def set(self, lang, instance):
"""
Establece en la instancia actual los atributos de traducción
y la almacena en un diccionario de claves _create_key y valores
el objeto con los atributos dinámicos.
"""
if self._cache_is_too_big():
self.cache = {}
instance_key = TransCache._create_key(lang, instance)
instance._translations_are_cached = True
instance.load_translations(lang=lang)
self.cache[instance_key] = instance | def function[set, parameter[self, lang, instance]]:
constant[
Establece en la instancia actual los atributos de traducción
y la almacena en un diccionario de claves _create_key y valores
el objeto con los atributos dinámicos.
]
if call[name[self]._cache_is_too_big, parameter[]] begin[:]
name[self].cache assign[=] dictionary[[], []]
variable[instance_key] assign[=] call[name[TransCache]._create_key, parameter[name[lang], name[instance]]]
name[instance]._translations_are_cached assign[=] constant[True]
call[name[instance].load_translations, parameter[]]
call[name[self].cache][name[instance_key]] assign[=] name[instance] | keyword[def] identifier[set] ( identifier[self] , identifier[lang] , identifier[instance] ):
literal[string]
keyword[if] identifier[self] . identifier[_cache_is_too_big] ():
identifier[self] . identifier[cache] ={}
identifier[instance_key] = identifier[TransCache] . identifier[_create_key] ( identifier[lang] , identifier[instance] )
identifier[instance] . identifier[_translations_are_cached] = keyword[True]
identifier[instance] . identifier[load_translations] ( identifier[lang] = identifier[lang] )
identifier[self] . identifier[cache] [ identifier[instance_key] ]= identifier[instance] | def set(self, lang, instance):
"""
Establece en la instancia actual los atributos de traducción
y la almacena en un diccionario de claves _create_key y valores
el objeto con los atributos dinámicos.
"""
if self._cache_is_too_big():
self.cache = {} # depends on [control=['if'], data=[]]
instance_key = TransCache._create_key(lang, instance)
instance._translations_are_cached = True
instance.load_translations(lang=lang)
self.cache[instance_key] = instance |
def construct_s3_location_object(location_uri, logical_id, property_name):
"""Constructs a Lambda `Code` or `Content` property, from the SAM `CodeUri` or `ContentUri` property.
This follows the current scheme for Lambda Functions and LayerVersions.
:param dict or string location_uri: s3 location dict or string
:param string logical_id: logical_id of the resource calling this function
:param string property_name: name of the property which is used as an input to this function.
:returns: a Code dict, containing the S3 Bucket, Key, and Version of the Lambda layer code
:rtype: dict
"""
if isinstance(location_uri, dict):
if not location_uri.get("Bucket") or not location_uri.get("Key"):
# location_uri is a dictionary but does not contain Bucket or Key property
raise InvalidResourceException(logical_id,
"'{}' requires Bucket and Key properties to be "
"specified".format(property_name))
s3_pointer = location_uri
else:
# location_uri is NOT a dictionary. Parse it as a string
s3_pointer = parse_s3_uri(location_uri)
if s3_pointer is None:
raise InvalidResourceException(logical_id,
'\'{}\' is not a valid S3 Uri of the form '
'"s3://bucket/key" with optional versionId query '
'parameter.'.format(property_name))
code = {
'S3Bucket': s3_pointer['Bucket'],
'S3Key': s3_pointer['Key']
}
if 'Version' in s3_pointer:
code['S3ObjectVersion'] = s3_pointer['Version']
return code | def function[construct_s3_location_object, parameter[location_uri, logical_id, property_name]]:
constant[Constructs a Lambda `Code` or `Content` property, from the SAM `CodeUri` or `ContentUri` property.
This follows the current scheme for Lambda Functions and LayerVersions.
:param dict or string location_uri: s3 location dict or string
:param string logical_id: logical_id of the resource calling this function
:param string property_name: name of the property which is used as an input to this function.
:returns: a Code dict, containing the S3 Bucket, Key, and Version of the Lambda layer code
:rtype: dict
]
if call[name[isinstance], parameter[name[location_uri], name[dict]]] begin[:]
if <ast.BoolOp object at 0x7da2049607c0> begin[:]
<ast.Raise object at 0x7da204961f30>
variable[s3_pointer] assign[=] name[location_uri]
variable[code] assign[=] dictionary[[<ast.Constant object at 0x7da204960520>, <ast.Constant object at 0x7da2049607f0>], [<ast.Subscript object at 0x7da204962cb0>, <ast.Subscript object at 0x7da204962b30>]]
if compare[constant[Version] in name[s3_pointer]] begin[:]
call[name[code]][constant[S3ObjectVersion]] assign[=] call[name[s3_pointer]][constant[Version]]
return[name[code]] | keyword[def] identifier[construct_s3_location_object] ( identifier[location_uri] , identifier[logical_id] , identifier[property_name] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[location_uri] , identifier[dict] ):
keyword[if] keyword[not] identifier[location_uri] . identifier[get] ( literal[string] ) keyword[or] keyword[not] identifier[location_uri] . identifier[get] ( literal[string] ):
keyword[raise] identifier[InvalidResourceException] ( identifier[logical_id] ,
literal[string]
literal[string] . identifier[format] ( identifier[property_name] ))
identifier[s3_pointer] = identifier[location_uri]
keyword[else] :
identifier[s3_pointer] = identifier[parse_s3_uri] ( identifier[location_uri] )
keyword[if] identifier[s3_pointer] keyword[is] keyword[None] :
keyword[raise] identifier[InvalidResourceException] ( identifier[logical_id] ,
literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[property_name] ))
identifier[code] ={
literal[string] : identifier[s3_pointer] [ literal[string] ],
literal[string] : identifier[s3_pointer] [ literal[string] ]
}
keyword[if] literal[string] keyword[in] identifier[s3_pointer] :
identifier[code] [ literal[string] ]= identifier[s3_pointer] [ literal[string] ]
keyword[return] identifier[code] | def construct_s3_location_object(location_uri, logical_id, property_name):
"""Constructs a Lambda `Code` or `Content` property, from the SAM `CodeUri` or `ContentUri` property.
This follows the current scheme for Lambda Functions and LayerVersions.
:param dict or string location_uri: s3 location dict or string
:param string logical_id: logical_id of the resource calling this function
:param string property_name: name of the property which is used as an input to this function.
:returns: a Code dict, containing the S3 Bucket, Key, and Version of the Lambda layer code
:rtype: dict
"""
if isinstance(location_uri, dict):
if not location_uri.get('Bucket') or not location_uri.get('Key'):
# location_uri is a dictionary but does not contain Bucket or Key property
raise InvalidResourceException(logical_id, "'{}' requires Bucket and Key properties to be specified".format(property_name)) # depends on [control=['if'], data=[]]
s3_pointer = location_uri # depends on [control=['if'], data=[]]
else:
# location_uri is NOT a dictionary. Parse it as a string
s3_pointer = parse_s3_uri(location_uri)
if s3_pointer is None:
raise InvalidResourceException(logical_id, '\'{}\' is not a valid S3 Uri of the form "s3://bucket/key" with optional versionId query parameter.'.format(property_name)) # depends on [control=['if'], data=[]]
code = {'S3Bucket': s3_pointer['Bucket'], 'S3Key': s3_pointer['Key']}
if 'Version' in s3_pointer:
code['S3ObjectVersion'] = s3_pointer['Version'] # depends on [control=['if'], data=['s3_pointer']]
return code |
def maxCtxContextualSubtable(maxCtx, st, ruleType, chain=''):
"""Calculate usMaxContext based on a contextual feature subtable."""
if st.Format == 1:
for ruleset in getattr(st, '%s%sRuleSet' % (chain, ruleType)):
if ruleset is None:
continue
for rule in getattr(ruleset, '%s%sRule' % (chain, ruleType)):
if rule is None:
continue
maxCtx = maxCtxContextualRule(maxCtx, rule, chain)
elif st.Format == 2:
for ruleset in getattr(st, '%s%sClassSet' % (chain, ruleType)):
if ruleset is None:
continue
for rule in getattr(ruleset, '%s%sClassRule' % (chain, ruleType)):
if rule is None:
continue
maxCtx = maxCtxContextualRule(maxCtx, rule, chain)
elif st.Format == 3:
maxCtx = maxCtxContextualRule(maxCtx, st, chain)
return maxCtx | def function[maxCtxContextualSubtable, parameter[maxCtx, st, ruleType, chain]]:
constant[Calculate usMaxContext based on a contextual feature subtable.]
if compare[name[st].Format equal[==] constant[1]] begin[:]
for taget[name[ruleset]] in starred[call[name[getattr], parameter[name[st], binary_operation[constant[%s%sRuleSet] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bcc9db0>, <ast.Name object at 0x7da18bccbe50>]]]]]] begin[:]
if compare[name[ruleset] is constant[None]] begin[:]
continue
for taget[name[rule]] in starred[call[name[getattr], parameter[name[ruleset], binary_operation[constant[%s%sRule] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bcc99f0>, <ast.Name object at 0x7da18bcc9e70>]]]]]] begin[:]
if compare[name[rule] is constant[None]] begin[:]
continue
variable[maxCtx] assign[=] call[name[maxCtxContextualRule], parameter[name[maxCtx], name[rule], name[chain]]]
return[name[maxCtx]] | keyword[def] identifier[maxCtxContextualSubtable] ( identifier[maxCtx] , identifier[st] , identifier[ruleType] , identifier[chain] = literal[string] ):
literal[string]
keyword[if] identifier[st] . identifier[Format] == literal[int] :
keyword[for] identifier[ruleset] keyword[in] identifier[getattr] ( identifier[st] , literal[string] %( identifier[chain] , identifier[ruleType] )):
keyword[if] identifier[ruleset] keyword[is] keyword[None] :
keyword[continue]
keyword[for] identifier[rule] keyword[in] identifier[getattr] ( identifier[ruleset] , literal[string] %( identifier[chain] , identifier[ruleType] )):
keyword[if] identifier[rule] keyword[is] keyword[None] :
keyword[continue]
identifier[maxCtx] = identifier[maxCtxContextualRule] ( identifier[maxCtx] , identifier[rule] , identifier[chain] )
keyword[elif] identifier[st] . identifier[Format] == literal[int] :
keyword[for] identifier[ruleset] keyword[in] identifier[getattr] ( identifier[st] , literal[string] %( identifier[chain] , identifier[ruleType] )):
keyword[if] identifier[ruleset] keyword[is] keyword[None] :
keyword[continue]
keyword[for] identifier[rule] keyword[in] identifier[getattr] ( identifier[ruleset] , literal[string] %( identifier[chain] , identifier[ruleType] )):
keyword[if] identifier[rule] keyword[is] keyword[None] :
keyword[continue]
identifier[maxCtx] = identifier[maxCtxContextualRule] ( identifier[maxCtx] , identifier[rule] , identifier[chain] )
keyword[elif] identifier[st] . identifier[Format] == literal[int] :
identifier[maxCtx] = identifier[maxCtxContextualRule] ( identifier[maxCtx] , identifier[st] , identifier[chain] )
keyword[return] identifier[maxCtx] | def maxCtxContextualSubtable(maxCtx, st, ruleType, chain=''):
"""Calculate usMaxContext based on a contextual feature subtable."""
if st.Format == 1:
for ruleset in getattr(st, '%s%sRuleSet' % (chain, ruleType)):
if ruleset is None:
continue # depends on [control=['if'], data=[]]
for rule in getattr(ruleset, '%s%sRule' % (chain, ruleType)):
if rule is None:
continue # depends on [control=['if'], data=[]]
maxCtx = maxCtxContextualRule(maxCtx, rule, chain) # depends on [control=['for'], data=['rule']] # depends on [control=['for'], data=['ruleset']] # depends on [control=['if'], data=[]]
elif st.Format == 2:
for ruleset in getattr(st, '%s%sClassSet' % (chain, ruleType)):
if ruleset is None:
continue # depends on [control=['if'], data=[]]
for rule in getattr(ruleset, '%s%sClassRule' % (chain, ruleType)):
if rule is None:
continue # depends on [control=['if'], data=[]]
maxCtx = maxCtxContextualRule(maxCtx, rule, chain) # depends on [control=['for'], data=['rule']] # depends on [control=['for'], data=['ruleset']] # depends on [control=['if'], data=[]]
elif st.Format == 3:
maxCtx = maxCtxContextualRule(maxCtx, st, chain) # depends on [control=['if'], data=[]]
return maxCtx |
def get_user_and_user_email_by_id(self, user_or_user_email_id):
"""Retrieve the User and UserEmail object by ID."""
if self.UserEmailClass:
user_email = self.db_adapter.get_object(self.UserEmailClass, user_or_user_email_id)
user = user_email.user if user_email else None
else:
user = self.db_adapter.get_object(self.UserClass, user_or_user_email_id)
user_email = user
return (user, user_email) | def function[get_user_and_user_email_by_id, parameter[self, user_or_user_email_id]]:
constant[Retrieve the User and UserEmail object by ID.]
if name[self].UserEmailClass begin[:]
variable[user_email] assign[=] call[name[self].db_adapter.get_object, parameter[name[self].UserEmailClass, name[user_or_user_email_id]]]
variable[user] assign[=] <ast.IfExp object at 0x7da1b1d452a0>
return[tuple[[<ast.Name object at 0x7da1b1d47b50>, <ast.Name object at 0x7da1b1ed61a0>]]] | keyword[def] identifier[get_user_and_user_email_by_id] ( identifier[self] , identifier[user_or_user_email_id] ):
literal[string]
keyword[if] identifier[self] . identifier[UserEmailClass] :
identifier[user_email] = identifier[self] . identifier[db_adapter] . identifier[get_object] ( identifier[self] . identifier[UserEmailClass] , identifier[user_or_user_email_id] )
identifier[user] = identifier[user_email] . identifier[user] keyword[if] identifier[user_email] keyword[else] keyword[None]
keyword[else] :
identifier[user] = identifier[self] . identifier[db_adapter] . identifier[get_object] ( identifier[self] . identifier[UserClass] , identifier[user_or_user_email_id] )
identifier[user_email] = identifier[user]
keyword[return] ( identifier[user] , identifier[user_email] ) | def get_user_and_user_email_by_id(self, user_or_user_email_id):
"""Retrieve the User and UserEmail object by ID."""
if self.UserEmailClass:
user_email = self.db_adapter.get_object(self.UserEmailClass, user_or_user_email_id)
user = user_email.user if user_email else None # depends on [control=['if'], data=[]]
else:
user = self.db_adapter.get_object(self.UserClass, user_or_user_email_id)
user_email = user
return (user, user_email) |
def releases(self):
"""The releases for this app."""
return self._h._get_resources(
resource=('apps', self.name, 'releases'),
obj=Release, app=self
) | def function[releases, parameter[self]]:
constant[The releases for this app.]
return[call[name[self]._h._get_resources, parameter[]]] | keyword[def] identifier[releases] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[_h] . identifier[_get_resources] (
identifier[resource] =( literal[string] , identifier[self] . identifier[name] , literal[string] ),
identifier[obj] = identifier[Release] , identifier[app] = identifier[self]
) | def releases(self):
"""The releases for this app."""
return self._h._get_resources(resource=('apps', self.name, 'releases'), obj=Release, app=self) |
def open(self, name, mode='rb'):
"""
Retrieves the specified file from storage.
:param name: file name
:type name: str
:param mode: mode to open the file with
:type mode: str
:rtype: :class:`~django:django.core.files.File`
"""
return self.get_storage(name).open(name, mode) | def function[open, parameter[self, name, mode]]:
constant[
Retrieves the specified file from storage.
:param name: file name
:type name: str
:param mode: mode to open the file with
:type mode: str
:rtype: :class:`~django:django.core.files.File`
]
return[call[call[name[self].get_storage, parameter[name[name]]].open, parameter[name[name], name[mode]]]] | keyword[def] identifier[open] ( identifier[self] , identifier[name] , identifier[mode] = literal[string] ):
literal[string]
keyword[return] identifier[self] . identifier[get_storage] ( identifier[name] ). identifier[open] ( identifier[name] , identifier[mode] ) | def open(self, name, mode='rb'):
"""
Retrieves the specified file from storage.
:param name: file name
:type name: str
:param mode: mode to open the file with
:type mode: str
:rtype: :class:`~django:django.core.files.File`
"""
return self.get_storage(name).open(name, mode) |
def beta_to_uni(text, strict=False):
"""
Converts the given text from betacode to unicode.
Args:
text: The beta code text to convert. All of this text must be betacode.
strict: Flag to allow for flexible diacritic order on input.
Returns:
The converted text.
"""
# Check if the requested configuration for conversion already has a trie
# stored otherwise convert it.
param_key = (strict,)
try:
t = _BETA_CONVERSION_TRIES[param_key]
except KeyError:
t = _create_conversion_trie(*param_key)
_BETA_CONVERSION_TRIES[param_key] = t
transform = []
idx = 0
possible_word_boundary = False
while idx < len(text):
if possible_word_boundary and _penultimate_sigma_word_final(transform):
transform[-2] = _FINAL_LC_SIGMA
step = t.longest_prefix(text[idx:idx + _MAX_BETA_TOKEN_LEN])
if step:
possible_word_boundary = text[idx] in _BETA_PUNCTUATION
key, value = step
transform.append(value)
idx += len(key)
else:
possible_word_boundary = True
transform.append(text[idx])
idx += 1
# Check one last time in case there is some whitespace or punctuation at the
# end and check if the last character is a sigma.
if possible_word_boundary and _penultimate_sigma_word_final(transform):
transform[-2] = _FINAL_LC_SIGMA
elif len(transform) > 0 and transform[-1] == _MEDIAL_LC_SIGMA:
transform[-1] = _FINAL_LC_SIGMA
converted = ''.join(transform)
return converted | def function[beta_to_uni, parameter[text, strict]]:
constant[
Converts the given text from betacode to unicode.
Args:
text: The beta code text to convert. All of this text must be betacode.
strict: Flag to allow for flexible diacritic order on input.
Returns:
The converted text.
]
variable[param_key] assign[=] tuple[[<ast.Name object at 0x7da18bc72c80>]]
<ast.Try object at 0x7da18bc708e0>
variable[transform] assign[=] list[[]]
variable[idx] assign[=] constant[0]
variable[possible_word_boundary] assign[=] constant[False]
while compare[name[idx] less[<] call[name[len], parameter[name[text]]]] begin[:]
if <ast.BoolOp object at 0x7da18bc736d0> begin[:]
call[name[transform]][<ast.UnaryOp object at 0x7da207f00a00>] assign[=] name[_FINAL_LC_SIGMA]
variable[step] assign[=] call[name[t].longest_prefix, parameter[call[name[text]][<ast.Slice object at 0x7da207f02bf0>]]]
if name[step] begin[:]
variable[possible_word_boundary] assign[=] compare[call[name[text]][name[idx]] in name[_BETA_PUNCTUATION]]
<ast.Tuple object at 0x7da207f03550> assign[=] name[step]
call[name[transform].append, parameter[name[value]]]
<ast.AugAssign object at 0x7da207f01b40>
if <ast.BoolOp object at 0x7da207f00e20> begin[:]
call[name[transform]][<ast.UnaryOp object at 0x7da207f03670>] assign[=] name[_FINAL_LC_SIGMA]
variable[converted] assign[=] call[constant[].join, parameter[name[transform]]]
return[name[converted]] | keyword[def] identifier[beta_to_uni] ( identifier[text] , identifier[strict] = keyword[False] ):
literal[string]
identifier[param_key] =( identifier[strict] ,)
keyword[try] :
identifier[t] = identifier[_BETA_CONVERSION_TRIES] [ identifier[param_key] ]
keyword[except] identifier[KeyError] :
identifier[t] = identifier[_create_conversion_trie] (* identifier[param_key] )
identifier[_BETA_CONVERSION_TRIES] [ identifier[param_key] ]= identifier[t]
identifier[transform] =[]
identifier[idx] = literal[int]
identifier[possible_word_boundary] = keyword[False]
keyword[while] identifier[idx] < identifier[len] ( identifier[text] ):
keyword[if] identifier[possible_word_boundary] keyword[and] identifier[_penultimate_sigma_word_final] ( identifier[transform] ):
identifier[transform] [- literal[int] ]= identifier[_FINAL_LC_SIGMA]
identifier[step] = identifier[t] . identifier[longest_prefix] ( identifier[text] [ identifier[idx] : identifier[idx] + identifier[_MAX_BETA_TOKEN_LEN] ])
keyword[if] identifier[step] :
identifier[possible_word_boundary] = identifier[text] [ identifier[idx] ] keyword[in] identifier[_BETA_PUNCTUATION]
identifier[key] , identifier[value] = identifier[step]
identifier[transform] . identifier[append] ( identifier[value] )
identifier[idx] += identifier[len] ( identifier[key] )
keyword[else] :
identifier[possible_word_boundary] = keyword[True]
identifier[transform] . identifier[append] ( identifier[text] [ identifier[idx] ])
identifier[idx] += literal[int]
keyword[if] identifier[possible_word_boundary] keyword[and] identifier[_penultimate_sigma_word_final] ( identifier[transform] ):
identifier[transform] [- literal[int] ]= identifier[_FINAL_LC_SIGMA]
keyword[elif] identifier[len] ( identifier[transform] )> literal[int] keyword[and] identifier[transform] [- literal[int] ]== identifier[_MEDIAL_LC_SIGMA] :
identifier[transform] [- literal[int] ]= identifier[_FINAL_LC_SIGMA]
identifier[converted] = literal[string] . identifier[join] ( identifier[transform] )
keyword[return] identifier[converted] | def beta_to_uni(text, strict=False):
"""
Converts the given text from betacode to unicode.
Args:
text: The beta code text to convert. All of this text must be betacode.
strict: Flag to allow for flexible diacritic order on input.
Returns:
The converted text.
"""
# Check if the requested configuration for conversion already has a trie
# stored otherwise convert it.
param_key = (strict,)
try:
t = _BETA_CONVERSION_TRIES[param_key] # depends on [control=['try'], data=[]]
except KeyError:
t = _create_conversion_trie(*param_key)
_BETA_CONVERSION_TRIES[param_key] = t # depends on [control=['except'], data=[]]
transform = []
idx = 0
possible_word_boundary = False
while idx < len(text):
if possible_word_boundary and _penultimate_sigma_word_final(transform):
transform[-2] = _FINAL_LC_SIGMA # depends on [control=['if'], data=[]]
step = t.longest_prefix(text[idx:idx + _MAX_BETA_TOKEN_LEN])
if step:
possible_word_boundary = text[idx] in _BETA_PUNCTUATION
(key, value) = step
transform.append(value)
idx += len(key) # depends on [control=['if'], data=[]]
else:
possible_word_boundary = True
transform.append(text[idx])
idx += 1 # depends on [control=['while'], data=['idx']]
# Check one last time in case there is some whitespace or punctuation at the
# end and check if the last character is a sigma.
if possible_word_boundary and _penultimate_sigma_word_final(transform):
transform[-2] = _FINAL_LC_SIGMA # depends on [control=['if'], data=[]]
elif len(transform) > 0 and transform[-1] == _MEDIAL_LC_SIGMA:
transform[-1] = _FINAL_LC_SIGMA # depends on [control=['if'], data=[]]
converted = ''.join(transform)
return converted |
def determine_context(device_ids: List[int],
use_cpu: bool,
disable_device_locking: bool,
lock_dir: str,
exit_stack: ExitStack) -> List[mx.Context]:
"""
Determine the MXNet context to run on (CPU or GPU).
:param device_ids: List of device as defined from the CLI.
:param use_cpu: Whether to use the CPU instead of GPU(s).
:param disable_device_locking: Disable Sockeye's device locking feature.
:param lock_dir: Directory to place device lock files in.
:param exit_stack: An ExitStack from contextlib.
:return: A list with the context(s) to run on.
"""
if use_cpu:
context = [mx.cpu()]
else:
num_gpus = get_num_gpus()
check_condition(num_gpus >= 1,
"No GPUs found, consider running on the CPU with --use-cpu ")
if disable_device_locking:
context = expand_requested_device_ids(device_ids)
else:
context = exit_stack.enter_context(acquire_gpus(device_ids, lock_dir=lock_dir))
context = [mx.gpu(gpu_id) for gpu_id in context]
return context | def function[determine_context, parameter[device_ids, use_cpu, disable_device_locking, lock_dir, exit_stack]]:
constant[
Determine the MXNet context to run on (CPU or GPU).
:param device_ids: List of device as defined from the CLI.
:param use_cpu: Whether to use the CPU instead of GPU(s).
:param disable_device_locking: Disable Sockeye's device locking feature.
:param lock_dir: Directory to place device lock files in.
:param exit_stack: An ExitStack from contextlib.
:return: A list with the context(s) to run on.
]
if name[use_cpu] begin[:]
variable[context] assign[=] list[[<ast.Call object at 0x7da1b1d8bc40>]]
return[name[context]] | keyword[def] identifier[determine_context] ( identifier[device_ids] : identifier[List] [ identifier[int] ],
identifier[use_cpu] : identifier[bool] ,
identifier[disable_device_locking] : identifier[bool] ,
identifier[lock_dir] : identifier[str] ,
identifier[exit_stack] : identifier[ExitStack] )-> identifier[List] [ identifier[mx] . identifier[Context] ]:
literal[string]
keyword[if] identifier[use_cpu] :
identifier[context] =[ identifier[mx] . identifier[cpu] ()]
keyword[else] :
identifier[num_gpus] = identifier[get_num_gpus] ()
identifier[check_condition] ( identifier[num_gpus] >= literal[int] ,
literal[string] )
keyword[if] identifier[disable_device_locking] :
identifier[context] = identifier[expand_requested_device_ids] ( identifier[device_ids] )
keyword[else] :
identifier[context] = identifier[exit_stack] . identifier[enter_context] ( identifier[acquire_gpus] ( identifier[device_ids] , identifier[lock_dir] = identifier[lock_dir] ))
identifier[context] =[ identifier[mx] . identifier[gpu] ( identifier[gpu_id] ) keyword[for] identifier[gpu_id] keyword[in] identifier[context] ]
keyword[return] identifier[context] | def determine_context(device_ids: List[int], use_cpu: bool, disable_device_locking: bool, lock_dir: str, exit_stack: ExitStack) -> List[mx.Context]:
"""
Determine the MXNet context to run on (CPU or GPU).
:param device_ids: List of device as defined from the CLI.
:param use_cpu: Whether to use the CPU instead of GPU(s).
:param disable_device_locking: Disable Sockeye's device locking feature.
:param lock_dir: Directory to place device lock files in.
:param exit_stack: An ExitStack from contextlib.
:return: A list with the context(s) to run on.
"""
if use_cpu:
context = [mx.cpu()] # depends on [control=['if'], data=[]]
else:
num_gpus = get_num_gpus()
check_condition(num_gpus >= 1, 'No GPUs found, consider running on the CPU with --use-cpu ')
if disable_device_locking:
context = expand_requested_device_ids(device_ids) # depends on [control=['if'], data=[]]
else:
context = exit_stack.enter_context(acquire_gpus(device_ids, lock_dir=lock_dir))
context = [mx.gpu(gpu_id) for gpu_id in context]
return context |
def getSonarData(self):
'''
Returns last LaserData.
@return last JdeRobotTypes LaserData saved
'''
if self.hasproxy():
self.lock.acquire()
sonar = self.sonar
self.lock.release()
return sonar
return None | def function[getSonarData, parameter[self]]:
constant[
Returns last LaserData.
@return last JdeRobotTypes LaserData saved
]
if call[name[self].hasproxy, parameter[]] begin[:]
call[name[self].lock.acquire, parameter[]]
variable[sonar] assign[=] name[self].sonar
call[name[self].lock.release, parameter[]]
return[name[sonar]]
return[constant[None]] | keyword[def] identifier[getSonarData] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[hasproxy] ():
identifier[self] . identifier[lock] . identifier[acquire] ()
identifier[sonar] = identifier[self] . identifier[sonar]
identifier[self] . identifier[lock] . identifier[release] ()
keyword[return] identifier[sonar]
keyword[return] keyword[None] | def getSonarData(self):
"""
Returns last LaserData.
@return last JdeRobotTypes LaserData saved
"""
if self.hasproxy():
self.lock.acquire()
sonar = self.sonar
self.lock.release()
return sonar # depends on [control=['if'], data=[]]
return None |
def process_action(resource, action, action_issuer='unknown'):
"""Process an audit action for a resource, if possible
Args:
resource (:obj:`Resource`): A resource object to perform the action on
action (`str`): Type of action to perform (`kill` or `stop`)
action_issuer (`str`): The issuer of the action
Returns:
`ActionStatus`
"""
from cinq_collector_aws import AWSRegionCollector
func_action = action_mapper[resource.resource_type][action]
extra_info = {}
action_status = ActionStatus.UNKNOWN
if func_action:
if action_mapper[resource.resource_type]['service_name'] == 'lambda':
client = get_aws_session(
AWSAccount.get(dbconfig.get('rds_collector_account', AWSRegionCollector.ns, ''))
).client(
'lambda',
dbconfig.get('rds_collector_region', AWSRegionCollector.ns, '')
)
else:
client = get_aws_session(AWSAccount(resource.account)).client(
action_mapper[resource.resource_type]['service_name'],
region_name=resource.location
)
try:
logger.info(f'Trying to {action} resource {resource.id} for account {resource.account.account_name} / region {resource.location}')
action_status, extra_info = func_action(client, resource)
Enforcement.create(resource.account.account_id, resource.id, action, datetime.now(), extra_info)
except Exception as ex:
action_status = ActionStatus.FAILED
logger.exception('Failed to apply action {} to {}: {}'.format(action, resource.id, ex))
finally:
auditlog(
event='{}.{}.{}.{}'.format(action_issuer, resource.resource_type, action, action_status),
actor=action_issuer,
data={
'resource_id': resource.id,
'account_name': resource.account.account_name,
'location': resource.location,
'info': extra_info
}
)
return action_status
else:
logger.error('Failed to apply action {} to {}: Not supported'.format(action, resource.id))
return ActionStatus.FAILED | def function[process_action, parameter[resource, action, action_issuer]]:
constant[Process an audit action for a resource, if possible
Args:
resource (:obj:`Resource`): A resource object to perform the action on
action (`str`): Type of action to perform (`kill` or `stop`)
action_issuer (`str`): The issuer of the action
Returns:
`ActionStatus`
]
from relative_module[cinq_collector_aws] import module[AWSRegionCollector]
variable[func_action] assign[=] call[call[name[action_mapper]][name[resource].resource_type]][name[action]]
variable[extra_info] assign[=] dictionary[[], []]
variable[action_status] assign[=] name[ActionStatus].UNKNOWN
if name[func_action] begin[:]
if compare[call[call[name[action_mapper]][name[resource].resource_type]][constant[service_name]] equal[==] constant[lambda]] begin[:]
variable[client] assign[=] call[call[name[get_aws_session], parameter[call[name[AWSAccount].get, parameter[call[name[dbconfig].get, parameter[constant[rds_collector_account], name[AWSRegionCollector].ns, constant[]]]]]]].client, parameter[constant[lambda], call[name[dbconfig].get, parameter[constant[rds_collector_region], name[AWSRegionCollector].ns, constant[]]]]]
<ast.Try object at 0x7da1b204a7d0> | keyword[def] identifier[process_action] ( identifier[resource] , identifier[action] , identifier[action_issuer] = literal[string] ):
literal[string]
keyword[from] identifier[cinq_collector_aws] keyword[import] identifier[AWSRegionCollector]
identifier[func_action] = identifier[action_mapper] [ identifier[resource] . identifier[resource_type] ][ identifier[action] ]
identifier[extra_info] ={}
identifier[action_status] = identifier[ActionStatus] . identifier[UNKNOWN]
keyword[if] identifier[func_action] :
keyword[if] identifier[action_mapper] [ identifier[resource] . identifier[resource_type] ][ literal[string] ]== literal[string] :
identifier[client] = identifier[get_aws_session] (
identifier[AWSAccount] . identifier[get] ( identifier[dbconfig] . identifier[get] ( literal[string] , identifier[AWSRegionCollector] . identifier[ns] , literal[string] ))
). identifier[client] (
literal[string] ,
identifier[dbconfig] . identifier[get] ( literal[string] , identifier[AWSRegionCollector] . identifier[ns] , literal[string] )
)
keyword[else] :
identifier[client] = identifier[get_aws_session] ( identifier[AWSAccount] ( identifier[resource] . identifier[account] )). identifier[client] (
identifier[action_mapper] [ identifier[resource] . identifier[resource_type] ][ literal[string] ],
identifier[region_name] = identifier[resource] . identifier[location]
)
keyword[try] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[action_status] , identifier[extra_info] = identifier[func_action] ( identifier[client] , identifier[resource] )
identifier[Enforcement] . identifier[create] ( identifier[resource] . identifier[account] . identifier[account_id] , identifier[resource] . identifier[id] , identifier[action] , identifier[datetime] . identifier[now] (), identifier[extra_info] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[action_status] = identifier[ActionStatus] . identifier[FAILED]
identifier[logger] . identifier[exception] ( literal[string] . identifier[format] ( identifier[action] , identifier[resource] . identifier[id] , identifier[ex] ))
keyword[finally] :
identifier[auditlog] (
identifier[event] = literal[string] . identifier[format] ( identifier[action_issuer] , identifier[resource] . identifier[resource_type] , identifier[action] , identifier[action_status] ),
identifier[actor] = identifier[action_issuer] ,
identifier[data] ={
literal[string] : identifier[resource] . identifier[id] ,
literal[string] : identifier[resource] . identifier[account] . identifier[account_name] ,
literal[string] : identifier[resource] . identifier[location] ,
literal[string] : identifier[extra_info]
}
)
keyword[return] identifier[action_status]
keyword[else] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[action] , identifier[resource] . identifier[id] ))
keyword[return] identifier[ActionStatus] . identifier[FAILED] | def process_action(resource, action, action_issuer='unknown'):
"""Process an audit action for a resource, if possible
Args:
resource (:obj:`Resource`): A resource object to perform the action on
action (`str`): Type of action to perform (`kill` or `stop`)
action_issuer (`str`): The issuer of the action
Returns:
`ActionStatus`
"""
from cinq_collector_aws import AWSRegionCollector
func_action = action_mapper[resource.resource_type][action]
extra_info = {}
action_status = ActionStatus.UNKNOWN
if func_action:
if action_mapper[resource.resource_type]['service_name'] == 'lambda':
client = get_aws_session(AWSAccount.get(dbconfig.get('rds_collector_account', AWSRegionCollector.ns, ''))).client('lambda', dbconfig.get('rds_collector_region', AWSRegionCollector.ns, '')) # depends on [control=['if'], data=[]]
else:
client = get_aws_session(AWSAccount(resource.account)).client(action_mapper[resource.resource_type]['service_name'], region_name=resource.location)
try:
logger.info(f'Trying to {action} resource {resource.id} for account {resource.account.account_name} / region {resource.location}')
(action_status, extra_info) = func_action(client, resource)
Enforcement.create(resource.account.account_id, resource.id, action, datetime.now(), extra_info) # depends on [control=['try'], data=[]]
except Exception as ex:
action_status = ActionStatus.FAILED
logger.exception('Failed to apply action {} to {}: {}'.format(action, resource.id, ex)) # depends on [control=['except'], data=['ex']]
finally:
auditlog(event='{}.{}.{}.{}'.format(action_issuer, resource.resource_type, action, action_status), actor=action_issuer, data={'resource_id': resource.id, 'account_name': resource.account.account_name, 'location': resource.location, 'info': extra_info})
return action_status # depends on [control=['if'], data=[]]
else:
logger.error('Failed to apply action {} to {}: Not supported'.format(action, resource.id))
return ActionStatus.FAILED |
def _on_client_disconnect(self, data):
"""Handle client disconnect."""
self._clients[data.get('id')].update_connected(False)
_LOGGER.info('client %s disconnected', self._clients[data.get('id')].friendly_name) | def function[_on_client_disconnect, parameter[self, data]]:
constant[Handle client disconnect.]
call[call[name[self]._clients][call[name[data].get, parameter[constant[id]]]].update_connected, parameter[constant[False]]]
call[name[_LOGGER].info, parameter[constant[client %s disconnected], call[name[self]._clients][call[name[data].get, parameter[constant[id]]]].friendly_name]] | keyword[def] identifier[_on_client_disconnect] ( identifier[self] , identifier[data] ):
literal[string]
identifier[self] . identifier[_clients] [ identifier[data] . identifier[get] ( literal[string] )]. identifier[update_connected] ( keyword[False] )
identifier[_LOGGER] . identifier[info] ( literal[string] , identifier[self] . identifier[_clients] [ identifier[data] . identifier[get] ( literal[string] )]. identifier[friendly_name] ) | def _on_client_disconnect(self, data):
"""Handle client disconnect."""
self._clients[data.get('id')].update_connected(False)
_LOGGER.info('client %s disconnected', self._clients[data.get('id')].friendly_name) |
def increment(self, counter_name, delta):
"""Increment counter value.
Args:
counter_name: counter name as String.
delta: increment delta as Integer.
Returns:
new counter value.
"""
current_value = self.counters.get(counter_name, 0)
new_value = current_value + delta
self.counters[counter_name] = new_value
return new_value | def function[increment, parameter[self, counter_name, delta]]:
constant[Increment counter value.
Args:
counter_name: counter name as String.
delta: increment delta as Integer.
Returns:
new counter value.
]
variable[current_value] assign[=] call[name[self].counters.get, parameter[name[counter_name], constant[0]]]
variable[new_value] assign[=] binary_operation[name[current_value] + name[delta]]
call[name[self].counters][name[counter_name]] assign[=] name[new_value]
return[name[new_value]] | keyword[def] identifier[increment] ( identifier[self] , identifier[counter_name] , identifier[delta] ):
literal[string]
identifier[current_value] = identifier[self] . identifier[counters] . identifier[get] ( identifier[counter_name] , literal[int] )
identifier[new_value] = identifier[current_value] + identifier[delta]
identifier[self] . identifier[counters] [ identifier[counter_name] ]= identifier[new_value]
keyword[return] identifier[new_value] | def increment(self, counter_name, delta):
"""Increment counter value.
Args:
counter_name: counter name as String.
delta: increment delta as Integer.
Returns:
new counter value.
"""
current_value = self.counters.get(counter_name, 0)
new_value = current_value + delta
self.counters[counter_name] = new_value
return new_value |
def vprjp(vin, plane):
"""
Project a vector onto a specified plane, orthogonally.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vprjp_c.html
:param vin: The projected vector.
:type vin: 3-Element Array of floats
:param plane: Plane containing vin.
:type plane: spiceypy.utils.support_types.Plane
:return: Vector resulting from projection.
:rtype: 3-Element Array of floats
"""
vin = stypes.toDoubleVector(vin)
vout = stypes.emptyDoubleVector(3)
libspice.vprjp_c(vin, ctypes.byref(plane), vout)
return stypes.cVectorToPython(vout) | def function[vprjp, parameter[vin, plane]]:
constant[
Project a vector onto a specified plane, orthogonally.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vprjp_c.html
:param vin: The projected vector.
:type vin: 3-Element Array of floats
:param plane: Plane containing vin.
:type plane: spiceypy.utils.support_types.Plane
:return: Vector resulting from projection.
:rtype: 3-Element Array of floats
]
variable[vin] assign[=] call[name[stypes].toDoubleVector, parameter[name[vin]]]
variable[vout] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]]
call[name[libspice].vprjp_c, parameter[name[vin], call[name[ctypes].byref, parameter[name[plane]]], name[vout]]]
return[call[name[stypes].cVectorToPython, parameter[name[vout]]]] | keyword[def] identifier[vprjp] ( identifier[vin] , identifier[plane] ):
literal[string]
identifier[vin] = identifier[stypes] . identifier[toDoubleVector] ( identifier[vin] )
identifier[vout] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[libspice] . identifier[vprjp_c] ( identifier[vin] , identifier[ctypes] . identifier[byref] ( identifier[plane] ), identifier[vout] )
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[vout] ) | def vprjp(vin, plane):
"""
Project a vector onto a specified plane, orthogonally.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vprjp_c.html
:param vin: The projected vector.
:type vin: 3-Element Array of floats
:param plane: Plane containing vin.
:type plane: spiceypy.utils.support_types.Plane
:return: Vector resulting from projection.
:rtype: 3-Element Array of floats
"""
vin = stypes.toDoubleVector(vin)
vout = stypes.emptyDoubleVector(3)
libspice.vprjp_c(vin, ctypes.byref(plane), vout)
return stypes.cVectorToPython(vout) |
def load_feature(fname, language):
""" Load and parse a feature file. """
fname = os.path.abspath(fname)
feat = parse_file(fname, language)
return feat | def function[load_feature, parameter[fname, language]]:
constant[ Load and parse a feature file. ]
variable[fname] assign[=] call[name[os].path.abspath, parameter[name[fname]]]
variable[feat] assign[=] call[name[parse_file], parameter[name[fname], name[language]]]
return[name[feat]] | keyword[def] identifier[load_feature] ( identifier[fname] , identifier[language] ):
literal[string]
identifier[fname] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[fname] )
identifier[feat] = identifier[parse_file] ( identifier[fname] , identifier[language] )
keyword[return] identifier[feat] | def load_feature(fname, language):
""" Load and parse a feature file. """
fname = os.path.abspath(fname)
feat = parse_file(fname, language)
return feat |
def get(self):
"""Get quota from Cloud Provider."""
# get all network quota from Cloud Provider.
attrs = ("networks",
"security_groups",
"floating_ips",
"routers",
"internet_gateways")
for attr in attrs:
setattr(self, attr, eval("self.get_{}()". format(attr))) | def function[get, parameter[self]]:
constant[Get quota from Cloud Provider.]
variable[attrs] assign[=] tuple[[<ast.Constant object at 0x7da18c4ceb90>, <ast.Constant object at 0x7da18c4cceb0>, <ast.Constant object at 0x7da18c4ce0b0>, <ast.Constant object at 0x7da18c4ced40>, <ast.Constant object at 0x7da18c4cc850>]]
for taget[name[attr]] in starred[name[attrs]] begin[:]
call[name[setattr], parameter[name[self], name[attr], call[name[eval], parameter[call[constant[self.get_{}()].format, parameter[name[attr]]]]]]] | keyword[def] identifier[get] ( identifier[self] ):
literal[string]
identifier[attrs] =( literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] )
keyword[for] identifier[attr] keyword[in] identifier[attrs] :
identifier[setattr] ( identifier[self] , identifier[attr] , identifier[eval] ( literal[string] . identifier[format] ( identifier[attr] ))) | def get(self):
"""Get quota from Cloud Provider."""
# get all network quota from Cloud Provider.
attrs = ('networks', 'security_groups', 'floating_ips', 'routers', 'internet_gateways')
for attr in attrs:
setattr(self, attr, eval('self.get_{}()'.format(attr))) # depends on [control=['for'], data=['attr']] |
def fix(source_key, rownum, line, source, sample_only_filter, sample_size):
"""
:param rownum:
:param line:
:param source:
:param sample_only_filter:
:param sample_size:
:return: (row, no_more_data) TUPLE WHERE row IS {"value":<data structure>} OR {"json":<text line>}
"""
value = json2value(line)
if rownum == 0:
if len(line) > MAX_RECORD_LENGTH:
_shorten(source_key, value, source)
value = _fix(value)
if sample_only_filter and Random.int(int(1.0/coalesce(sample_size, 0.01))) != 0 and jx.filter([value], sample_only_filter):
# INDEX etl.id==0, BUT NO MORE
if value.etl.id != 0:
Log.error("Expecting etl.id==0")
row = {"value": value}
return row, True
elif len(line) > MAX_RECORD_LENGTH:
_shorten(source_key, value, source)
value = _fix(value)
elif '"resource_usage":' in line:
value = _fix(value)
row = {"value": value}
return row, False | def function[fix, parameter[source_key, rownum, line, source, sample_only_filter, sample_size]]:
constant[
:param rownum:
:param line:
:param source:
:param sample_only_filter:
:param sample_size:
:return: (row, no_more_data) TUPLE WHERE row IS {"value":<data structure>} OR {"json":<text line>}
]
variable[value] assign[=] call[name[json2value], parameter[name[line]]]
if compare[name[rownum] equal[==] constant[0]] begin[:]
if compare[call[name[len], parameter[name[line]]] greater[>] name[MAX_RECORD_LENGTH]] begin[:]
call[name[_shorten], parameter[name[source_key], name[value], name[source]]]
variable[value] assign[=] call[name[_fix], parameter[name[value]]]
if <ast.BoolOp object at 0x7da207f01d50> begin[:]
if compare[name[value].etl.id not_equal[!=] constant[0]] begin[:]
call[name[Log].error, parameter[constant[Expecting etl.id==0]]]
variable[row] assign[=] dictionary[[<ast.Constant object at 0x7da1b2346ad0>], [<ast.Name object at 0x7da1b23464a0>]]
return[tuple[[<ast.Name object at 0x7da1b23447c0>, <ast.Constant object at 0x7da1b2345660>]]]
variable[row] assign[=] dictionary[[<ast.Constant object at 0x7da1b0b72fb0>], [<ast.Name object at 0x7da1b0b72da0>]]
return[tuple[[<ast.Name object at 0x7da207f021d0>, <ast.Constant object at 0x7da207f00100>]]] | keyword[def] identifier[fix] ( identifier[source_key] , identifier[rownum] , identifier[line] , identifier[source] , identifier[sample_only_filter] , identifier[sample_size] ):
literal[string]
identifier[value] = identifier[json2value] ( identifier[line] )
keyword[if] identifier[rownum] == literal[int] :
keyword[if] identifier[len] ( identifier[line] )> identifier[MAX_RECORD_LENGTH] :
identifier[_shorten] ( identifier[source_key] , identifier[value] , identifier[source] )
identifier[value] = identifier[_fix] ( identifier[value] )
keyword[if] identifier[sample_only_filter] keyword[and] identifier[Random] . identifier[int] ( identifier[int] ( literal[int] / identifier[coalesce] ( identifier[sample_size] , literal[int] )))!= literal[int] keyword[and] identifier[jx] . identifier[filter] ([ identifier[value] ], identifier[sample_only_filter] ):
keyword[if] identifier[value] . identifier[etl] . identifier[id] != literal[int] :
identifier[Log] . identifier[error] ( literal[string] )
identifier[row] ={ literal[string] : identifier[value] }
keyword[return] identifier[row] , keyword[True]
keyword[elif] identifier[len] ( identifier[line] )> identifier[MAX_RECORD_LENGTH] :
identifier[_shorten] ( identifier[source_key] , identifier[value] , identifier[source] )
identifier[value] = identifier[_fix] ( identifier[value] )
keyword[elif] literal[string] keyword[in] identifier[line] :
identifier[value] = identifier[_fix] ( identifier[value] )
identifier[row] ={ literal[string] : identifier[value] }
keyword[return] identifier[row] , keyword[False] | def fix(source_key, rownum, line, source, sample_only_filter, sample_size):
"""
:param rownum:
:param line:
:param source:
:param sample_only_filter:
:param sample_size:
:return: (row, no_more_data) TUPLE WHERE row IS {"value":<data structure>} OR {"json":<text line>}
"""
value = json2value(line)
if rownum == 0:
if len(line) > MAX_RECORD_LENGTH:
_shorten(source_key, value, source) # depends on [control=['if'], data=[]]
value = _fix(value)
if sample_only_filter and Random.int(int(1.0 / coalesce(sample_size, 0.01))) != 0 and jx.filter([value], sample_only_filter):
# INDEX etl.id==0, BUT NO MORE
if value.etl.id != 0:
Log.error('Expecting etl.id==0') # depends on [control=['if'], data=[]]
row = {'value': value}
return (row, True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif len(line) > MAX_RECORD_LENGTH:
_shorten(source_key, value, source)
value = _fix(value) # depends on [control=['if'], data=[]]
elif '"resource_usage":' in line:
value = _fix(value) # depends on [control=['if'], data=[]]
row = {'value': value}
return (row, False) |
def output(self, original_filename):
"""
_filename is not used
Args:
_filename(string)
"""
for contract in self.contracts:
for function in contract.functions + contract.modifiers:
filename = "{}-{}-{}.dot".format(original_filename, contract.name, function.full_name)
self.info('Export {}'.format(filename))
function.slithir_cfg_to_dot(filename) | def function[output, parameter[self, original_filename]]:
constant[
_filename is not used
Args:
_filename(string)
]
for taget[name[contract]] in starred[name[self].contracts] begin[:]
for taget[name[function]] in starred[binary_operation[name[contract].functions + name[contract].modifiers]] begin[:]
variable[filename] assign[=] call[constant[{}-{}-{}.dot].format, parameter[name[original_filename], name[contract].name, name[function].full_name]]
call[name[self].info, parameter[call[constant[Export {}].format, parameter[name[filename]]]]]
call[name[function].slithir_cfg_to_dot, parameter[name[filename]]] | keyword[def] identifier[output] ( identifier[self] , identifier[original_filename] ):
literal[string]
keyword[for] identifier[contract] keyword[in] identifier[self] . identifier[contracts] :
keyword[for] identifier[function] keyword[in] identifier[contract] . identifier[functions] + identifier[contract] . identifier[modifiers] :
identifier[filename] = literal[string] . identifier[format] ( identifier[original_filename] , identifier[contract] . identifier[name] , identifier[function] . identifier[full_name] )
identifier[self] . identifier[info] ( literal[string] . identifier[format] ( identifier[filename] ))
identifier[function] . identifier[slithir_cfg_to_dot] ( identifier[filename] ) | def output(self, original_filename):
"""
_filename is not used
Args:
_filename(string)
"""
for contract in self.contracts:
for function in contract.functions + contract.modifiers:
filename = '{}-{}-{}.dot'.format(original_filename, contract.name, function.full_name)
self.info('Export {}'.format(filename))
function.slithir_cfg_to_dot(filename) # depends on [control=['for'], data=['function']] # depends on [control=['for'], data=['contract']] |
def convert(self, vroot, entry_variables):
"""
All functions are replaced with the same `new` function.
Args:
vroot (:obj:`Variable`): NNabla Variable
entry_variables (:obj:`Variable`): Entry variable from which the conversion starts.
"""
self.graph_info = GraphInfo(vroot)
self.entry_variables = entry_variables
with nn.parameter_scope(self.name):
# Function loop in the forward order
for t, func in enumerate(self.graph_info.funcs):
# Activation check
if func.name in self.activation_functions:
activation_func = func
o = self._fixed_point_activation_conversion(
activation_func)
continue
# Identity conversion
o = self._identity_conversion(func)
self.end_variable = o
return self.end_variable | def function[convert, parameter[self, vroot, entry_variables]]:
constant[
All functions are replaced with the same `new` function.
Args:
vroot (:obj:`Variable`): NNabla Variable
entry_variables (:obj:`Variable`): Entry variable from which the conversion starts.
]
name[self].graph_info assign[=] call[name[GraphInfo], parameter[name[vroot]]]
name[self].entry_variables assign[=] name[entry_variables]
with call[name[nn].parameter_scope, parameter[name[self].name]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da204565d50>, <ast.Name object at 0x7da204564df0>]]] in starred[call[name[enumerate], parameter[name[self].graph_info.funcs]]] begin[:]
if compare[name[func].name in name[self].activation_functions] begin[:]
variable[activation_func] assign[=] name[func]
variable[o] assign[=] call[name[self]._fixed_point_activation_conversion, parameter[name[activation_func]]]
continue
variable[o] assign[=] call[name[self]._identity_conversion, parameter[name[func]]]
name[self].end_variable assign[=] name[o]
return[name[self].end_variable] | keyword[def] identifier[convert] ( identifier[self] , identifier[vroot] , identifier[entry_variables] ):
literal[string]
identifier[self] . identifier[graph_info] = identifier[GraphInfo] ( identifier[vroot] )
identifier[self] . identifier[entry_variables] = identifier[entry_variables]
keyword[with] identifier[nn] . identifier[parameter_scope] ( identifier[self] . identifier[name] ):
keyword[for] identifier[t] , identifier[func] keyword[in] identifier[enumerate] ( identifier[self] . identifier[graph_info] . identifier[funcs] ):
keyword[if] identifier[func] . identifier[name] keyword[in] identifier[self] . identifier[activation_functions] :
identifier[activation_func] = identifier[func]
identifier[o] = identifier[self] . identifier[_fixed_point_activation_conversion] (
identifier[activation_func] )
keyword[continue]
identifier[o] = identifier[self] . identifier[_identity_conversion] ( identifier[func] )
identifier[self] . identifier[end_variable] = identifier[o]
keyword[return] identifier[self] . identifier[end_variable] | def convert(self, vroot, entry_variables):
"""
All functions are replaced with the same `new` function.
Args:
vroot (:obj:`Variable`): NNabla Variable
entry_variables (:obj:`Variable`): Entry variable from which the conversion starts.
"""
self.graph_info = GraphInfo(vroot)
self.entry_variables = entry_variables
with nn.parameter_scope(self.name):
# Function loop in the forward order
for (t, func) in enumerate(self.graph_info.funcs):
# Activation check
if func.name in self.activation_functions:
activation_func = func
o = self._fixed_point_activation_conversion(activation_func)
continue # depends on [control=['if'], data=[]]
# Identity conversion
o = self._identity_conversion(func) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=[]]
self.end_variable = o
return self.end_variable |
def _connected(self, link_uri):
""" This callback is called form the Crazyflie API when a Crazyflie
has been connected and the TOCs have been downloaded."""
print('Connected to %s' % link_uri)
# Print the param TOC
p_toc = self._cf.param.toc.toc
for group in sorted(p_toc.keys()):
print('{}'.format(group))
for param in sorted(p_toc[group].keys()):
print('\t{}'.format(param))
self._param_check_list.append('{0}.{1}'.format(group, param))
self._param_groups.append('{}'.format(group))
# For every group, register the callback
self._cf.param.add_update_callback(group=group, name=None,
cb=self._param_callback)
# You can also register a callback for a specific group.name combo
self._cf.param.add_update_callback(group='cpu', name='flash',
cb=self._cpu_flash_callback)
print('') | def function[_connected, parameter[self, link_uri]]:
constant[ This callback is called form the Crazyflie API when a Crazyflie
has been connected and the TOCs have been downloaded.]
call[name[print], parameter[binary_operation[constant[Connected to %s] <ast.Mod object at 0x7da2590d6920> name[link_uri]]]]
variable[p_toc] assign[=] name[self]._cf.param.toc.toc
for taget[name[group]] in starred[call[name[sorted], parameter[call[name[p_toc].keys, parameter[]]]]] begin[:]
call[name[print], parameter[call[constant[{}].format, parameter[name[group]]]]]
for taget[name[param]] in starred[call[name[sorted], parameter[call[call[name[p_toc]][name[group]].keys, parameter[]]]]] begin[:]
call[name[print], parameter[call[constant[ {}].format, parameter[name[param]]]]]
call[name[self]._param_check_list.append, parameter[call[constant[{0}.{1}].format, parameter[name[group], name[param]]]]]
call[name[self]._param_groups.append, parameter[call[constant[{}].format, parameter[name[group]]]]]
call[name[self]._cf.param.add_update_callback, parameter[]]
call[name[self]._cf.param.add_update_callback, parameter[]]
call[name[print], parameter[constant[]]] | keyword[def] identifier[_connected] ( identifier[self] , identifier[link_uri] ):
literal[string]
identifier[print] ( literal[string] % identifier[link_uri] )
identifier[p_toc] = identifier[self] . identifier[_cf] . identifier[param] . identifier[toc] . identifier[toc]
keyword[for] identifier[group] keyword[in] identifier[sorted] ( identifier[p_toc] . identifier[keys] ()):
identifier[print] ( literal[string] . identifier[format] ( identifier[group] ))
keyword[for] identifier[param] keyword[in] identifier[sorted] ( identifier[p_toc] [ identifier[group] ]. identifier[keys] ()):
identifier[print] ( literal[string] . identifier[format] ( identifier[param] ))
identifier[self] . identifier[_param_check_list] . identifier[append] ( literal[string] . identifier[format] ( identifier[group] , identifier[param] ))
identifier[self] . identifier[_param_groups] . identifier[append] ( literal[string] . identifier[format] ( identifier[group] ))
identifier[self] . identifier[_cf] . identifier[param] . identifier[add_update_callback] ( identifier[group] = identifier[group] , identifier[name] = keyword[None] ,
identifier[cb] = identifier[self] . identifier[_param_callback] )
identifier[self] . identifier[_cf] . identifier[param] . identifier[add_update_callback] ( identifier[group] = literal[string] , identifier[name] = literal[string] ,
identifier[cb] = identifier[self] . identifier[_cpu_flash_callback] )
identifier[print] ( literal[string] ) | def _connected(self, link_uri):
""" This callback is called form the Crazyflie API when a Crazyflie
has been connected and the TOCs have been downloaded."""
print('Connected to %s' % link_uri)
# Print the param TOC
p_toc = self._cf.param.toc.toc
for group in sorted(p_toc.keys()):
print('{}'.format(group))
for param in sorted(p_toc[group].keys()):
print('\t{}'.format(param))
self._param_check_list.append('{0}.{1}'.format(group, param)) # depends on [control=['for'], data=['param']]
self._param_groups.append('{}'.format(group))
# For every group, register the callback
self._cf.param.add_update_callback(group=group, name=None, cb=self._param_callback) # depends on [control=['for'], data=['group']]
# You can also register a callback for a specific group.name combo
self._cf.param.add_update_callback(group='cpu', name='flash', cb=self._cpu_flash_callback)
print('') |
def onShortcutUnindentWithBackspace(self):
"""Backspace pressed, unindent
"""
assert self._qpart.textBeforeCursor().endswith(self.text())
charsToRemove = len(self._qpart.textBeforeCursor()) % len(self.text())
if charsToRemove == 0:
charsToRemove = len(self.text())
cursor = self._qpart.textCursor()
cursor.setPosition(cursor.position() - charsToRemove, QTextCursor.KeepAnchor)
cursor.removeSelectedText() | def function[onShortcutUnindentWithBackspace, parameter[self]]:
constant[Backspace pressed, unindent
]
assert[call[call[name[self]._qpart.textBeforeCursor, parameter[]].endswith, parameter[call[name[self].text, parameter[]]]]]
variable[charsToRemove] assign[=] binary_operation[call[name[len], parameter[call[name[self]._qpart.textBeforeCursor, parameter[]]]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[call[name[self].text, parameter[]]]]]
if compare[name[charsToRemove] equal[==] constant[0]] begin[:]
variable[charsToRemove] assign[=] call[name[len], parameter[call[name[self].text, parameter[]]]]
variable[cursor] assign[=] call[name[self]._qpart.textCursor, parameter[]]
call[name[cursor].setPosition, parameter[binary_operation[call[name[cursor].position, parameter[]] - name[charsToRemove]], name[QTextCursor].KeepAnchor]]
call[name[cursor].removeSelectedText, parameter[]] | keyword[def] identifier[onShortcutUnindentWithBackspace] ( identifier[self] ):
literal[string]
keyword[assert] identifier[self] . identifier[_qpart] . identifier[textBeforeCursor] (). identifier[endswith] ( identifier[self] . identifier[text] ())
identifier[charsToRemove] = identifier[len] ( identifier[self] . identifier[_qpart] . identifier[textBeforeCursor] ())% identifier[len] ( identifier[self] . identifier[text] ())
keyword[if] identifier[charsToRemove] == literal[int] :
identifier[charsToRemove] = identifier[len] ( identifier[self] . identifier[text] ())
identifier[cursor] = identifier[self] . identifier[_qpart] . identifier[textCursor] ()
identifier[cursor] . identifier[setPosition] ( identifier[cursor] . identifier[position] ()- identifier[charsToRemove] , identifier[QTextCursor] . identifier[KeepAnchor] )
identifier[cursor] . identifier[removeSelectedText] () | def onShortcutUnindentWithBackspace(self):
"""Backspace pressed, unindent
"""
assert self._qpart.textBeforeCursor().endswith(self.text())
charsToRemove = len(self._qpart.textBeforeCursor()) % len(self.text())
if charsToRemove == 0:
charsToRemove = len(self.text()) # depends on [control=['if'], data=['charsToRemove']]
cursor = self._qpart.textCursor()
cursor.setPosition(cursor.position() - charsToRemove, QTextCursor.KeepAnchor)
cursor.removeSelectedText() |
def render(self, container, descender, state, space_below=0,
first_line_only=False):
"""Typeset the paragraph
The paragraph is typeset in the given container starting below the
current cursor position of the container. When the end of the container
is reached, the rendering state is preserved to continue setting the
rest of the paragraph when this method is called with a new container.
Args:
container (Container): the container to render to
descender (float or None): descender height of the preceeding line
state (ParagraphState): the state where rendering will continue
first_line_only (bool): typeset only the first line
"""
indent_first = (float(self.get_style('indent_first', container))
if state.initial else 0)
line_width = float(container.width)
line_spacing = self.get_style('line_spacing', container)
text_align = self.get_style('text_align', container)
tab_stops = self.get_style('tab_stops', container)
if not tab_stops:
tab_width = 2 * self.get_style('font_size', container)
tab_stops = DefaultTabStops(tab_width)
# `saved_state` is updated after successfully rendering each line, so
# that when `container` overflows on rendering a line, the words in that
# line are yielded again on the next typeset() call.
saved_state = copy(state)
prev_state = copy(state)
max_line_width = 0
def typeset_line(line, last_line=False):
"""Typeset `line` and, if no exception is raised, update the
paragraph's internal rendering state."""
nonlocal state, saved_state, max_line_width, descender, space_below
max_line_width = max(max_line_width, line.cursor)
advance = (line.ascender(container) if descender is None
else line_spacing.advance(line, descender, container))
descender = line.descender(container) # descender <= 0
line.advance = advance
total_advance = advance + (space_below if last_line else 0) - descender
if container.remaining_height < total_advance:
raise EndOfContainer(saved_state)
assert container.advance2(advance)
line.typeset(container, text_align, last_line)
assert container.advance2(- descender)
state.initial = False
saved_state = copy(state)
return Line(tab_stops, line_width, container,
significant_whitespace=self.significant_whitespace)
first_line = line = Line(tab_stops, line_width, container,
indent_first, self.significant_whitespace)
while True:
try:
word = state.next_word()
except StopIteration:
break
try:
if not line.append_word(word):
for first, second in word.hyphenate(container):
if line.append_word(first):
state.prepend_word(second) # prepend second part
break
else:
state = prev_state
line = typeset_line(line)
if first_line_only:
break
continue
except NewLineException:
line.append(word.glyphs_span)
line = typeset_line(line, last_line=True)
if first_line_only:
break
prev_state = copy(state)
if line:
typeset_line(line, last_line=True)
# Correct the horizontal text placement for auto-width paragraphs
if self._width(container) == FlowableWidth.AUTO:
if text_align == TextAlign.CENTER:
container.left -= float(container.width - max_line_width) / 2
if text_align == TextAlign.RIGHT:
container.left -= float(container.width - max_line_width)
return max_line_width, first_line.advance, descender | def function[render, parameter[self, container, descender, state, space_below, first_line_only]]:
constant[Typeset the paragraph
The paragraph is typeset in the given container starting below the
current cursor position of the container. When the end of the container
is reached, the rendering state is preserved to continue setting the
rest of the paragraph when this method is called with a new container.
Args:
container (Container): the container to render to
descender (float or None): descender height of the preceeding line
state (ParagraphState): the state where rendering will continue
first_line_only (bool): typeset only the first line
]
variable[indent_first] assign[=] <ast.IfExp object at 0x7da2041dab30>
variable[line_width] assign[=] call[name[float], parameter[name[container].width]]
variable[line_spacing] assign[=] call[name[self].get_style, parameter[constant[line_spacing], name[container]]]
variable[text_align] assign[=] call[name[self].get_style, parameter[constant[text_align], name[container]]]
variable[tab_stops] assign[=] call[name[self].get_style, parameter[constant[tab_stops], name[container]]]
if <ast.UnaryOp object at 0x7da2041d9900> begin[:]
variable[tab_width] assign[=] binary_operation[constant[2] * call[name[self].get_style, parameter[constant[font_size], name[container]]]]
variable[tab_stops] assign[=] call[name[DefaultTabStops], parameter[name[tab_width]]]
variable[saved_state] assign[=] call[name[copy], parameter[name[state]]]
variable[prev_state] assign[=] call[name[copy], parameter[name[state]]]
variable[max_line_width] assign[=] constant[0]
def function[typeset_line, parameter[line, last_line]]:
constant[Typeset `line` and, if no exception is raised, update the
paragraph's internal rendering state.]
<ast.Nonlocal object at 0x7da2041d9f00>
variable[max_line_width] assign[=] call[name[max], parameter[name[max_line_width], name[line].cursor]]
variable[advance] assign[=] <ast.IfExp object at 0x7da2041d8610>
variable[descender] assign[=] call[name[line].descender, parameter[name[container]]]
name[line].advance assign[=] name[advance]
variable[total_advance] assign[=] binary_operation[binary_operation[name[advance] + <ast.IfExp object at 0x7da2041dacb0>] - name[descender]]
if compare[name[container].remaining_height less[<] name[total_advance]] begin[:]
<ast.Raise object at 0x7da2041d97b0>
assert[call[name[container].advance2, parameter[name[advance]]]]
call[name[line].typeset, parameter[name[container], name[text_align], name[last_line]]]
assert[call[name[container].advance2, parameter[<ast.UnaryOp object at 0x7da2041da950>]]]
name[state].initial assign[=] constant[False]
variable[saved_state] assign[=] call[name[copy], parameter[name[state]]]
return[call[name[Line], parameter[name[tab_stops], name[line_width], name[container]]]]
variable[first_line] assign[=] call[name[Line], parameter[name[tab_stops], name[line_width], name[container], name[indent_first], name[self].significant_whitespace]]
while constant[True] begin[:]
<ast.Try object at 0x7da2041db340>
<ast.Try object at 0x7da2041da530>
variable[prev_state] assign[=] call[name[copy], parameter[name[state]]]
if name[line] begin[:]
call[name[typeset_line], parameter[name[line]]]
if compare[call[name[self]._width, parameter[name[container]]] equal[==] name[FlowableWidth].AUTO] begin[:]
if compare[name[text_align] equal[==] name[TextAlign].CENTER] begin[:]
<ast.AugAssign object at 0x7da2041db1f0>
if compare[name[text_align] equal[==] name[TextAlign].RIGHT] begin[:]
<ast.AugAssign object at 0x7da2041da110>
return[tuple[[<ast.Name object at 0x7da2041dbb50>, <ast.Attribute object at 0x7da2041d9270>, <ast.Name object at 0x7da2041d90c0>]]] | keyword[def] identifier[render] ( identifier[self] , identifier[container] , identifier[descender] , identifier[state] , identifier[space_below] = literal[int] ,
identifier[first_line_only] = keyword[False] ):
literal[string]
identifier[indent_first] =( identifier[float] ( identifier[self] . identifier[get_style] ( literal[string] , identifier[container] ))
keyword[if] identifier[state] . identifier[initial] keyword[else] literal[int] )
identifier[line_width] = identifier[float] ( identifier[container] . identifier[width] )
identifier[line_spacing] = identifier[self] . identifier[get_style] ( literal[string] , identifier[container] )
identifier[text_align] = identifier[self] . identifier[get_style] ( literal[string] , identifier[container] )
identifier[tab_stops] = identifier[self] . identifier[get_style] ( literal[string] , identifier[container] )
keyword[if] keyword[not] identifier[tab_stops] :
identifier[tab_width] = literal[int] * identifier[self] . identifier[get_style] ( literal[string] , identifier[container] )
identifier[tab_stops] = identifier[DefaultTabStops] ( identifier[tab_width] )
identifier[saved_state] = identifier[copy] ( identifier[state] )
identifier[prev_state] = identifier[copy] ( identifier[state] )
identifier[max_line_width] = literal[int]
keyword[def] identifier[typeset_line] ( identifier[line] , identifier[last_line] = keyword[False] ):
literal[string]
keyword[nonlocal] identifier[state] , identifier[saved_state] , identifier[max_line_width] , identifier[descender] , identifier[space_below]
identifier[max_line_width] = identifier[max] ( identifier[max_line_width] , identifier[line] . identifier[cursor] )
identifier[advance] =( identifier[line] . identifier[ascender] ( identifier[container] ) keyword[if] identifier[descender] keyword[is] keyword[None]
keyword[else] identifier[line_spacing] . identifier[advance] ( identifier[line] , identifier[descender] , identifier[container] ))
identifier[descender] = identifier[line] . identifier[descender] ( identifier[container] )
identifier[line] . identifier[advance] = identifier[advance]
identifier[total_advance] = identifier[advance] +( identifier[space_below] keyword[if] identifier[last_line] keyword[else] literal[int] )- identifier[descender]
keyword[if] identifier[container] . identifier[remaining_height] < identifier[total_advance] :
keyword[raise] identifier[EndOfContainer] ( identifier[saved_state] )
keyword[assert] identifier[container] . identifier[advance2] ( identifier[advance] )
identifier[line] . identifier[typeset] ( identifier[container] , identifier[text_align] , identifier[last_line] )
keyword[assert] identifier[container] . identifier[advance2] (- identifier[descender] )
identifier[state] . identifier[initial] = keyword[False]
identifier[saved_state] = identifier[copy] ( identifier[state] )
keyword[return] identifier[Line] ( identifier[tab_stops] , identifier[line_width] , identifier[container] ,
identifier[significant_whitespace] = identifier[self] . identifier[significant_whitespace] )
identifier[first_line] = identifier[line] = identifier[Line] ( identifier[tab_stops] , identifier[line_width] , identifier[container] ,
identifier[indent_first] , identifier[self] . identifier[significant_whitespace] )
keyword[while] keyword[True] :
keyword[try] :
identifier[word] = identifier[state] . identifier[next_word] ()
keyword[except] identifier[StopIteration] :
keyword[break]
keyword[try] :
keyword[if] keyword[not] identifier[line] . identifier[append_word] ( identifier[word] ):
keyword[for] identifier[first] , identifier[second] keyword[in] identifier[word] . identifier[hyphenate] ( identifier[container] ):
keyword[if] identifier[line] . identifier[append_word] ( identifier[first] ):
identifier[state] . identifier[prepend_word] ( identifier[second] )
keyword[break]
keyword[else] :
identifier[state] = identifier[prev_state]
identifier[line] = identifier[typeset_line] ( identifier[line] )
keyword[if] identifier[first_line_only] :
keyword[break]
keyword[continue]
keyword[except] identifier[NewLineException] :
identifier[line] . identifier[append] ( identifier[word] . identifier[glyphs_span] )
identifier[line] = identifier[typeset_line] ( identifier[line] , identifier[last_line] = keyword[True] )
keyword[if] identifier[first_line_only] :
keyword[break]
identifier[prev_state] = identifier[copy] ( identifier[state] )
keyword[if] identifier[line] :
identifier[typeset_line] ( identifier[line] , identifier[last_line] = keyword[True] )
keyword[if] identifier[self] . identifier[_width] ( identifier[container] )== identifier[FlowableWidth] . identifier[AUTO] :
keyword[if] identifier[text_align] == identifier[TextAlign] . identifier[CENTER] :
identifier[container] . identifier[left] -= identifier[float] ( identifier[container] . identifier[width] - identifier[max_line_width] )/ literal[int]
keyword[if] identifier[text_align] == identifier[TextAlign] . identifier[RIGHT] :
identifier[container] . identifier[left] -= identifier[float] ( identifier[container] . identifier[width] - identifier[max_line_width] )
keyword[return] identifier[max_line_width] , identifier[first_line] . identifier[advance] , identifier[descender] | def render(self, container, descender, state, space_below=0, first_line_only=False):
"""Typeset the paragraph
The paragraph is typeset in the given container starting below the
current cursor position of the container. When the end of the container
is reached, the rendering state is preserved to continue setting the
rest of the paragraph when this method is called with a new container.
Args:
container (Container): the container to render to
descender (float or None): descender height of the preceeding line
state (ParagraphState): the state where rendering will continue
first_line_only (bool): typeset only the first line
"""
indent_first = float(self.get_style('indent_first', container)) if state.initial else 0
line_width = float(container.width)
line_spacing = self.get_style('line_spacing', container)
text_align = self.get_style('text_align', container)
tab_stops = self.get_style('tab_stops', container)
if not tab_stops:
tab_width = 2 * self.get_style('font_size', container)
tab_stops = DefaultTabStops(tab_width) # depends on [control=['if'], data=[]]
# `saved_state` is updated after successfully rendering each line, so
# that when `container` overflows on rendering a line, the words in that
# line are yielded again on the next typeset() call.
saved_state = copy(state)
prev_state = copy(state)
max_line_width = 0
def typeset_line(line, last_line=False):
"""Typeset `line` and, if no exception is raised, update the
paragraph's internal rendering state."""
nonlocal state, saved_state, max_line_width, descender, space_below
max_line_width = max(max_line_width, line.cursor)
advance = line.ascender(container) if descender is None else line_spacing.advance(line, descender, container)
descender = line.descender(container) # descender <= 0
line.advance = advance
total_advance = advance + (space_below if last_line else 0) - descender
if container.remaining_height < total_advance:
raise EndOfContainer(saved_state) # depends on [control=['if'], data=[]]
assert container.advance2(advance)
line.typeset(container, text_align, last_line)
assert container.advance2(-descender)
state.initial = False
saved_state = copy(state)
return Line(tab_stops, line_width, container, significant_whitespace=self.significant_whitespace)
first_line = line = Line(tab_stops, line_width, container, indent_first, self.significant_whitespace)
while True:
try:
word = state.next_word() # depends on [control=['try'], data=[]]
except StopIteration:
break # depends on [control=['except'], data=[]]
try:
if not line.append_word(word):
for (first, second) in word.hyphenate(container):
if line.append_word(first):
state.prepend_word(second) # prepend second part
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
else:
state = prev_state
line = typeset_line(line)
if first_line_only:
break # depends on [control=['if'], data=[]]
continue # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except NewLineException:
line.append(word.glyphs_span)
line = typeset_line(line, last_line=True)
if first_line_only:
break # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
prev_state = copy(state) # depends on [control=['while'], data=[]]
if line:
typeset_line(line, last_line=True) # depends on [control=['if'], data=[]]
# Correct the horizontal text placement for auto-width paragraphs
if self._width(container) == FlowableWidth.AUTO:
if text_align == TextAlign.CENTER:
container.left -= float(container.width - max_line_width) / 2 # depends on [control=['if'], data=[]]
if text_align == TextAlign.RIGHT:
container.left -= float(container.width - max_line_width) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return (max_line_width, first_line.advance, descender) |
def cmd(send, msg, _):
"""SHA512 hashes something.
Syntax: {command} <msg>
"""
msg = msg.encode('utf-8')
send(hashlib.sha512(msg).hexdigest()) | def function[cmd, parameter[send, msg, _]]:
constant[SHA512 hashes something.
Syntax: {command} <msg>
]
variable[msg] assign[=] call[name[msg].encode, parameter[constant[utf-8]]]
call[name[send], parameter[call[call[name[hashlib].sha512, parameter[name[msg]]].hexdigest, parameter[]]]] | keyword[def] identifier[cmd] ( identifier[send] , identifier[msg] , identifier[_] ):
literal[string]
identifier[msg] = identifier[msg] . identifier[encode] ( literal[string] )
identifier[send] ( identifier[hashlib] . identifier[sha512] ( identifier[msg] ). identifier[hexdigest] ()) | def cmd(send, msg, _):
"""SHA512 hashes something.
Syntax: {command} <msg>
"""
msg = msg.encode('utf-8')
send(hashlib.sha512(msg).hexdigest()) |
def to_links_df(regressor_type,
regressor_kwargs,
trained_regressor,
tf_matrix_gene_names,
target_gene_name):
"""
:param regressor_type: string. Case insensitive.
:param regressor_kwargs: dict of key-value pairs that configures the regressor.
:param trained_regressor: the trained model from which to extract the feature importances.
:param tf_matrix_gene_names: the list of names corresponding to the columns of the tf_matrix used to train the model.
:param target_gene_name: the name of the target gene.
:return: a Pandas DataFrame['TF', 'target', 'importance'] representing inferred regulatory links and their
connection strength.
"""
def pythonic():
# feature_importances = trained_regressor.feature_importances_
feature_importances = to_feature_importances(regressor_type, regressor_kwargs, trained_regressor)
links_df = pd.DataFrame({'TF': tf_matrix_gene_names, 'importance': feature_importances})
links_df['target'] = target_gene_name
clean_links_df = links_df[links_df.importance > 0].sort_values(by='importance', ascending=False)
return clean_links_df[['TF', 'target', 'importance']]
if is_sklearn_regressor(regressor_type):
return pythonic()
elif is_xgboost_regressor(regressor_type):
raise ValueError('XGB regressor not yet supported')
else:
raise ValueError('Unsupported regressor type: ' + regressor_type) | def function[to_links_df, parameter[regressor_type, regressor_kwargs, trained_regressor, tf_matrix_gene_names, target_gene_name]]:
constant[
:param regressor_type: string. Case insensitive.
:param regressor_kwargs: dict of key-value pairs that configures the regressor.
:param trained_regressor: the trained model from which to extract the feature importances.
:param tf_matrix_gene_names: the list of names corresponding to the columns of the tf_matrix used to train the model.
:param target_gene_name: the name of the target gene.
:return: a Pandas DataFrame['TF', 'target', 'importance'] representing inferred regulatory links and their
connection strength.
]
def function[pythonic, parameter[]]:
variable[feature_importances] assign[=] call[name[to_feature_importances], parameter[name[regressor_type], name[regressor_kwargs], name[trained_regressor]]]
variable[links_df] assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da207f03c10>, <ast.Constant object at 0x7da207f007f0>], [<ast.Name object at 0x7da207f03af0>, <ast.Name object at 0x7da207f02ec0>]]]]
call[name[links_df]][constant[target]] assign[=] name[target_gene_name]
variable[clean_links_df] assign[=] call[call[name[links_df]][compare[name[links_df].importance greater[>] constant[0]]].sort_values, parameter[]]
return[call[name[clean_links_df]][list[[<ast.Constant object at 0x7da207f00070>, <ast.Constant object at 0x7da207f02290>, <ast.Constant object at 0x7da207f03d60>]]]]
if call[name[is_sklearn_regressor], parameter[name[regressor_type]]] begin[:]
return[call[name[pythonic], parameter[]]] | keyword[def] identifier[to_links_df] ( identifier[regressor_type] ,
identifier[regressor_kwargs] ,
identifier[trained_regressor] ,
identifier[tf_matrix_gene_names] ,
identifier[target_gene_name] ):
literal[string]
keyword[def] identifier[pythonic] ():
identifier[feature_importances] = identifier[to_feature_importances] ( identifier[regressor_type] , identifier[regressor_kwargs] , identifier[trained_regressor] )
identifier[links_df] = identifier[pd] . identifier[DataFrame] ({ literal[string] : identifier[tf_matrix_gene_names] , literal[string] : identifier[feature_importances] })
identifier[links_df] [ literal[string] ]= identifier[target_gene_name]
identifier[clean_links_df] = identifier[links_df] [ identifier[links_df] . identifier[importance] > literal[int] ]. identifier[sort_values] ( identifier[by] = literal[string] , identifier[ascending] = keyword[False] )
keyword[return] identifier[clean_links_df] [[ literal[string] , literal[string] , literal[string] ]]
keyword[if] identifier[is_sklearn_regressor] ( identifier[regressor_type] ):
keyword[return] identifier[pythonic] ()
keyword[elif] identifier[is_xgboost_regressor] ( identifier[regressor_type] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[regressor_type] ) | def to_links_df(regressor_type, regressor_kwargs, trained_regressor, tf_matrix_gene_names, target_gene_name):
"""
:param regressor_type: string. Case insensitive.
:param regressor_kwargs: dict of key-value pairs that configures the regressor.
:param trained_regressor: the trained model from which to extract the feature importances.
:param tf_matrix_gene_names: the list of names corresponding to the columns of the tf_matrix used to train the model.
:param target_gene_name: the name of the target gene.
:return: a Pandas DataFrame['TF', 'target', 'importance'] representing inferred regulatory links and their
connection strength.
"""
def pythonic():
# feature_importances = trained_regressor.feature_importances_
feature_importances = to_feature_importances(regressor_type, regressor_kwargs, trained_regressor)
links_df = pd.DataFrame({'TF': tf_matrix_gene_names, 'importance': feature_importances})
links_df['target'] = target_gene_name
clean_links_df = links_df[links_df.importance > 0].sort_values(by='importance', ascending=False)
return clean_links_df[['TF', 'target', 'importance']]
if is_sklearn_regressor(regressor_type):
return pythonic() # depends on [control=['if'], data=[]]
elif is_xgboost_regressor(regressor_type):
raise ValueError('XGB regressor not yet supported') # depends on [control=['if'], data=[]]
else:
raise ValueError('Unsupported regressor type: ' + regressor_type) |
def __save(self, b):
'''
saves the given data to the buffer
:param b:
'''
newbufferidx = (self.__bufferidx + len(b))
self.__buffer[self.__bufferidx:newbufferidx] = b
#update buffer index
self.__bufferidx = newbufferidx | def function[__save, parameter[self, b]]:
constant[
saves the given data to the buffer
:param b:
]
variable[newbufferidx] assign[=] binary_operation[name[self].__bufferidx + call[name[len], parameter[name[b]]]]
call[name[self].__buffer][<ast.Slice object at 0x7da1b1588eb0>] assign[=] name[b]
name[self].__bufferidx assign[=] name[newbufferidx] | keyword[def] identifier[__save] ( identifier[self] , identifier[b] ):
literal[string]
identifier[newbufferidx] =( identifier[self] . identifier[__bufferidx] + identifier[len] ( identifier[b] ))
identifier[self] . identifier[__buffer] [ identifier[self] . identifier[__bufferidx] : identifier[newbufferidx] ]= identifier[b]
identifier[self] . identifier[__bufferidx] = identifier[newbufferidx] | def __save(self, b):
"""
saves the given data to the buffer
:param b:
"""
newbufferidx = self.__bufferidx + len(b)
self.__buffer[self.__bufferidx:newbufferidx] = b
#update buffer index
self.__bufferidx = newbufferidx |
def authenticate_credentials(self, userargs, password, request=None):
"""
Authenticate the userargs and password against Django auth backends.
The "userargs" string may be just the username, or a querystring-encoded set of params.
"""
credentials = {
'password': password
}
if "=" not in userargs:
# if it doesn't seem to be in querystring format, just use it as the username
credentials[get_user_model().USERNAME_FIELD] = userargs
else:
# parse out the user args from querystring format into the credentials dict
for arg in userargs.split("&"):
key, val = arg.split("=")
credentials[key] = val
# authenticate the user via Django's auth backends
user = authenticate(**credentials)
if user is None:
raise exceptions.AuthenticationFailed('Invalid credentials.')
if not user.is_active:
raise exceptions.AuthenticationFailed('User inactive or deleted.')
return (user, None) | def function[authenticate_credentials, parameter[self, userargs, password, request]]:
constant[
Authenticate the userargs and password against Django auth backends.
The "userargs" string may be just the username, or a querystring-encoded set of params.
]
variable[credentials] assign[=] dictionary[[<ast.Constant object at 0x7da1b00fb010>], [<ast.Name object at 0x7da1b00fa5c0>]]
if compare[constant[=] <ast.NotIn object at 0x7da2590d7190> name[userargs]] begin[:]
call[name[credentials]][call[name[get_user_model], parameter[]].USERNAME_FIELD] assign[=] name[userargs]
variable[user] assign[=] call[name[authenticate], parameter[]]
if compare[name[user] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b00faf20>
if <ast.UnaryOp object at 0x7da1b00f9810> begin[:]
<ast.Raise object at 0x7da1b00fbd30>
return[tuple[[<ast.Name object at 0x7da1b00f89d0>, <ast.Constant object at 0x7da1b00f89a0>]]] | keyword[def] identifier[authenticate_credentials] ( identifier[self] , identifier[userargs] , identifier[password] , identifier[request] = keyword[None] ):
literal[string]
identifier[credentials] ={
literal[string] : identifier[password]
}
keyword[if] literal[string] keyword[not] keyword[in] identifier[userargs] :
identifier[credentials] [ identifier[get_user_model] (). identifier[USERNAME_FIELD] ]= identifier[userargs]
keyword[else] :
keyword[for] identifier[arg] keyword[in] identifier[userargs] . identifier[split] ( literal[string] ):
identifier[key] , identifier[val] = identifier[arg] . identifier[split] ( literal[string] )
identifier[credentials] [ identifier[key] ]= identifier[val]
identifier[user] = identifier[authenticate] (** identifier[credentials] )
keyword[if] identifier[user] keyword[is] keyword[None] :
keyword[raise] identifier[exceptions] . identifier[AuthenticationFailed] ( literal[string] )
keyword[if] keyword[not] identifier[user] . identifier[is_active] :
keyword[raise] identifier[exceptions] . identifier[AuthenticationFailed] ( literal[string] )
keyword[return] ( identifier[user] , keyword[None] ) | def authenticate_credentials(self, userargs, password, request=None):
"""
Authenticate the userargs and password against Django auth backends.
The "userargs" string may be just the username, or a querystring-encoded set of params.
"""
credentials = {'password': password}
if '=' not in userargs:
# if it doesn't seem to be in querystring format, just use it as the username
credentials[get_user_model().USERNAME_FIELD] = userargs # depends on [control=['if'], data=['userargs']]
else:
# parse out the user args from querystring format into the credentials dict
for arg in userargs.split('&'):
(key, val) = arg.split('=')
credentials[key] = val # depends on [control=['for'], data=['arg']]
# authenticate the user via Django's auth backends
user = authenticate(**credentials)
if user is None:
raise exceptions.AuthenticationFailed('Invalid credentials.') # depends on [control=['if'], data=[]]
if not user.is_active:
raise exceptions.AuthenticationFailed('User inactive or deleted.') # depends on [control=['if'], data=[]]
return (user, None) |
def computeHeteroPercentage(fileName):
"""Computes the heterozygosity percentage.
:param fileName: the name of the input file.
:type fileName: str
Reads the ``ped`` file created by Plink using the ``recodeA`` options (see
:py:func:`createPedChr23UsingPlink`) and computes the heterozygosity
percentage on the chromosome ``23``.
"""
outputFile = None
try:
outputFile = open(fileName + ".hetero", "w")
except IOError:
msg = "%s: can't write file" % fileName + ".hetero"
raise ProgramError(msg)
print >>outputFile, "\t".join(["PED", "ID", "SEX", "HETERO"])
try:
toPrint = []
with open(fileName, "r") as inputFile:
for i, line in enumerate(inputFile):
row = line.rstrip("\r\n").split(" ")
if i != 0:
# This is data
genotypes = np.array(row[6:])
genotypes = genotypes[np.where(genotypes != "NA")]
nbMarker = len(genotypes)
nbHetero = len(np.where(genotypes == "1")[0])
percentHetero = -9999
if nbMarker != 0:
percentHetero = nbHetero / float(nbMarker)
toPrint.append((row[0], row[1], row[4], percentHetero))
# Sorting the data
toPrint.sort(reverse=True, key=lambda values: values[3])
# Printing the data
for row in toPrint:
value = list(row)
if value[3] == -9999:
value[3] = "ALL_NA"
else:
value[3] = str(value[3])
print >>outputFile, "\t".join(value)
except IOError:
msg = "%(fileName)s: no such file" % locals()
raise ProgramError(msg)
# Closing the output file
outputFile.close() | def function[computeHeteroPercentage, parameter[fileName]]:
constant[Computes the heterozygosity percentage.
:param fileName: the name of the input file.
:type fileName: str
Reads the ``ped`` file created by Plink using the ``recodeA`` options (see
:py:func:`createPedChr23UsingPlink`) and computes the heterozygosity
percentage on the chromosome ``23``.
]
variable[outputFile] assign[=] constant[None]
<ast.Try object at 0x7da1b09705e0>
tuple[[<ast.BinOp object at 0x7da1b0a1c3a0>, <ast.Call object at 0x7da1b0a1c700>]]
<ast.Try object at 0x7da1b0a1d420>
call[name[outputFile].close, parameter[]] | keyword[def] identifier[computeHeteroPercentage] ( identifier[fileName] ):
literal[string]
identifier[outputFile] = keyword[None]
keyword[try] :
identifier[outputFile] = identifier[open] ( identifier[fileName] + literal[string] , literal[string] )
keyword[except] identifier[IOError] :
identifier[msg] = literal[string] % identifier[fileName] + literal[string]
keyword[raise] identifier[ProgramError] ( identifier[msg] )
identifier[print] >> identifier[outputFile] , literal[string] . identifier[join] ([ literal[string] , literal[string] , literal[string] , literal[string] ])
keyword[try] :
identifier[toPrint] =[]
keyword[with] identifier[open] ( identifier[fileName] , literal[string] ) keyword[as] identifier[inputFile] :
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[inputFile] ):
identifier[row] = identifier[line] . identifier[rstrip] ( literal[string] ). identifier[split] ( literal[string] )
keyword[if] identifier[i] != literal[int] :
identifier[genotypes] = identifier[np] . identifier[array] ( identifier[row] [ literal[int] :])
identifier[genotypes] = identifier[genotypes] [ identifier[np] . identifier[where] ( identifier[genotypes] != literal[string] )]
identifier[nbMarker] = identifier[len] ( identifier[genotypes] )
identifier[nbHetero] = identifier[len] ( identifier[np] . identifier[where] ( identifier[genotypes] == literal[string] )[ literal[int] ])
identifier[percentHetero] =- literal[int]
keyword[if] identifier[nbMarker] != literal[int] :
identifier[percentHetero] = identifier[nbHetero] / identifier[float] ( identifier[nbMarker] )
identifier[toPrint] . identifier[append] (( identifier[row] [ literal[int] ], identifier[row] [ literal[int] ], identifier[row] [ literal[int] ], identifier[percentHetero] ))
identifier[toPrint] . identifier[sort] ( identifier[reverse] = keyword[True] , identifier[key] = keyword[lambda] identifier[values] : identifier[values] [ literal[int] ])
keyword[for] identifier[row] keyword[in] identifier[toPrint] :
identifier[value] = identifier[list] ( identifier[row] )
keyword[if] identifier[value] [ literal[int] ]==- literal[int] :
identifier[value] [ literal[int] ]= literal[string]
keyword[else] :
identifier[value] [ literal[int] ]= identifier[str] ( identifier[value] [ literal[int] ])
identifier[print] >> identifier[outputFile] , literal[string] . identifier[join] ( identifier[value] )
keyword[except] identifier[IOError] :
identifier[msg] = literal[string] % identifier[locals] ()
keyword[raise] identifier[ProgramError] ( identifier[msg] )
identifier[outputFile] . identifier[close] () | def computeHeteroPercentage(fileName):
"""Computes the heterozygosity percentage.
:param fileName: the name of the input file.
:type fileName: str
Reads the ``ped`` file created by Plink using the ``recodeA`` options (see
:py:func:`createPedChr23UsingPlink`) and computes the heterozygosity
percentage on the chromosome ``23``.
"""
outputFile = None
try:
outputFile = open(fileName + '.hetero', 'w') # depends on [control=['try'], data=[]]
except IOError:
msg = "%s: can't write file" % fileName + '.hetero'
raise ProgramError(msg) # depends on [control=['except'], data=[]]
(print >> outputFile, '\t'.join(['PED', 'ID', 'SEX', 'HETERO']))
try:
toPrint = []
with open(fileName, 'r') as inputFile:
for (i, line) in enumerate(inputFile):
row = line.rstrip('\r\n').split(' ')
if i != 0:
# This is data
genotypes = np.array(row[6:])
genotypes = genotypes[np.where(genotypes != 'NA')]
nbMarker = len(genotypes)
nbHetero = len(np.where(genotypes == '1')[0])
percentHetero = -9999
if nbMarker != 0:
percentHetero = nbHetero / float(nbMarker) # depends on [control=['if'], data=['nbMarker']]
toPrint.append((row[0], row[1], row[4], percentHetero)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['inputFile']]
# Sorting the data
toPrint.sort(reverse=True, key=lambda values: values[3])
# Printing the data
for row in toPrint:
value = list(row)
if value[3] == -9999:
value[3] = 'ALL_NA' # depends on [control=['if'], data=[]]
else:
value[3] = str(value[3])
(print >> outputFile, '\t'.join(value)) # depends on [control=['for'], data=['row']] # depends on [control=['try'], data=[]]
except IOError:
msg = '%(fileName)s: no such file' % locals()
raise ProgramError(msg) # depends on [control=['except'], data=[]]
# Closing the output file
outputFile.close() |
def convert_table(self, table_name, cls_att=None):
'''
Returns the specified table as an orange example table.
:param table_name: table name to convert
:cls_att: class attribute name
:rtype: orange.ExampleTable
'''
import Orange
cols = self.db.cols[table_name]
attributes, metas, class_var = [], [], None
for col in cols:
att_type = self.orng_type(table_name, col)
if att_type == 'd':
att_vals = self.db.col_vals[table_name][col]
att_var = Orange.data.DiscreteVariable(str(col), values=[str(val) for val in att_vals])
elif att_type == 'c':
att_var = Orange.data.ContinuousVariable(str(col))
else:
att_var = Orange.data.StringVariable(str(col))
if col == cls_att:
if att_type == 'string':
raise Exception('Unsuitable data type for a target variable: %s' % att_type)
class_var = att_var
continue
elif att_type == 'string' or table_name in self.db.pkeys and col in self.db.pkeys[
table_name] or table_name in self.db.fkeys and col in self.db.fkeys[table_name]:
metas.append(att_var)
else:
attributes.append(att_var)
domain = Orange.data.Domain(attributes, class_vars=class_var, metas=metas)
# for meta in metas:
# domain.addmeta(Orange.newmetaid(), meta)
dataset = Orange.data.Table(domain)
dataset.name = table_name
for row in self.db.rows(table_name, cols):
example = Orange.data.Instance(domain)
for col, val in zip(cols, row):
example[str(col)] = str(val) if val != None else '?'
dataset.append(example)
return dataset | def function[convert_table, parameter[self, table_name, cls_att]]:
constant[
Returns the specified table as an orange example table.
:param table_name: table name to convert
:cls_att: class attribute name
:rtype: orange.ExampleTable
]
import module[Orange]
variable[cols] assign[=] call[name[self].db.cols][name[table_name]]
<ast.Tuple object at 0x7da20e9b0e20> assign[=] tuple[[<ast.List object at 0x7da20e9b1d20>, <ast.List object at 0x7da20e9b2860>, <ast.Constant object at 0x7da20e9b0160>]]
for taget[name[col]] in starred[name[cols]] begin[:]
variable[att_type] assign[=] call[name[self].orng_type, parameter[name[table_name], name[col]]]
if compare[name[att_type] equal[==] constant[d]] begin[:]
variable[att_vals] assign[=] call[call[name[self].db.col_vals][name[table_name]]][name[col]]
variable[att_var] assign[=] call[name[Orange].data.DiscreteVariable, parameter[call[name[str], parameter[name[col]]]]]
if compare[name[col] equal[==] name[cls_att]] begin[:]
if compare[name[att_type] equal[==] constant[string]] begin[:]
<ast.Raise object at 0x7da20e9b3610>
variable[class_var] assign[=] name[att_var]
continue
variable[domain] assign[=] call[name[Orange].data.Domain, parameter[name[attributes]]]
variable[dataset] assign[=] call[name[Orange].data.Table, parameter[name[domain]]]
name[dataset].name assign[=] name[table_name]
for taget[name[row]] in starred[call[name[self].db.rows, parameter[name[table_name], name[cols]]]] begin[:]
variable[example] assign[=] call[name[Orange].data.Instance, parameter[name[domain]]]
for taget[tuple[[<ast.Name object at 0x7da20e9b0520>, <ast.Name object at 0x7da20e9b39d0>]]] in starred[call[name[zip], parameter[name[cols], name[row]]]] begin[:]
call[name[example]][call[name[str], parameter[name[col]]]] assign[=] <ast.IfExp object at 0x7da20e9b0f70>
call[name[dataset].append, parameter[name[example]]]
return[name[dataset]] | keyword[def] identifier[convert_table] ( identifier[self] , identifier[table_name] , identifier[cls_att] = keyword[None] ):
literal[string]
keyword[import] identifier[Orange]
identifier[cols] = identifier[self] . identifier[db] . identifier[cols] [ identifier[table_name] ]
identifier[attributes] , identifier[metas] , identifier[class_var] =[],[], keyword[None]
keyword[for] identifier[col] keyword[in] identifier[cols] :
identifier[att_type] = identifier[self] . identifier[orng_type] ( identifier[table_name] , identifier[col] )
keyword[if] identifier[att_type] == literal[string] :
identifier[att_vals] = identifier[self] . identifier[db] . identifier[col_vals] [ identifier[table_name] ][ identifier[col] ]
identifier[att_var] = identifier[Orange] . identifier[data] . identifier[DiscreteVariable] ( identifier[str] ( identifier[col] ), identifier[values] =[ identifier[str] ( identifier[val] ) keyword[for] identifier[val] keyword[in] identifier[att_vals] ])
keyword[elif] identifier[att_type] == literal[string] :
identifier[att_var] = identifier[Orange] . identifier[data] . identifier[ContinuousVariable] ( identifier[str] ( identifier[col] ))
keyword[else] :
identifier[att_var] = identifier[Orange] . identifier[data] . identifier[StringVariable] ( identifier[str] ( identifier[col] ))
keyword[if] identifier[col] == identifier[cls_att] :
keyword[if] identifier[att_type] == literal[string] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[att_type] )
identifier[class_var] = identifier[att_var]
keyword[continue]
keyword[elif] identifier[att_type] == literal[string] keyword[or] identifier[table_name] keyword[in] identifier[self] . identifier[db] . identifier[pkeys] keyword[and] identifier[col] keyword[in] identifier[self] . identifier[db] . identifier[pkeys] [
identifier[table_name] ] keyword[or] identifier[table_name] keyword[in] identifier[self] . identifier[db] . identifier[fkeys] keyword[and] identifier[col] keyword[in] identifier[self] . identifier[db] . identifier[fkeys] [ identifier[table_name] ]:
identifier[metas] . identifier[append] ( identifier[att_var] )
keyword[else] :
identifier[attributes] . identifier[append] ( identifier[att_var] )
identifier[domain] = identifier[Orange] . identifier[data] . identifier[Domain] ( identifier[attributes] , identifier[class_vars] = identifier[class_var] , identifier[metas] = identifier[metas] )
identifier[dataset] = identifier[Orange] . identifier[data] . identifier[Table] ( identifier[domain] )
identifier[dataset] . identifier[name] = identifier[table_name]
keyword[for] identifier[row] keyword[in] identifier[self] . identifier[db] . identifier[rows] ( identifier[table_name] , identifier[cols] ):
identifier[example] = identifier[Orange] . identifier[data] . identifier[Instance] ( identifier[domain] )
keyword[for] identifier[col] , identifier[val] keyword[in] identifier[zip] ( identifier[cols] , identifier[row] ):
identifier[example] [ identifier[str] ( identifier[col] )]= identifier[str] ( identifier[val] ) keyword[if] identifier[val] != keyword[None] keyword[else] literal[string]
identifier[dataset] . identifier[append] ( identifier[example] )
keyword[return] identifier[dataset] | def convert_table(self, table_name, cls_att=None):
"""
Returns the specified table as an orange example table.
:param table_name: table name to convert
:cls_att: class attribute name
:rtype: orange.ExampleTable
"""
import Orange
cols = self.db.cols[table_name]
(attributes, metas, class_var) = ([], [], None)
for col in cols:
att_type = self.orng_type(table_name, col)
if att_type == 'd':
att_vals = self.db.col_vals[table_name][col]
att_var = Orange.data.DiscreteVariable(str(col), values=[str(val) for val in att_vals]) # depends on [control=['if'], data=[]]
elif att_type == 'c':
att_var = Orange.data.ContinuousVariable(str(col)) # depends on [control=['if'], data=[]]
else:
att_var = Orange.data.StringVariable(str(col))
if col == cls_att:
if att_type == 'string':
raise Exception('Unsuitable data type for a target variable: %s' % att_type) # depends on [control=['if'], data=['att_type']]
class_var = att_var
continue # depends on [control=['if'], data=[]]
elif att_type == 'string' or (table_name in self.db.pkeys and col in self.db.pkeys[table_name]) or (table_name in self.db.fkeys and col in self.db.fkeys[table_name]):
metas.append(att_var) # depends on [control=['if'], data=[]]
else:
attributes.append(att_var) # depends on [control=['for'], data=['col']]
domain = Orange.data.Domain(attributes, class_vars=class_var, metas=metas)
# for meta in metas:
# domain.addmeta(Orange.newmetaid(), meta)
dataset = Orange.data.Table(domain)
dataset.name = table_name
for row in self.db.rows(table_name, cols):
example = Orange.data.Instance(domain)
for (col, val) in zip(cols, row):
example[str(col)] = str(val) if val != None else '?' # depends on [control=['for'], data=[]]
dataset.append(example) # depends on [control=['for'], data=['row']]
return dataset |
def enable(self, http_proxy=None):
"""
Enable URL Filtering on the engine. If proxy servers
are needed, provide a list of HTTPProxy elements.
:param http_proxy: list of proxies for GTI connections
:type http_proxy: list(str,HttpProxy)
"""
self.update(ts_enabled=True, http_proxy=get_proxy(http_proxy)) | def function[enable, parameter[self, http_proxy]]:
constant[
Enable URL Filtering on the engine. If proxy servers
are needed, provide a list of HTTPProxy elements.
:param http_proxy: list of proxies for GTI connections
:type http_proxy: list(str,HttpProxy)
]
call[name[self].update, parameter[]] | keyword[def] identifier[enable] ( identifier[self] , identifier[http_proxy] = keyword[None] ):
literal[string]
identifier[self] . identifier[update] ( identifier[ts_enabled] = keyword[True] , identifier[http_proxy] = identifier[get_proxy] ( identifier[http_proxy] )) | def enable(self, http_proxy=None):
"""
Enable URL Filtering on the engine. If proxy servers
are needed, provide a list of HTTPProxy elements.
:param http_proxy: list of proxies for GTI connections
:type http_proxy: list(str,HttpProxy)
"""
self.update(ts_enabled=True, http_proxy=get_proxy(http_proxy)) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.