code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def serialize(self, data, response=None, request=None, format=None):
"""Serializes the data using a determined serializer.
@param[in] data
The data to be serialized.
@param[in] response
The response object to serialize the data to.
If this method is invoked as an instance method, the response
object can be omitted and it will be taken from the instance.
@param[in] request
The request object to pull information from; normally used to
determine the serialization format (when `format` is not provided).
May be used by some serializers as well to pull additional headers.
If this method is invoked as an instance method, the request
object can be omitted and it will be taken from the instance.
@param[in] format
A specific format to serialize in; if provided, no detection is
done. If not provided, the accept header (as well as the URL
extension) is looked at to determine an appropriate serializer.
@returns
A tuple of the serialized text and an instance of the
serializer used.
"""
if isinstance(self, Resource):
if not request:
# Ensure we have a response object.
request = self._request
Serializer = None
if format:
# An explicit format was given; do not attempt to auto-detect
# a serializer.
Serializer = self.meta.serializers[format]
if not Serializer:
# Determine an appropriate serializer to use by
# introspecting the request object and looking at the `Accept`
# header.
media_ranges = (request.get('Accept') or '*/*').strip()
if not media_ranges:
# Default the media ranges to */*
media_ranges = '*/*'
if media_ranges != '*/*':
# Parse the media ranges and determine the serializer
# that is the closest match.
media_types = six.iterkeys(self._serializer_map)
media_type = mimeparse.best_match(media_types, media_ranges)
if media_type:
format = self._serializer_map[media_type]
Serializer = self.meta.serializers[format]
else:
# Client indicated no preference; use the default.
default = self.meta.default_serializer
Serializer = self.meta.serializers[default]
if Serializer:
try:
# Attempt to serialize the data using the determined
# serializer.
serializer = Serializer(request, response)
return serializer.serialize(data), serializer
except ValueError:
# Failed to serialize the data.
pass
# Either failed to determine a serializer or failed to serialize
# the data; construct a list of available and valid encoders.
available = {}
for name in self.meta.allowed_serializers:
Serializer = self.meta.serializers[name]
instance = Serializer(request, None)
if instance.can_serialize(data):
available[name] = Serializer.media_types[0]
# Raise a Not Acceptable exception.
raise http.exceptions.NotAcceptable(available) | def function[serialize, parameter[self, data, response, request, format]]:
constant[Serializes the data using a determined serializer.
@param[in] data
The data to be serialized.
@param[in] response
The response object to serialize the data to.
If this method is invoked as an instance method, the response
object can be omitted and it will be taken from the instance.
@param[in] request
The request object to pull information from; normally used to
determine the serialization format (when `format` is not provided).
May be used by some serializers as well to pull additional headers.
If this method is invoked as an instance method, the request
object can be omitted and it will be taken from the instance.
@param[in] format
A specific format to serialize in; if provided, no detection is
done. If not provided, the accept header (as well as the URL
extension) is looked at to determine an appropriate serializer.
@returns
A tuple of the serialized text and an instance of the
serializer used.
]
if call[name[isinstance], parameter[name[self], name[Resource]]] begin[:]
if <ast.UnaryOp object at 0x7da1afef8a30> begin[:]
variable[request] assign[=] name[self]._request
variable[Serializer] assign[=] constant[None]
if name[format] begin[:]
variable[Serializer] assign[=] call[name[self].meta.serializers][name[format]]
if <ast.UnaryOp object at 0x7da1afefa8f0> begin[:]
variable[media_ranges] assign[=] call[<ast.BoolOp object at 0x7da1afef8dc0>.strip, parameter[]]
if <ast.UnaryOp object at 0x7da1afef9840> begin[:]
variable[media_ranges] assign[=] constant[*/*]
if compare[name[media_ranges] not_equal[!=] constant[*/*]] begin[:]
variable[media_types] assign[=] call[name[six].iterkeys, parameter[name[self]._serializer_map]]
variable[media_type] assign[=] call[name[mimeparse].best_match, parameter[name[media_types], name[media_ranges]]]
if name[media_type] begin[:]
variable[format] assign[=] call[name[self]._serializer_map][name[media_type]]
variable[Serializer] assign[=] call[name[self].meta.serializers][name[format]]
if name[Serializer] begin[:]
<ast.Try object at 0x7da1afef8c40>
variable[available] assign[=] dictionary[[], []]
for taget[name[name]] in starred[name[self].meta.allowed_serializers] begin[:]
variable[Serializer] assign[=] call[name[self].meta.serializers][name[name]]
variable[instance] assign[=] call[name[Serializer], parameter[name[request], constant[None]]]
if call[name[instance].can_serialize, parameter[name[data]]] begin[:]
call[name[available]][name[name]] assign[=] call[name[Serializer].media_types][constant[0]]
<ast.Raise object at 0x7da1afef9780> | keyword[def] identifier[serialize] ( identifier[self] , identifier[data] , identifier[response] = keyword[None] , identifier[request] = keyword[None] , identifier[format] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] , identifier[Resource] ):
keyword[if] keyword[not] identifier[request] :
identifier[request] = identifier[self] . identifier[_request]
identifier[Serializer] = keyword[None]
keyword[if] identifier[format] :
identifier[Serializer] = identifier[self] . identifier[meta] . identifier[serializers] [ identifier[format] ]
keyword[if] keyword[not] identifier[Serializer] :
identifier[media_ranges] =( identifier[request] . identifier[get] ( literal[string] ) keyword[or] literal[string] ). identifier[strip] ()
keyword[if] keyword[not] identifier[media_ranges] :
identifier[media_ranges] = literal[string]
keyword[if] identifier[media_ranges] != literal[string] :
identifier[media_types] = identifier[six] . identifier[iterkeys] ( identifier[self] . identifier[_serializer_map] )
identifier[media_type] = identifier[mimeparse] . identifier[best_match] ( identifier[media_types] , identifier[media_ranges] )
keyword[if] identifier[media_type] :
identifier[format] = identifier[self] . identifier[_serializer_map] [ identifier[media_type] ]
identifier[Serializer] = identifier[self] . identifier[meta] . identifier[serializers] [ identifier[format] ]
keyword[else] :
identifier[default] = identifier[self] . identifier[meta] . identifier[default_serializer]
identifier[Serializer] = identifier[self] . identifier[meta] . identifier[serializers] [ identifier[default] ]
keyword[if] identifier[Serializer] :
keyword[try] :
identifier[serializer] = identifier[Serializer] ( identifier[request] , identifier[response] )
keyword[return] identifier[serializer] . identifier[serialize] ( identifier[data] ), identifier[serializer]
keyword[except] identifier[ValueError] :
keyword[pass]
identifier[available] ={}
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[meta] . identifier[allowed_serializers] :
identifier[Serializer] = identifier[self] . identifier[meta] . identifier[serializers] [ identifier[name] ]
identifier[instance] = identifier[Serializer] ( identifier[request] , keyword[None] )
keyword[if] identifier[instance] . identifier[can_serialize] ( identifier[data] ):
identifier[available] [ identifier[name] ]= identifier[Serializer] . identifier[media_types] [ literal[int] ]
keyword[raise] identifier[http] . identifier[exceptions] . identifier[NotAcceptable] ( identifier[available] ) | def serialize(self, data, response=None, request=None, format=None):
"""Serializes the data using a determined serializer.
@param[in] data
The data to be serialized.
@param[in] response
The response object to serialize the data to.
If this method is invoked as an instance method, the response
object can be omitted and it will be taken from the instance.
@param[in] request
The request object to pull information from; normally used to
determine the serialization format (when `format` is not provided).
May be used by some serializers as well to pull additional headers.
If this method is invoked as an instance method, the request
object can be omitted and it will be taken from the instance.
@param[in] format
A specific format to serialize in; if provided, no detection is
done. If not provided, the accept header (as well as the URL
extension) is looked at to determine an appropriate serializer.
@returns
A tuple of the serialized text and an instance of the
serializer used.
"""
if isinstance(self, Resource):
if not request:
# Ensure we have a response object.
request = self._request # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
Serializer = None
if format:
# An explicit format was given; do not attempt to auto-detect
# a serializer.
Serializer = self.meta.serializers[format] # depends on [control=['if'], data=[]]
if not Serializer:
# Determine an appropriate serializer to use by
# introspecting the request object and looking at the `Accept`
# header.
media_ranges = (request.get('Accept') or '*/*').strip()
if not media_ranges:
# Default the media ranges to */*
media_ranges = '*/*' # depends on [control=['if'], data=[]]
if media_ranges != '*/*':
# Parse the media ranges and determine the serializer
# that is the closest match.
media_types = six.iterkeys(self._serializer_map)
media_type = mimeparse.best_match(media_types, media_ranges)
if media_type:
format = self._serializer_map[media_type]
Serializer = self.meta.serializers[format] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['media_ranges']]
else:
# Client indicated no preference; use the default.
default = self.meta.default_serializer
Serializer = self.meta.serializers[default] # depends on [control=['if'], data=[]]
if Serializer:
try:
# Attempt to serialize the data using the determined
# serializer.
serializer = Serializer(request, response)
return (serializer.serialize(data), serializer) # depends on [control=['try'], data=[]]
except ValueError:
# Failed to serialize the data.
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Either failed to determine a serializer or failed to serialize
# the data; construct a list of available and valid encoders.
available = {}
for name in self.meta.allowed_serializers:
Serializer = self.meta.serializers[name]
instance = Serializer(request, None)
if instance.can_serialize(data):
available[name] = Serializer.media_types[0] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
# Raise a Not Acceptable exception.
raise http.exceptions.NotAcceptable(available) |
def update(self, **kwargs):
"""Update all resources in this collection."""
self.inflate()
for model in self._models:
model.update(**kwargs)
return self | def function[update, parameter[self]]:
constant[Update all resources in this collection.]
call[name[self].inflate, parameter[]]
for taget[name[model]] in starred[name[self]._models] begin[:]
call[name[model].update, parameter[]]
return[name[self]] | keyword[def] identifier[update] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[inflate] ()
keyword[for] identifier[model] keyword[in] identifier[self] . identifier[_models] :
identifier[model] . identifier[update] (** identifier[kwargs] )
keyword[return] identifier[self] | def update(self, **kwargs):
"""Update all resources in this collection."""
self.inflate()
for model in self._models:
model.update(**kwargs) # depends on [control=['for'], data=['model']]
return self |
def add(self, doc):
"""Add a doc's annotations to the binder for serialization."""
array = doc.to_array(self.attrs)
if len(array.shape) == 1:
array = array.reshape((array.shape[0], 1))
self.tokens.append(array)
spaces = doc.to_array(SPACY)
assert array.shape[0] == spaces.shape[0]
spaces = spaces.reshape((spaces.shape[0], 1))
self.spaces.append(numpy.asarray(spaces, dtype=bool))
self.strings.update(w.text for w in doc) | def function[add, parameter[self, doc]]:
constant[Add a doc's annotations to the binder for serialization.]
variable[array] assign[=] call[name[doc].to_array, parameter[name[self].attrs]]
if compare[call[name[len], parameter[name[array].shape]] equal[==] constant[1]] begin[:]
variable[array] assign[=] call[name[array].reshape, parameter[tuple[[<ast.Subscript object at 0x7da1b1e9a7d0>, <ast.Constant object at 0x7da1b1e98250>]]]]
call[name[self].tokens.append, parameter[name[array]]]
variable[spaces] assign[=] call[name[doc].to_array, parameter[name[SPACY]]]
assert[compare[call[name[array].shape][constant[0]] equal[==] call[name[spaces].shape][constant[0]]]]
variable[spaces] assign[=] call[name[spaces].reshape, parameter[tuple[[<ast.Subscript object at 0x7da1b1e9bfa0>, <ast.Constant object at 0x7da1b1e981c0>]]]]
call[name[self].spaces.append, parameter[call[name[numpy].asarray, parameter[name[spaces]]]]]
call[name[self].strings.update, parameter[<ast.GeneratorExp object at 0x7da1b1e9a950>]] | keyword[def] identifier[add] ( identifier[self] , identifier[doc] ):
literal[string]
identifier[array] = identifier[doc] . identifier[to_array] ( identifier[self] . identifier[attrs] )
keyword[if] identifier[len] ( identifier[array] . identifier[shape] )== literal[int] :
identifier[array] = identifier[array] . identifier[reshape] (( identifier[array] . identifier[shape] [ literal[int] ], literal[int] ))
identifier[self] . identifier[tokens] . identifier[append] ( identifier[array] )
identifier[spaces] = identifier[doc] . identifier[to_array] ( identifier[SPACY] )
keyword[assert] identifier[array] . identifier[shape] [ literal[int] ]== identifier[spaces] . identifier[shape] [ literal[int] ]
identifier[spaces] = identifier[spaces] . identifier[reshape] (( identifier[spaces] . identifier[shape] [ literal[int] ], literal[int] ))
identifier[self] . identifier[spaces] . identifier[append] ( identifier[numpy] . identifier[asarray] ( identifier[spaces] , identifier[dtype] = identifier[bool] ))
identifier[self] . identifier[strings] . identifier[update] ( identifier[w] . identifier[text] keyword[for] identifier[w] keyword[in] identifier[doc] ) | def add(self, doc):
"""Add a doc's annotations to the binder for serialization."""
array = doc.to_array(self.attrs)
if len(array.shape) == 1:
array = array.reshape((array.shape[0], 1)) # depends on [control=['if'], data=[]]
self.tokens.append(array)
spaces = doc.to_array(SPACY)
assert array.shape[0] == spaces.shape[0]
spaces = spaces.reshape((spaces.shape[0], 1))
self.spaces.append(numpy.asarray(spaces, dtype=bool))
self.strings.update((w.text for w in doc)) |
def detect_Massimini2004(dat_orig, s_freq, time, opts):
"""Slow wave detection based on Massimini et al., 2004.
Parameters
----------
dat_orig : ndarray (dtype='float')
vector with the data for one channel
s_freq : float
sampling frequency
time : ndarray (dtype='float')
vector with the time points for each sample
opts : instance of 'DetectSlowWave'
'det_filt' : dict
parameters for 'butter',
'duration' : tuple of float
min and max duration of SW
'min_ptp' : float
min peak-to-peak amplitude
'trough_duration' : tuple of float
min and max duration of first half-wave (trough)
Returns
-------
list of dict
list of detected SWs
float
SW density, per 30-s epoch
References
----------
Massimini, M. et al. J Neurosci 24(31) 6862-70 (2004).
"""
if opts.invert:
dat_orig = -dat_orig
dat_det = transform_signal(dat_orig, s_freq, 'double_butter',
opts.det_filt)
above_zero = detect_events(dat_det, 'above_thresh', value=0.)
sw_in_chan = []
if above_zero is not None:
troughs = within_duration(above_zero, time, opts.trough_duration)
#lg.info('troughs within duration: ' + str(troughs.shape))
if troughs is not None:
troughs = select_peaks(dat_det, troughs, opts.max_trough_amp)
#lg.info('troughs deep enough: ' + str(troughs.shape))
if troughs is not None:
events = _add_halfwave(dat_det, troughs, s_freq, opts)
#lg.info('SWs high enough: ' + str(events.shape))
if len(events):
events = within_duration(events, time, opts.duration)
events = remove_straddlers(events, time, s_freq)
#lg.info('SWs within duration: ' + str(events.shape))
sw_in_chan = make_slow_waves(events, dat_det, time, s_freq)
if len(sw_in_chan) == 0:
lg.info('No slow wave found')
return sw_in_chan | def function[detect_Massimini2004, parameter[dat_orig, s_freq, time, opts]]:
constant[Slow wave detection based on Massimini et al., 2004.
Parameters
----------
dat_orig : ndarray (dtype='float')
vector with the data for one channel
s_freq : float
sampling frequency
time : ndarray (dtype='float')
vector with the time points for each sample
opts : instance of 'DetectSlowWave'
'det_filt' : dict
parameters for 'butter',
'duration' : tuple of float
min and max duration of SW
'min_ptp' : float
min peak-to-peak amplitude
'trough_duration' : tuple of float
min and max duration of first half-wave (trough)
Returns
-------
list of dict
list of detected SWs
float
SW density, per 30-s epoch
References
----------
Massimini, M. et al. J Neurosci 24(31) 6862-70 (2004).
]
if name[opts].invert begin[:]
variable[dat_orig] assign[=] <ast.UnaryOp object at 0x7da1b0e8d6f0>
variable[dat_det] assign[=] call[name[transform_signal], parameter[name[dat_orig], name[s_freq], constant[double_butter], name[opts].det_filt]]
variable[above_zero] assign[=] call[name[detect_events], parameter[name[dat_det], constant[above_thresh]]]
variable[sw_in_chan] assign[=] list[[]]
if compare[name[above_zero] is_not constant[None]] begin[:]
variable[troughs] assign[=] call[name[within_duration], parameter[name[above_zero], name[time], name[opts].trough_duration]]
if compare[name[troughs] is_not constant[None]] begin[:]
variable[troughs] assign[=] call[name[select_peaks], parameter[name[dat_det], name[troughs], name[opts].max_trough_amp]]
if compare[name[troughs] is_not constant[None]] begin[:]
variable[events] assign[=] call[name[_add_halfwave], parameter[name[dat_det], name[troughs], name[s_freq], name[opts]]]
if call[name[len], parameter[name[events]]] begin[:]
variable[events] assign[=] call[name[within_duration], parameter[name[events], name[time], name[opts].duration]]
variable[events] assign[=] call[name[remove_straddlers], parameter[name[events], name[time], name[s_freq]]]
variable[sw_in_chan] assign[=] call[name[make_slow_waves], parameter[name[events], name[dat_det], name[time], name[s_freq]]]
if compare[call[name[len], parameter[name[sw_in_chan]]] equal[==] constant[0]] begin[:]
call[name[lg].info, parameter[constant[No slow wave found]]]
return[name[sw_in_chan]] | keyword[def] identifier[detect_Massimini2004] ( identifier[dat_orig] , identifier[s_freq] , identifier[time] , identifier[opts] ):
literal[string]
keyword[if] identifier[opts] . identifier[invert] :
identifier[dat_orig] =- identifier[dat_orig]
identifier[dat_det] = identifier[transform_signal] ( identifier[dat_orig] , identifier[s_freq] , literal[string] ,
identifier[opts] . identifier[det_filt] )
identifier[above_zero] = identifier[detect_events] ( identifier[dat_det] , literal[string] , identifier[value] = literal[int] )
identifier[sw_in_chan] =[]
keyword[if] identifier[above_zero] keyword[is] keyword[not] keyword[None] :
identifier[troughs] = identifier[within_duration] ( identifier[above_zero] , identifier[time] , identifier[opts] . identifier[trough_duration] )
keyword[if] identifier[troughs] keyword[is] keyword[not] keyword[None] :
identifier[troughs] = identifier[select_peaks] ( identifier[dat_det] , identifier[troughs] , identifier[opts] . identifier[max_trough_amp] )
keyword[if] identifier[troughs] keyword[is] keyword[not] keyword[None] :
identifier[events] = identifier[_add_halfwave] ( identifier[dat_det] , identifier[troughs] , identifier[s_freq] , identifier[opts] )
keyword[if] identifier[len] ( identifier[events] ):
identifier[events] = identifier[within_duration] ( identifier[events] , identifier[time] , identifier[opts] . identifier[duration] )
identifier[events] = identifier[remove_straddlers] ( identifier[events] , identifier[time] , identifier[s_freq] )
identifier[sw_in_chan] = identifier[make_slow_waves] ( identifier[events] , identifier[dat_det] , identifier[time] , identifier[s_freq] )
keyword[if] identifier[len] ( identifier[sw_in_chan] )== literal[int] :
identifier[lg] . identifier[info] ( literal[string] )
keyword[return] identifier[sw_in_chan] | def detect_Massimini2004(dat_orig, s_freq, time, opts):
"""Slow wave detection based on Massimini et al., 2004.
Parameters
----------
dat_orig : ndarray (dtype='float')
vector with the data for one channel
s_freq : float
sampling frequency
time : ndarray (dtype='float')
vector with the time points for each sample
opts : instance of 'DetectSlowWave'
'det_filt' : dict
parameters for 'butter',
'duration' : tuple of float
min and max duration of SW
'min_ptp' : float
min peak-to-peak amplitude
'trough_duration' : tuple of float
min and max duration of first half-wave (trough)
Returns
-------
list of dict
list of detected SWs
float
SW density, per 30-s epoch
References
----------
Massimini, M. et al. J Neurosci 24(31) 6862-70 (2004).
"""
if opts.invert:
dat_orig = -dat_orig # depends on [control=['if'], data=[]]
dat_det = transform_signal(dat_orig, s_freq, 'double_butter', opts.det_filt)
above_zero = detect_events(dat_det, 'above_thresh', value=0.0)
sw_in_chan = []
if above_zero is not None:
troughs = within_duration(above_zero, time, opts.trough_duration)
#lg.info('troughs within duration: ' + str(troughs.shape))
if troughs is not None:
troughs = select_peaks(dat_det, troughs, opts.max_trough_amp)
#lg.info('troughs deep enough: ' + str(troughs.shape))
if troughs is not None:
events = _add_halfwave(dat_det, troughs, s_freq, opts)
#lg.info('SWs high enough: ' + str(events.shape))
if len(events):
events = within_duration(events, time, opts.duration)
events = remove_straddlers(events, time, s_freq)
#lg.info('SWs within duration: ' + str(events.shape))
sw_in_chan = make_slow_waves(events, dat_det, time, s_freq) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['troughs']] # depends on [control=['if'], data=['troughs']] # depends on [control=['if'], data=['above_zero']]
if len(sw_in_chan) == 0:
lg.info('No slow wave found') # depends on [control=['if'], data=[]]
return sw_in_chan |
def read_if_vcf(params):
"""
Checks if input is VCF and reads in appropriately if it is
"""
ref = None
aln = params.aln
fixed_pi = None
if hasattr(params, 'aln') and params.aln is not None:
if any([params.aln.lower().endswith(x) for x in ['.vcf', '.vcf.gz']]):
if not params.vcf_reference:
print("ERROR: a reference Fasta is required with VCF-format alignments")
return -1
compress_seq = read_vcf(params.aln, params.vcf_reference)
sequences = compress_seq['sequences']
ref = compress_seq['reference']
aln = sequences
if not hasattr(params, 'gtr') or params.gtr=="infer": #if not specified, set it:
alpha = alphabets['aa'] if params.aa else alphabets['nuc']
fixed_pi = [ref.count(base)/len(ref) for base in alpha]
if fixed_pi[-1] == 0:
fixed_pi[-1] = 0.05
fixed_pi = [v-0.01 for v in fixed_pi]
return aln, ref, fixed_pi | def function[read_if_vcf, parameter[params]]:
constant[
Checks if input is VCF and reads in appropriately if it is
]
variable[ref] assign[=] constant[None]
variable[aln] assign[=] name[params].aln
variable[fixed_pi] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b0242b60> begin[:]
if call[name[any], parameter[<ast.ListComp object at 0x7da1b0243370>]] begin[:]
if <ast.UnaryOp object at 0x7da1b02a63e0> begin[:]
call[name[print], parameter[constant[ERROR: a reference Fasta is required with VCF-format alignments]]]
return[<ast.UnaryOp object at 0x7da1b02a7910>]
variable[compress_seq] assign[=] call[name[read_vcf], parameter[name[params].aln, name[params].vcf_reference]]
variable[sequences] assign[=] call[name[compress_seq]][constant[sequences]]
variable[ref] assign[=] call[name[compress_seq]][constant[reference]]
variable[aln] assign[=] name[sequences]
if <ast.BoolOp object at 0x7da1b02a6380> begin[:]
variable[alpha] assign[=] <ast.IfExp object at 0x7da1b02a47f0>
variable[fixed_pi] assign[=] <ast.ListComp object at 0x7da1b02a64d0>
if compare[call[name[fixed_pi]][<ast.UnaryOp object at 0x7da1b02a5ff0>] equal[==] constant[0]] begin[:]
call[name[fixed_pi]][<ast.UnaryOp object at 0x7da1b02a7ee0>] assign[=] constant[0.05]
variable[fixed_pi] assign[=] <ast.ListComp object at 0x7da1b02a7790>
return[tuple[[<ast.Name object at 0x7da1b02a69b0>, <ast.Name object at 0x7da1b02a6710>, <ast.Name object at 0x7da1b02a5150>]]] | keyword[def] identifier[read_if_vcf] ( identifier[params] ):
literal[string]
identifier[ref] = keyword[None]
identifier[aln] = identifier[params] . identifier[aln]
identifier[fixed_pi] = keyword[None]
keyword[if] identifier[hasattr] ( identifier[params] , literal[string] ) keyword[and] identifier[params] . identifier[aln] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[any] ([ identifier[params] . identifier[aln] . identifier[lower] (). identifier[endswith] ( identifier[x] ) keyword[for] identifier[x] keyword[in] [ literal[string] , literal[string] ]]):
keyword[if] keyword[not] identifier[params] . identifier[vcf_reference] :
identifier[print] ( literal[string] )
keyword[return] - literal[int]
identifier[compress_seq] = identifier[read_vcf] ( identifier[params] . identifier[aln] , identifier[params] . identifier[vcf_reference] )
identifier[sequences] = identifier[compress_seq] [ literal[string] ]
identifier[ref] = identifier[compress_seq] [ literal[string] ]
identifier[aln] = identifier[sequences]
keyword[if] keyword[not] identifier[hasattr] ( identifier[params] , literal[string] ) keyword[or] identifier[params] . identifier[gtr] == literal[string] :
identifier[alpha] = identifier[alphabets] [ literal[string] ] keyword[if] identifier[params] . identifier[aa] keyword[else] identifier[alphabets] [ literal[string] ]
identifier[fixed_pi] =[ identifier[ref] . identifier[count] ( identifier[base] )/ identifier[len] ( identifier[ref] ) keyword[for] identifier[base] keyword[in] identifier[alpha] ]
keyword[if] identifier[fixed_pi] [- literal[int] ]== literal[int] :
identifier[fixed_pi] [- literal[int] ]= literal[int]
identifier[fixed_pi] =[ identifier[v] - literal[int] keyword[for] identifier[v] keyword[in] identifier[fixed_pi] ]
keyword[return] identifier[aln] , identifier[ref] , identifier[fixed_pi] | def read_if_vcf(params):
"""
Checks if input is VCF and reads in appropriately if it is
"""
ref = None
aln = params.aln
fixed_pi = None
if hasattr(params, 'aln') and params.aln is not None:
if any([params.aln.lower().endswith(x) for x in ['.vcf', '.vcf.gz']]):
if not params.vcf_reference:
print('ERROR: a reference Fasta is required with VCF-format alignments')
return -1 # depends on [control=['if'], data=[]]
compress_seq = read_vcf(params.aln, params.vcf_reference)
sequences = compress_seq['sequences']
ref = compress_seq['reference']
aln = sequences
if not hasattr(params, 'gtr') or params.gtr == 'infer': #if not specified, set it:
alpha = alphabets['aa'] if params.aa else alphabets['nuc']
fixed_pi = [ref.count(base) / len(ref) for base in alpha]
if fixed_pi[-1] == 0:
fixed_pi[-1] = 0.05
fixed_pi = [v - 0.01 for v in fixed_pi] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return (aln, ref, fixed_pi) |
def setattr(self, name, value):
# type: (Any, Any, Any) -> Any
'''Takes an object, a string, and a value and produces a new object
that is a copy of the original but with the attribute called ``name``
set to ``value``.
The following equality should hold for your definition:
.. code-block:: python
setattr(obj, 'attr', obj.attr) == obj
This function is used by many lenses (particularly GetattrLens) to set
attributes on states even when those states do not ordinarily support
``setattr``. This function is designed to have a similar signature
as python's built-in ``setattr`` except that it returns a new object
that has the attribute set rather than mutating the object in place.
It's what enables the ``lens.some_attribute`` functionality.
The corresponding method call for this hook is
``obj._lens_setattr(name, value)``.
The default implementation makes a copy of the object using
``copy.copy`` and then mutates the new object by calling python's
built in ``setattr`` on it.
'''
try:
self._lens_setattr
except AttributeError:
selfcopy = copy.copy(self)
builtin_setattr(selfcopy, name, value)
return selfcopy
else:
return self._lens_setattr(name, value) | def function[setattr, parameter[self, name, value]]:
constant[Takes an object, a string, and a value and produces a new object
that is a copy of the original but with the attribute called ``name``
set to ``value``.
The following equality should hold for your definition:
.. code-block:: python
setattr(obj, 'attr', obj.attr) == obj
This function is used by many lenses (particularly GetattrLens) to set
attributes on states even when those states do not ordinarily support
``setattr``. This function is designed to have a similar signature
as python's built-in ``setattr`` except that it returns a new object
that has the attribute set rather than mutating the object in place.
It's what enables the ``lens.some_attribute`` functionality.
The corresponding method call for this hook is
``obj._lens_setattr(name, value)``.
The default implementation makes a copy of the object using
``copy.copy`` and then mutates the new object by calling python's
built in ``setattr`` on it.
]
<ast.Try object at 0x7da1b034a290> | keyword[def] identifier[setattr] ( identifier[self] , identifier[name] , identifier[value] ):
literal[string]
keyword[try] :
identifier[self] . identifier[_lens_setattr]
keyword[except] identifier[AttributeError] :
identifier[selfcopy] = identifier[copy] . identifier[copy] ( identifier[self] )
identifier[builtin_setattr] ( identifier[selfcopy] , identifier[name] , identifier[value] )
keyword[return] identifier[selfcopy]
keyword[else] :
keyword[return] identifier[self] . identifier[_lens_setattr] ( identifier[name] , identifier[value] ) | def setattr(self, name, value):
# type: (Any, Any, Any) -> Any
"Takes an object, a string, and a value and produces a new object\n that is a copy of the original but with the attribute called ``name``\n set to ``value``.\n\n The following equality should hold for your definition:\n\n .. code-block:: python\n\n setattr(obj, 'attr', obj.attr) == obj\n\n This function is used by many lenses (particularly GetattrLens) to set\n attributes on states even when those states do not ordinarily support\n ``setattr``. This function is designed to have a similar signature\n as python's built-in ``setattr`` except that it returns a new object\n that has the attribute set rather than mutating the object in place.\n\n It's what enables the ``lens.some_attribute`` functionality.\n\n The corresponding method call for this hook is\n ``obj._lens_setattr(name, value)``.\n\n The default implementation makes a copy of the object using\n ``copy.copy`` and then mutates the new object by calling python's\n built in ``setattr`` on it.\n "
try:
self._lens_setattr # depends on [control=['try'], data=[]]
except AttributeError:
selfcopy = copy.copy(self)
builtin_setattr(selfcopy, name, value)
return selfcopy # depends on [control=['except'], data=[]]
else:
return self._lens_setattr(name, value) |
def set_release_description(self, description, **kwargs):
"""Set the release notes on the tag.
If the release doesn't exist yet, it will be created. If it already
exists, its description will be updated.
Args:
description (str): Description of the release.
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabCreateError: If the server fails to create the release
GitlabUpdateError: If the server fails to update the release
"""
id = self.get_id().replace('/', '%2F')
path = '%s/%s/release' % (self.manager.path, id)
data = {'description': description}
if self.release is None:
try:
server_data = self.manager.gitlab.http_post(path,
post_data=data,
**kwargs)
except exc.GitlabHttpError as e:
raise exc.GitlabCreateError(e.response_code, e.error_message)
else:
try:
server_data = self.manager.gitlab.http_put(path,
post_data=data,
**kwargs)
except exc.GitlabHttpError as e:
raise exc.GitlabUpdateError(e.response_code, e.error_message)
self.release = server_data | def function[set_release_description, parameter[self, description]]:
constant[Set the release notes on the tag.
If the release doesn't exist yet, it will be created. If it already
exists, its description will be updated.
Args:
description (str): Description of the release.
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabCreateError: If the server fails to create the release
GitlabUpdateError: If the server fails to update the release
]
variable[id] assign[=] call[call[name[self].get_id, parameter[]].replace, parameter[constant[/], constant[%2F]]]
variable[path] assign[=] binary_operation[constant[%s/%s/release] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6e49a0>, <ast.Name object at 0x7da20c6e6fb0>]]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e6440>], [<ast.Name object at 0x7da20c6e51e0>]]
if compare[name[self].release is constant[None]] begin[:]
<ast.Try object at 0x7da20c6e4fd0>
name[self].release assign[=] name[server_data] | keyword[def] identifier[set_release_description] ( identifier[self] , identifier[description] ,** identifier[kwargs] ):
literal[string]
identifier[id] = identifier[self] . identifier[get_id] (). identifier[replace] ( literal[string] , literal[string] )
identifier[path] = literal[string] %( identifier[self] . identifier[manager] . identifier[path] , identifier[id] )
identifier[data] ={ literal[string] : identifier[description] }
keyword[if] identifier[self] . identifier[release] keyword[is] keyword[None] :
keyword[try] :
identifier[server_data] = identifier[self] . identifier[manager] . identifier[gitlab] . identifier[http_post] ( identifier[path] ,
identifier[post_data] = identifier[data] ,
** identifier[kwargs] )
keyword[except] identifier[exc] . identifier[GitlabHttpError] keyword[as] identifier[e] :
keyword[raise] identifier[exc] . identifier[GitlabCreateError] ( identifier[e] . identifier[response_code] , identifier[e] . identifier[error_message] )
keyword[else] :
keyword[try] :
identifier[server_data] = identifier[self] . identifier[manager] . identifier[gitlab] . identifier[http_put] ( identifier[path] ,
identifier[post_data] = identifier[data] ,
** identifier[kwargs] )
keyword[except] identifier[exc] . identifier[GitlabHttpError] keyword[as] identifier[e] :
keyword[raise] identifier[exc] . identifier[GitlabUpdateError] ( identifier[e] . identifier[response_code] , identifier[e] . identifier[error_message] )
identifier[self] . identifier[release] = identifier[server_data] | def set_release_description(self, description, **kwargs):
"""Set the release notes on the tag.
If the release doesn't exist yet, it will be created. If it already
exists, its description will be updated.
Args:
description (str): Description of the release.
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabCreateError: If the server fails to create the release
GitlabUpdateError: If the server fails to update the release
"""
id = self.get_id().replace('/', '%2F')
path = '%s/%s/release' % (self.manager.path, id)
data = {'description': description}
if self.release is None:
try:
server_data = self.manager.gitlab.http_post(path, post_data=data, **kwargs) # depends on [control=['try'], data=[]]
except exc.GitlabHttpError as e:
raise exc.GitlabCreateError(e.response_code, e.error_message) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
else:
try:
server_data = self.manager.gitlab.http_put(path, post_data=data, **kwargs) # depends on [control=['try'], data=[]]
except exc.GitlabHttpError as e:
raise exc.GitlabUpdateError(e.response_code, e.error_message) # depends on [control=['except'], data=['e']]
self.release = server_data |
def clip_rect(self, x:float, y:float, w:float, h:float) -> None:
"""Clip further output to this rect."""
pass | def function[clip_rect, parameter[self, x, y, w, h]]:
constant[Clip further output to this rect.]
pass | keyword[def] identifier[clip_rect] ( identifier[self] , identifier[x] : identifier[float] , identifier[y] : identifier[float] , identifier[w] : identifier[float] , identifier[h] : identifier[float] )-> keyword[None] :
literal[string]
keyword[pass] | def clip_rect(self, x: float, y: float, w: float, h: float) -> None:
"""Clip further output to this rect."""
pass |
def query_raw(self, collection, query, request_handler='select', **kwargs):
"""
:param str collection: The name of the collection for the request
:param str request_handler: Request handler, default is 'select'
:param dict query: Python dictionary of Solr query parameters.
Sends a query to Solr, returns a dict. `query` should be a dictionary of solr request handler arguments.
Example::
res = solr.query_raw('SolrClient_unittest',{
'q':'*:*',
'facet':True,
'facet.field':'facet_test',
})
"""
headers = {'content-type': 'application/x-www-form-urlencoded'}
data = query
resp, con_inf = self.transport.send_request(method='POST',
endpoint=request_handler,
collection=collection,
data=data,
headers=headers,
**kwargs)
return resp | def function[query_raw, parameter[self, collection, query, request_handler]]:
constant[
:param str collection: The name of the collection for the request
:param str request_handler: Request handler, default is 'select'
:param dict query: Python dictionary of Solr query parameters.
Sends a query to Solr, returns a dict. `query` should be a dictionary of solr request handler arguments.
Example::
res = solr.query_raw('SolrClient_unittest',{
'q':'*:*',
'facet':True,
'facet.field':'facet_test',
})
]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b11bd6f0>], [<ast.Constant object at 0x7da1b11bdea0>]]
variable[data] assign[=] name[query]
<ast.Tuple object at 0x7da1b11be890> assign[=] call[name[self].transport.send_request, parameter[]]
return[name[resp]] | keyword[def] identifier[query_raw] ( identifier[self] , identifier[collection] , identifier[query] , identifier[request_handler] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[headers] ={ literal[string] : literal[string] }
identifier[data] = identifier[query]
identifier[resp] , identifier[con_inf] = identifier[self] . identifier[transport] . identifier[send_request] ( identifier[method] = literal[string] ,
identifier[endpoint] = identifier[request_handler] ,
identifier[collection] = identifier[collection] ,
identifier[data] = identifier[data] ,
identifier[headers] = identifier[headers] ,
** identifier[kwargs] )
keyword[return] identifier[resp] | def query_raw(self, collection, query, request_handler='select', **kwargs):
"""
:param str collection: The name of the collection for the request
:param str request_handler: Request handler, default is 'select'
:param dict query: Python dictionary of Solr query parameters.
Sends a query to Solr, returns a dict. `query` should be a dictionary of solr request handler arguments.
Example::
res = solr.query_raw('SolrClient_unittest',{
'q':'*:*',
'facet':True,
'facet.field':'facet_test',
})
"""
headers = {'content-type': 'application/x-www-form-urlencoded'}
data = query
(resp, con_inf) = self.transport.send_request(method='POST', endpoint=request_handler, collection=collection, data=data, headers=headers, **kwargs)
return resp |
def set_temp_url_key(self, key=None):
"""
Sets the key for the Temporary URL for the account. It should be a key
that is secret to the owner.
If no key is provided, a UUID value will be generated and used. It can
later be obtained by calling get_temp_url_key().
"""
if key is None:
key = uuid.uuid4().hex
meta = {"Temp-Url-Key": key}
self.set_account_metadata(meta)
self._cached_temp_url_key = key | def function[set_temp_url_key, parameter[self, key]]:
constant[
Sets the key for the Temporary URL for the account. It should be a key
that is secret to the owner.
If no key is provided, a UUID value will be generated and used. It can
later be obtained by calling get_temp_url_key().
]
if compare[name[key] is constant[None]] begin[:]
variable[key] assign[=] call[name[uuid].uuid4, parameter[]].hex
variable[meta] assign[=] dictionary[[<ast.Constant object at 0x7da1b05597b0>], [<ast.Name object at 0x7da1b055a8c0>]]
call[name[self].set_account_metadata, parameter[name[meta]]]
name[self]._cached_temp_url_key assign[=] name[key] | keyword[def] identifier[set_temp_url_key] ( identifier[self] , identifier[key] = keyword[None] ):
literal[string]
keyword[if] identifier[key] keyword[is] keyword[None] :
identifier[key] = identifier[uuid] . identifier[uuid4] (). identifier[hex]
identifier[meta] ={ literal[string] : identifier[key] }
identifier[self] . identifier[set_account_metadata] ( identifier[meta] )
identifier[self] . identifier[_cached_temp_url_key] = identifier[key] | def set_temp_url_key(self, key=None):
"""
Sets the key for the Temporary URL for the account. It should be a key
that is secret to the owner.
If no key is provided, a UUID value will be generated and used. It can
later be obtained by calling get_temp_url_key().
"""
if key is None:
key = uuid.uuid4().hex # depends on [control=['if'], data=['key']]
meta = {'Temp-Url-Key': key}
self.set_account_metadata(meta)
self._cached_temp_url_key = key |
def QA_fetch_stock_realtime_adv(code=None,
num=1,
collections=DATABASE.get_collection('realtime_{}'.format(datetime.date.today()))):
'''
返回当日的上下五档, code可以是股票可以是list, num是每个股票获取的数量
:param code:
:param num:
:param collections: realtime_XXXX-XX-XX 每天实时时间
:return: DataFrame
'''
if code is not None:
# code 必须转换成list 去查询数据库
if isinstance(code, str):
code = [code]
elif isinstance(code, list):
pass
else:
print(
"QA Error QA_fetch_stock_realtime_adv parameter code is not List type or String type")
items_from_collections = [item for item in collections.find(
{'code': {'$in': code}}, limit=num*len(code), sort=[('datetime', pymongo.DESCENDING)])]
if items_from_collections is None:
print("QA Error QA_fetch_stock_realtime_adv find parameter code={} num={} collection={} return NOne".format(
code, num, collections))
return
data = pd.DataFrame(items_from_collections)
data_set_index = data.set_index(
['datetime', 'code'], drop=False).drop(['_id'], axis=1)
return data_set_index
else:
print("QA Error QA_fetch_stock_realtime_adv parameter code is None") | def function[QA_fetch_stock_realtime_adv, parameter[code, num, collections]]:
constant[
返回当日的上下五档, code可以是股票可以是list, num是每个股票获取的数量
:param code:
:param num:
:param collections: realtime_XXXX-XX-XX 每天实时时间
:return: DataFrame
]
if compare[name[code] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[code], name[str]]] begin[:]
variable[code] assign[=] list[[<ast.Name object at 0x7da1b20622c0>]]
variable[items_from_collections] assign[=] <ast.ListComp object at 0x7da1b2061420>
if compare[name[items_from_collections] is constant[None]] begin[:]
call[name[print], parameter[call[constant[QA Error QA_fetch_stock_realtime_adv find parameter code={} num={} collection={} return NOne].format, parameter[name[code], name[num], name[collections]]]]]
return[None]
variable[data] assign[=] call[name[pd].DataFrame, parameter[name[items_from_collections]]]
variable[data_set_index] assign[=] call[call[name[data].set_index, parameter[list[[<ast.Constant object at 0x7da1b1ea1e70>, <ast.Constant object at 0x7da1b1ea00a0>]]]].drop, parameter[list[[<ast.Constant object at 0x7da1b1ea2950>]]]]
return[name[data_set_index]] | keyword[def] identifier[QA_fetch_stock_realtime_adv] ( identifier[code] = keyword[None] ,
identifier[num] = literal[int] ,
identifier[collections] = identifier[DATABASE] . identifier[get_collection] ( literal[string] . identifier[format] ( identifier[datetime] . identifier[date] . identifier[today] ()))):
literal[string]
keyword[if] identifier[code] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[code] , identifier[str] ):
identifier[code] =[ identifier[code] ]
keyword[elif] identifier[isinstance] ( identifier[code] , identifier[list] ):
keyword[pass]
keyword[else] :
identifier[print] (
literal[string] )
identifier[items_from_collections] =[ identifier[item] keyword[for] identifier[item] keyword[in] identifier[collections] . identifier[find] (
{ literal[string] :{ literal[string] : identifier[code] }}, identifier[limit] = identifier[num] * identifier[len] ( identifier[code] ), identifier[sort] =[( literal[string] , identifier[pymongo] . identifier[DESCENDING] )])]
keyword[if] identifier[items_from_collections] keyword[is] keyword[None] :
identifier[print] ( literal[string] . identifier[format] (
identifier[code] , identifier[num] , identifier[collections] ))
keyword[return]
identifier[data] = identifier[pd] . identifier[DataFrame] ( identifier[items_from_collections] )
identifier[data_set_index] = identifier[data] . identifier[set_index] (
[ literal[string] , literal[string] ], identifier[drop] = keyword[False] ). identifier[drop] ([ literal[string] ], identifier[axis] = literal[int] )
keyword[return] identifier[data_set_index]
keyword[else] :
identifier[print] ( literal[string] ) | def QA_fetch_stock_realtime_adv(code=None, num=1, collections=DATABASE.get_collection('realtime_{}'.format(datetime.date.today()))):
"""
返回当日的上下五档, code可以是股票可以是list, num是每个股票获取的数量
:param code:
:param num:
:param collections: realtime_XXXX-XX-XX 每天实时时间
:return: DataFrame
"""
if code is not None:
# code 必须转换成list 去查询数据库
if isinstance(code, str):
code = [code] # depends on [control=['if'], data=[]]
elif isinstance(code, list):
pass # depends on [control=['if'], data=[]]
else:
print('QA Error QA_fetch_stock_realtime_adv parameter code is not List type or String type')
items_from_collections = [item for item in collections.find({'code': {'$in': code}}, limit=num * len(code), sort=[('datetime', pymongo.DESCENDING)])]
if items_from_collections is None:
print('QA Error QA_fetch_stock_realtime_adv find parameter code={} num={} collection={} return NOne'.format(code, num, collections))
return # depends on [control=['if'], data=[]]
data = pd.DataFrame(items_from_collections)
data_set_index = data.set_index(['datetime', 'code'], drop=False).drop(['_id'], axis=1)
return data_set_index # depends on [control=['if'], data=['code']]
else:
print('QA Error QA_fetch_stock_realtime_adv parameter code is None') |
def pubkey(self):
"""If the :py:obj:`PGPKey` object is a private key, this method returns a corresponding public key object with
all the trimmings. Otherwise, returns ``None``
"""
if not self.is_public:
if self._sibling is None or isinstance(self._sibling, weakref.ref):
# create a new key shell
pub = PGPKey()
pub.ascii_headers = self.ascii_headers.copy()
# get the public half of the primary key
pub._key = self._key.pubkey()
# get the public half of each subkey
for skid, subkey in self.subkeys.items():
pub |= subkey.pubkey
# copy user ids and user attributes
for uid in self._uids:
pub |= copy.copy(uid)
# copy signatures that weren't copied with uids
for sig in self._signatures:
if sig.parent is None:
pub |= copy.copy(sig)
# keep connect the two halves using a weak reference
self._sibling = weakref.ref(pub)
pub._sibling = weakref.ref(self)
return self._sibling()
return None | def function[pubkey, parameter[self]]:
constant[If the :py:obj:`PGPKey` object is a private key, this method returns a corresponding public key object with
all the trimmings. Otherwise, returns ``None``
]
if <ast.UnaryOp object at 0x7da1b088dbd0> begin[:]
if <ast.BoolOp object at 0x7da1b088f010> begin[:]
variable[pub] assign[=] call[name[PGPKey], parameter[]]
name[pub].ascii_headers assign[=] call[name[self].ascii_headers.copy, parameter[]]
name[pub]._key assign[=] call[name[self]._key.pubkey, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b088d4e0>, <ast.Name object at 0x7da1b088e200>]]] in starred[call[name[self].subkeys.items, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da1b088e410>
for taget[name[uid]] in starred[name[self]._uids] begin[:]
<ast.AugAssign object at 0x7da1b088e1a0>
for taget[name[sig]] in starred[name[self]._signatures] begin[:]
if compare[name[sig].parent is constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b078ccd0>
name[self]._sibling assign[=] call[name[weakref].ref, parameter[name[pub]]]
name[pub]._sibling assign[=] call[name[weakref].ref, parameter[name[self]]]
return[call[name[self]._sibling, parameter[]]]
return[constant[None]] | keyword[def] identifier[pubkey] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_public] :
keyword[if] identifier[self] . identifier[_sibling] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[self] . identifier[_sibling] , identifier[weakref] . identifier[ref] ):
identifier[pub] = identifier[PGPKey] ()
identifier[pub] . identifier[ascii_headers] = identifier[self] . identifier[ascii_headers] . identifier[copy] ()
identifier[pub] . identifier[_key] = identifier[self] . identifier[_key] . identifier[pubkey] ()
keyword[for] identifier[skid] , identifier[subkey] keyword[in] identifier[self] . identifier[subkeys] . identifier[items] ():
identifier[pub] |= identifier[subkey] . identifier[pubkey]
keyword[for] identifier[uid] keyword[in] identifier[self] . identifier[_uids] :
identifier[pub] |= identifier[copy] . identifier[copy] ( identifier[uid] )
keyword[for] identifier[sig] keyword[in] identifier[self] . identifier[_signatures] :
keyword[if] identifier[sig] . identifier[parent] keyword[is] keyword[None] :
identifier[pub] |= identifier[copy] . identifier[copy] ( identifier[sig] )
identifier[self] . identifier[_sibling] = identifier[weakref] . identifier[ref] ( identifier[pub] )
identifier[pub] . identifier[_sibling] = identifier[weakref] . identifier[ref] ( identifier[self] )
keyword[return] identifier[self] . identifier[_sibling] ()
keyword[return] keyword[None] | def pubkey(self):
"""If the :py:obj:`PGPKey` object is a private key, this method returns a corresponding public key object with
all the trimmings. Otherwise, returns ``None``
"""
if not self.is_public:
if self._sibling is None or isinstance(self._sibling, weakref.ref):
# create a new key shell
pub = PGPKey()
pub.ascii_headers = self.ascii_headers.copy()
# get the public half of the primary key
pub._key = self._key.pubkey()
# get the public half of each subkey
for (skid, subkey) in self.subkeys.items():
pub |= subkey.pubkey # depends on [control=['for'], data=[]]
# copy user ids and user attributes
for uid in self._uids:
pub |= copy.copy(uid) # depends on [control=['for'], data=['uid']]
# copy signatures that weren't copied with uids
for sig in self._signatures:
if sig.parent is None:
pub |= copy.copy(sig) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sig']]
# keep connect the two halves using a weak reference
self._sibling = weakref.ref(pub)
pub._sibling = weakref.ref(self) # depends on [control=['if'], data=[]]
return self._sibling() # depends on [control=['if'], data=[]]
return None |
def backward(self, out_grads=None):
"""Backward computation."""
assert self.binded and self.params_initialized
for i_layer, module in reversed(list(zip(range(len(self._modules)), self._modules))):
module.backward(out_grads=out_grads)
if i_layer == 0:
break
out_grads = module.get_input_grads() | def function[backward, parameter[self, out_grads]]:
constant[Backward computation.]
assert[<ast.BoolOp object at 0x7da204346050>]
for taget[tuple[[<ast.Name object at 0x7da20e9565f0>, <ast.Name object at 0x7da20e9549a0>]]] in starred[call[name[reversed], parameter[call[name[list], parameter[call[name[zip], parameter[call[name[range], parameter[call[name[len], parameter[name[self]._modules]]]], name[self]._modules]]]]]]] begin[:]
call[name[module].backward, parameter[]]
if compare[name[i_layer] equal[==] constant[0]] begin[:]
break
variable[out_grads] assign[=] call[name[module].get_input_grads, parameter[]] | keyword[def] identifier[backward] ( identifier[self] , identifier[out_grads] = keyword[None] ):
literal[string]
keyword[assert] identifier[self] . identifier[binded] keyword[and] identifier[self] . identifier[params_initialized]
keyword[for] identifier[i_layer] , identifier[module] keyword[in] identifier[reversed] ( identifier[list] ( identifier[zip] ( identifier[range] ( identifier[len] ( identifier[self] . identifier[_modules] )), identifier[self] . identifier[_modules] ))):
identifier[module] . identifier[backward] ( identifier[out_grads] = identifier[out_grads] )
keyword[if] identifier[i_layer] == literal[int] :
keyword[break]
identifier[out_grads] = identifier[module] . identifier[get_input_grads] () | def backward(self, out_grads=None):
"""Backward computation."""
assert self.binded and self.params_initialized
for (i_layer, module) in reversed(list(zip(range(len(self._modules)), self._modules))):
module.backward(out_grads=out_grads)
if i_layer == 0:
break # depends on [control=['if'], data=[]]
out_grads = module.get_input_grads() # depends on [control=['for'], data=[]] |
def ffill_query_in_range(expr,
lower,
upper,
checkpoints=None,
odo_kwargs=None,
ts_field=TS_FIELD_NAME):
"""Query a blaze expression in a given time range properly forward filling
from values that fall before the lower date.
Parameters
----------
expr : Expr
Bound blaze expression.
lower : datetime
The lower date to query for.
upper : datetime
The upper date to query for.
checkpoints : Expr, optional
Bound blaze expression for a checkpoints table from which to get a
computed lower bound.
odo_kwargs : dict, optional
The extra keyword arguments to pass to ``odo``.
ts_field : str, optional
The name of the timestamp field in the given blaze expression.
Returns
-------
raw : pd.DataFrame
A strict dataframe for the data in the given date range. This may
start before the requested start date if a value is needed to ffill.
"""
odo_kwargs = odo_kwargs or {}
computed_lower, materialized_checkpoints = get_materialized_checkpoints(
checkpoints,
expr.fields,
lower,
odo_kwargs,
)
pred = expr[ts_field] <= upper
if computed_lower is not None:
# only constrain the lower date if we computed a new lower date
pred &= expr[ts_field] >= computed_lower
raw = pd.concat(
(
materialized_checkpoints,
odo(
expr[pred],
pd.DataFrame,
**odo_kwargs
),
),
ignore_index=True,
)
raw.loc[:, ts_field] = raw.loc[:, ts_field].astype('datetime64[ns]')
return raw | def function[ffill_query_in_range, parameter[expr, lower, upper, checkpoints, odo_kwargs, ts_field]]:
constant[Query a blaze expression in a given time range properly forward filling
from values that fall before the lower date.
Parameters
----------
expr : Expr
Bound blaze expression.
lower : datetime
The lower date to query for.
upper : datetime
The upper date to query for.
checkpoints : Expr, optional
Bound blaze expression for a checkpoints table from which to get a
computed lower bound.
odo_kwargs : dict, optional
The extra keyword arguments to pass to ``odo``.
ts_field : str, optional
The name of the timestamp field in the given blaze expression.
Returns
-------
raw : pd.DataFrame
A strict dataframe for the data in the given date range. This may
start before the requested start date if a value is needed to ffill.
]
variable[odo_kwargs] assign[=] <ast.BoolOp object at 0x7da18bc71ae0>
<ast.Tuple object at 0x7da18bc71660> assign[=] call[name[get_materialized_checkpoints], parameter[name[checkpoints], name[expr].fields, name[lower], name[odo_kwargs]]]
variable[pred] assign[=] compare[call[name[expr]][name[ts_field]] less_or_equal[<=] name[upper]]
if compare[name[computed_lower] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da18bc71e40>
variable[raw] assign[=] call[name[pd].concat, parameter[tuple[[<ast.Name object at 0x7da18bc71990>, <ast.Call object at 0x7da18bc72200>]]]]
call[name[raw].loc][tuple[[<ast.Slice object at 0x7da18bc73970>, <ast.Name object at 0x7da18bc739d0>]]] assign[=] call[call[name[raw].loc][tuple[[<ast.Slice object at 0x7da18bc71f90>, <ast.Name object at 0x7da18bc71180>]]].astype, parameter[constant[datetime64[ns]]]]
return[name[raw]] | keyword[def] identifier[ffill_query_in_range] ( identifier[expr] ,
identifier[lower] ,
identifier[upper] ,
identifier[checkpoints] = keyword[None] ,
identifier[odo_kwargs] = keyword[None] ,
identifier[ts_field] = identifier[TS_FIELD_NAME] ):
literal[string]
identifier[odo_kwargs] = identifier[odo_kwargs] keyword[or] {}
identifier[computed_lower] , identifier[materialized_checkpoints] = identifier[get_materialized_checkpoints] (
identifier[checkpoints] ,
identifier[expr] . identifier[fields] ,
identifier[lower] ,
identifier[odo_kwargs] ,
)
identifier[pred] = identifier[expr] [ identifier[ts_field] ]<= identifier[upper]
keyword[if] identifier[computed_lower] keyword[is] keyword[not] keyword[None] :
identifier[pred] &= identifier[expr] [ identifier[ts_field] ]>= identifier[computed_lower]
identifier[raw] = identifier[pd] . identifier[concat] (
(
identifier[materialized_checkpoints] ,
identifier[odo] (
identifier[expr] [ identifier[pred] ],
identifier[pd] . identifier[DataFrame] ,
** identifier[odo_kwargs]
),
),
identifier[ignore_index] = keyword[True] ,
)
identifier[raw] . identifier[loc] [:, identifier[ts_field] ]= identifier[raw] . identifier[loc] [:, identifier[ts_field] ]. identifier[astype] ( literal[string] )
keyword[return] identifier[raw] | def ffill_query_in_range(expr, lower, upper, checkpoints=None, odo_kwargs=None, ts_field=TS_FIELD_NAME):
"""Query a blaze expression in a given time range properly forward filling
from values that fall before the lower date.
Parameters
----------
expr : Expr
Bound blaze expression.
lower : datetime
The lower date to query for.
upper : datetime
The upper date to query for.
checkpoints : Expr, optional
Bound blaze expression for a checkpoints table from which to get a
computed lower bound.
odo_kwargs : dict, optional
The extra keyword arguments to pass to ``odo``.
ts_field : str, optional
The name of the timestamp field in the given blaze expression.
Returns
-------
raw : pd.DataFrame
A strict dataframe for the data in the given date range. This may
start before the requested start date if a value is needed to ffill.
"""
odo_kwargs = odo_kwargs or {}
(computed_lower, materialized_checkpoints) = get_materialized_checkpoints(checkpoints, expr.fields, lower, odo_kwargs)
pred = expr[ts_field] <= upper
if computed_lower is not None:
# only constrain the lower date if we computed a new lower date
pred &= expr[ts_field] >= computed_lower # depends on [control=['if'], data=['computed_lower']]
raw = pd.concat((materialized_checkpoints, odo(expr[pred], pd.DataFrame, **odo_kwargs)), ignore_index=True)
raw.loc[:, ts_field] = raw.loc[:, ts_field].astype('datetime64[ns]')
return raw |
def acis_request(method, params):
"""Request data from the ACIS Web Services API.
Makes a request from the ACIS Web Services API for data
based on a given method (StnMeta,StnData,MultiStnData,GridData,General)
and parameters string. Information about the parameters can be obtained at:
http://www.rcc-acis.org/docs_webservices.html
If a connection to the API fails, then it will raise an exception. Some bad
calls will also return empty dictionaries.
ACIS Web Services is a distributed system! A call to the main URL can be
delivered to any climate center running a public instance of the service.
This makes the calls efficient, but also occasionaly results in failed
calls when a server you are directed to is having problems. Generally,
reconnecting after waiting a few seconds will resolve a problem. If problems
are persistent, contact ACIS developers at the High Plains Regional Climate
Center or Northeast Regional Climate Center who will look into server
issues.
Parameters
----------
method : str
The Web Services request method (StnMeta, StnData, MultiStnData, GridData, General)
params : dict
A JSON array of parameters (See Web Services API)
Returns
-------
A dictionary of data based on the JSON parameters
Raises
------
:class: `ACIS_API_Exception`
When the API is unable to establish a connection or returns
unparsable data.
"""
base_url = 'http://data.rcc-acis.org/' # ACIS Web API URL
timeout = 300 if method == 'MultiStnData' else 60
try:
response = session_manager.create_session().post(base_url + method, json=params,
timeout=timeout)
return response.json()
except requests.exceptions.Timeout:
raise AcisApiException('Connection Timeout')
except requests.exceptions.TooManyRedirects:
raise AcisApiException('Bad URL. Check your ACIS connection method string.')
except ValueError:
raise AcisApiException('No data returned! The ACIS parameter dictionary'
'may be incorrectly formatted') | def function[acis_request, parameter[method, params]]:
constant[Request data from the ACIS Web Services API.
Makes a request from the ACIS Web Services API for data
based on a given method (StnMeta,StnData,MultiStnData,GridData,General)
and parameters string. Information about the parameters can be obtained at:
http://www.rcc-acis.org/docs_webservices.html
If a connection to the API fails, then it will raise an exception. Some bad
calls will also return empty dictionaries.
ACIS Web Services is a distributed system! A call to the main URL can be
delivered to any climate center running a public instance of the service.
This makes the calls efficient, but also occasionaly results in failed
calls when a server you are directed to is having problems. Generally,
reconnecting after waiting a few seconds will resolve a problem. If problems
are persistent, contact ACIS developers at the High Plains Regional Climate
Center or Northeast Regional Climate Center who will look into server
issues.
Parameters
----------
method : str
The Web Services request method (StnMeta, StnData, MultiStnData, GridData, General)
params : dict
A JSON array of parameters (See Web Services API)
Returns
-------
A dictionary of data based on the JSON parameters
Raises
------
:class: `ACIS_API_Exception`
When the API is unable to establish a connection or returns
unparsable data.
]
variable[base_url] assign[=] constant[http://data.rcc-acis.org/]
variable[timeout] assign[=] <ast.IfExp object at 0x7da1b11c3160>
<ast.Try object at 0x7da1b11c0bb0> | keyword[def] identifier[acis_request] ( identifier[method] , identifier[params] ):
literal[string]
identifier[base_url] = literal[string]
identifier[timeout] = literal[int] keyword[if] identifier[method] == literal[string] keyword[else] literal[int]
keyword[try] :
identifier[response] = identifier[session_manager] . identifier[create_session] (). identifier[post] ( identifier[base_url] + identifier[method] , identifier[json] = identifier[params] ,
identifier[timeout] = identifier[timeout] )
keyword[return] identifier[response] . identifier[json] ()
keyword[except] identifier[requests] . identifier[exceptions] . identifier[Timeout] :
keyword[raise] identifier[AcisApiException] ( literal[string] )
keyword[except] identifier[requests] . identifier[exceptions] . identifier[TooManyRedirects] :
keyword[raise] identifier[AcisApiException] ( literal[string] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[AcisApiException] ( literal[string]
literal[string] ) | def acis_request(method, params):
"""Request data from the ACIS Web Services API.
Makes a request from the ACIS Web Services API for data
based on a given method (StnMeta,StnData,MultiStnData,GridData,General)
and parameters string. Information about the parameters can be obtained at:
http://www.rcc-acis.org/docs_webservices.html
If a connection to the API fails, then it will raise an exception. Some bad
calls will also return empty dictionaries.
ACIS Web Services is a distributed system! A call to the main URL can be
delivered to any climate center running a public instance of the service.
This makes the calls efficient, but also occasionaly results in failed
calls when a server you are directed to is having problems. Generally,
reconnecting after waiting a few seconds will resolve a problem. If problems
are persistent, contact ACIS developers at the High Plains Regional Climate
Center or Northeast Regional Climate Center who will look into server
issues.
Parameters
----------
method : str
The Web Services request method (StnMeta, StnData, MultiStnData, GridData, General)
params : dict
A JSON array of parameters (See Web Services API)
Returns
-------
A dictionary of data based on the JSON parameters
Raises
------
:class: `ACIS_API_Exception`
When the API is unable to establish a connection or returns
unparsable data.
"""
base_url = 'http://data.rcc-acis.org/' # ACIS Web API URL
timeout = 300 if method == 'MultiStnData' else 60
try:
response = session_manager.create_session().post(base_url + method, json=params, timeout=timeout)
return response.json() # depends on [control=['try'], data=[]]
except requests.exceptions.Timeout:
raise AcisApiException('Connection Timeout') # depends on [control=['except'], data=[]]
except requests.exceptions.TooManyRedirects:
raise AcisApiException('Bad URL. Check your ACIS connection method string.') # depends on [control=['except'], data=[]]
except ValueError:
raise AcisApiException('No data returned! The ACIS parameter dictionarymay be incorrectly formatted') # depends on [control=['except'], data=[]] |
def draw_lineage(self, recs, nodecolor="mediumseagreen",
                 edgecolor="lightslateblue", dpi=96,
                 lineage_img="GO_lineage.png", engine="pygraphviz",
                 gml=False, draw_parents=True, draw_children=True):
    """Draw a GO DAG subplot for the given term records.

    @param recs: GO term records whose lineage should be drawn.
    @param nodecolor: fill color for the highlighted nodes.
    @param edgecolor: color used for the graph edges.
    @param dpi: resolution of the rendered image.
    @param lineage_img: path of the PNG file written to disk.
    @param engine: graph backend; must be one of ``GraphEngines``
        ("pygraphviz" or "pydot").
    @param gml: when True, additionally export the graph in GML format
        (uses networkx for the conversion).
    @param draw_parents: include ancestor terms in the plot.
    @param draw_children: include descendant terms in the plot.
    """
    assert engine in GraphEngines
    grph = None
    if engine == "pygraphviz":
        grph = self.make_graph_pygraphviz(recs, nodecolor, edgecolor, dpi,
                                          draw_parents=draw_parents,
                                          draw_children=draw_children)
    else:
        grph = self.make_graph_pydot(recs, nodecolor, edgecolor, dpi,
                                     draw_parents=draw_parents, draw_children=draw_children)
    if gml:
        import networkx as nx  # use networkx to do the conversion
        gmlbase = lineage_img.rsplit(".", 1)[0]
        obj = nx.from_agraph(grph) if engine == "pygraphviz" else nx.from_pydot(grph)
        # GML cannot serialize the backend's default node/edge attribute dicts
        del obj.graph['node']
        del obj.graph['edge']
        gmlfile = gmlbase + ".gml"
        # BUGFIX: write the converted graph itself. The original passed
        # ``self.label_wrap`` here, which left ``obj`` entirely unused and
        # serialized the wrong object.
        nx.write_gml(obj, gmlfile)
        sys.stderr.write("GML graph written to {0}\n".format(gmlfile))
    sys.stderr.write(("lineage info for terms %s written to %s\n" %
                      ([rec.item_id for rec in recs], lineage_img)))
    if engine == "pygraphviz":
        grph.draw(lineage_img, prog="dot")
    else:
        grph.write_png(lineage_img)
constant[Draw GO DAG subplot.]
assert[compare[name[engine] in name[GraphEngines]]]
variable[grph] assign[=] constant[None]
if compare[name[engine] equal[==] constant[pygraphviz]] begin[:]
variable[grph] assign[=] call[name[self].make_graph_pygraphviz, parameter[name[recs], name[nodecolor], name[edgecolor], name[dpi]]]
if name[gml] begin[:]
import module[networkx] as alias[nx]
variable[gmlbase] assign[=] call[call[name[lineage_img].rsplit, parameter[constant[.], constant[1]]]][constant[0]]
variable[obj] assign[=] <ast.IfExp object at 0x7da18bc72920>
<ast.Delete object at 0x7da18bc73160>
<ast.Delete object at 0x7da18bc73d60>
variable[gmlfile] assign[=] binary_operation[name[gmlbase] + constant[.gml]]
call[name[nx].write_gml, parameter[name[self].label_wrap, name[gmlfile]]]
call[name[sys].stderr.write, parameter[call[constant[GML graph written to {0}
].format, parameter[name[gmlfile]]]]]
call[name[sys].stderr.write, parameter[binary_operation[constant[lineage info for terms %s written to %s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.ListComp object at 0x7da20e9b0430>, <ast.Name object at 0x7da20c6a9660>]]]]]
if compare[name[engine] equal[==] constant[pygraphviz]] begin[:]
call[name[grph].draw, parameter[name[lineage_img]]] | keyword[def] identifier[draw_lineage] ( identifier[self] , identifier[recs] , identifier[nodecolor] = literal[string] ,
identifier[edgecolor] = literal[string] , identifier[dpi] = literal[int] ,
identifier[lineage_img] = literal[string] , identifier[engine] = literal[string] ,
identifier[gml] = keyword[False] , identifier[draw_parents] = keyword[True] , identifier[draw_children] = keyword[True] ):
literal[string]
keyword[assert] identifier[engine] keyword[in] identifier[GraphEngines]
identifier[grph] = keyword[None]
keyword[if] identifier[engine] == literal[string] :
identifier[grph] = identifier[self] . identifier[make_graph_pygraphviz] ( identifier[recs] , identifier[nodecolor] , identifier[edgecolor] , identifier[dpi] ,
identifier[draw_parents] = identifier[draw_parents] ,
identifier[draw_children] = identifier[draw_children] )
keyword[else] :
identifier[grph] = identifier[self] . identifier[make_graph_pydot] ( identifier[recs] , identifier[nodecolor] , identifier[edgecolor] , identifier[dpi] ,
identifier[draw_parents] = identifier[draw_parents] , identifier[draw_children] = identifier[draw_children] )
keyword[if] identifier[gml] :
keyword[import] identifier[networkx] keyword[as] identifier[nx]
identifier[gmlbase] = identifier[lineage_img] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]
identifier[obj] = identifier[nx] . identifier[from_agraph] ( identifier[grph] ) keyword[if] identifier[engine] == literal[string] keyword[else] identifier[nx] . identifier[from_pydot] ( identifier[grph] )
keyword[del] identifier[obj] . identifier[graph] [ literal[string] ]
keyword[del] identifier[obj] . identifier[graph] [ literal[string] ]
identifier[gmlfile] = identifier[gmlbase] + literal[string]
identifier[nx] . identifier[write_gml] ( identifier[self] . identifier[label_wrap] , identifier[gmlfile] )
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] . identifier[format] ( identifier[gmlfile] ))
identifier[sys] . identifier[stderr] . identifier[write] (( literal[string] %
([ identifier[rec] . identifier[item_id] keyword[for] identifier[rec] keyword[in] identifier[recs] ], identifier[lineage_img] )))
keyword[if] identifier[engine] == literal[string] :
identifier[grph] . identifier[draw] ( identifier[lineage_img] , identifier[prog] = literal[string] )
keyword[else] :
identifier[grph] . identifier[write_png] ( identifier[lineage_img] ) | def draw_lineage(self, recs, nodecolor='mediumseagreen', edgecolor='lightslateblue', dpi=96, lineage_img='GO_lineage.png', engine='pygraphviz', gml=False, draw_parents=True, draw_children=True):
"""Draw GO DAG subplot."""
assert engine in GraphEngines
grph = None
if engine == 'pygraphviz':
grph = self.make_graph_pygraphviz(recs, nodecolor, edgecolor, dpi, draw_parents=draw_parents, draw_children=draw_children) # depends on [control=['if'], data=[]]
else:
grph = self.make_graph_pydot(recs, nodecolor, edgecolor, dpi, draw_parents=draw_parents, draw_children=draw_children)
if gml:
import networkx as nx # use networkx to do the conversion
gmlbase = lineage_img.rsplit('.', 1)[0]
obj = nx.from_agraph(grph) if engine == 'pygraphviz' else nx.from_pydot(grph)
del obj.graph['node']
del obj.graph['edge']
gmlfile = gmlbase + '.gml'
nx.write_gml(self.label_wrap, gmlfile)
sys.stderr.write('GML graph written to {0}\n'.format(gmlfile)) # depends on [control=['if'], data=[]]
sys.stderr.write('lineage info for terms %s written to %s\n' % ([rec.item_id for rec in recs], lineage_img))
if engine == 'pygraphviz':
grph.draw(lineage_img, prog='dot') # depends on [control=['if'], data=[]]
else:
grph.write_png(lineage_img) |
def do_DBINFO(self, *args):
    """Print some useful infos from Redis DB.

    Dumps a fixed set of server statistics from ``INFO`` followed by the
    number of keys stored in each configured database.
    """
    info = DB.info()
    keys = [
        'keyspace_misses', 'keyspace_hits', 'used_memory_human',
        'total_commands_processed', 'total_connections_received',
        'connected_clients']
    for key in keys:
        print('{}: {}'.format(white(key), blue(info[key])))
    nb_of_redis_db = int(DB.config_get('databases')['databases'])
    # Redis databases are numbered 0 .. databases-1, so range(N) covers them
    # all; the previous ``nb_of_redis_db - 1`` bound silently skipped the
    # last database.
    for db_index in range(nb_of_redis_db):
        db_name = 'db{}'.format(db_index)
        # INFO only reports databases that actually contain keys
        if db_name in info:
            label = white('nb keys (db {})'.format(db_index))
            print('{}: {}'.format(label, blue(info[db_name]['keys'])))
constant[Print some useful infos from Redis DB.]
variable[info] assign[=] call[name[DB].info, parameter[]]
variable[keys] assign[=] list[[<ast.Constant object at 0x7da1b05bd510>, <ast.Constant object at 0x7da1b05bea10>, <ast.Constant object at 0x7da1b05beda0>, <ast.Constant object at 0x7da1b05be590>, <ast.Constant object at 0x7da1b05bfee0>, <ast.Constant object at 0x7da1b05bf130>]]
for taget[name[key]] in starred[name[keys]] begin[:]
call[name[print], parameter[call[constant[{}: {}].format, parameter[call[name[white], parameter[name[key]]], call[name[blue], parameter[call[name[info]][name[key]]]]]]]]
variable[nb_of_redis_db] assign[=] call[name[int], parameter[call[call[name[DB].config_get, parameter[constant[databases]]]][constant[databases]]]]
for taget[name[db_index]] in starred[call[name[range], parameter[binary_operation[name[nb_of_redis_db] - constant[1]]]]] begin[:]
variable[db_name] assign[=] call[constant[db{}].format, parameter[name[db_index]]]
if compare[name[db_name] in name[info]] begin[:]
variable[label] assign[=] call[name[white], parameter[call[constant[nb keys (db {})].format, parameter[name[db_index]]]]]
call[name[print], parameter[call[constant[{}: {}].format, parameter[name[label], call[name[blue], parameter[call[call[name[info]][name[db_name]]][constant[keys]]]]]]]] | keyword[def] identifier[do_DBINFO] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[info] = identifier[DB] . identifier[info] ()
identifier[keys] =[
literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] ]
keyword[for] identifier[key] keyword[in] identifier[keys] :
identifier[print] ( literal[string] . identifier[format] ( identifier[white] ( identifier[key] ), identifier[blue] ( identifier[info] [ identifier[key] ])))
identifier[nb_of_redis_db] = identifier[int] ( identifier[DB] . identifier[config_get] ( literal[string] )[ literal[string] ])
keyword[for] identifier[db_index] keyword[in] identifier[range] ( identifier[nb_of_redis_db] - literal[int] ):
identifier[db_name] = literal[string] . identifier[format] ( identifier[db_index] )
keyword[if] identifier[db_name] keyword[in] identifier[info] :
identifier[label] = identifier[white] ( literal[string] . identifier[format] ( identifier[db_index] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[label] , identifier[blue] ( identifier[info] [ identifier[db_name] ][ literal[string] ]))) | def do_DBINFO(self, *args):
"""Print some useful infos from Redis DB."""
info = DB.info()
keys = ['keyspace_misses', 'keyspace_hits', 'used_memory_human', 'total_commands_processed', 'total_connections_received', 'connected_clients']
for key in keys:
print('{}: {}'.format(white(key), blue(info[key]))) # depends on [control=['for'], data=['key']]
nb_of_redis_db = int(DB.config_get('databases')['databases'])
for db_index in range(nb_of_redis_db - 1):
db_name = 'db{}'.format(db_index)
if db_name in info:
label = white('nb keys (db {})'.format(db_index))
print('{}: {}'.format(label, blue(info[db_name]['keys']))) # depends on [control=['if'], data=['db_name', 'info']] # depends on [control=['for'], data=['db_index']] |
def derivative(self, x):
    r"""Return the derivative operator in the "C = R^2" sense.
    The returned operator (``self``) is the derivative of the
    operator variant where the complex domain is reinterpreted as
    a product of two real spaces.
    Parameters
    ----------
    x : `domain` element
        Point in which to take the derivative.
    Examples
    --------
    >>> c2 = odl.cn(2)
    >>> op = odl.ComplexModulusSquared(c2)
    >>> op([3 + 4j, 2])
    rn(2).element([ 25., 4.])
    >>> deriv = op.derivative([3 + 4j, 2])
    >>> deriv.domain
    cn(2)
    >>> deriv.range
    rn(2)
    >>> deriv([2 + 1j, 4j]) # [(3*2 + 4*1) / 5, (2*0 + 0*4) / 2]
    rn(2).element([ 10., 0.])
    Notes
    -----
    The derivative of the squared complex modulus
    .. math::
        &S: X(\mathbb{C}) \to X(\mathbb{R}), \\
        &S(x) = \Re(x)^2 + \Im(x)^2,
    with :math:`X(\mathbb{F}) = \mathbb{F}^n` or
    :math:`L^2(\Omega, \mathbb{F})`, is given as
    .. math::
        &S'(x): X(\mathbb{C}) \to X(\mathbb{R}), \\
        &S'(x)(y) = \Re(x)\,\Re(y) + \Im(x)\,\Im(y).
    It is linear when identifying :math:`\mathbb{C}` with
    :math:`\mathbb{R}^2`, but not complex-linear.
    """
    # Keep a reference to the enclosing operator for the closure below.
    op = self
    # Wrap the derivative point so ``x.real`` / ``x.imag`` views exist.
    x = self.domain.element(x)
    class ComplexModulusSquaredDerivative(Operator):
        """Derivative of the squared complex modulus operator."""
        def _call(self, y, out):
            """Return ``self(y)``."""
            x.real.multiply(y.real, out=out)  # out <- Re(x) * Re(y)
            out += x.imag * y.imag  # out <- Re(x)*Re(y) + Im(x)*Im(y)
            return out
        @property
        def adjoint(self):
            r"""Adjoint in the "C = R^2" sense.
            Adjoint of the derivative:
            Examples
            --------
            >>> c2 = odl.cn(2)
            >>> op = odl.ComplexModulusSquared(c2)
            >>> deriv = op.derivative([3 + 4j, 2])
            >>> adj = deriv.adjoint
            >>> adj.domain
            rn(2)
            >>> adj.range
            cn(2)
            >>> adj([2, 1]) # [2*(3 + 4j), 1*2]
            cn(2).element([ 6.+8.j, 2.+0.j])
            Adjointness only holds in the weaker sense that inner products
            are the same when testing with vectors from the real space, but
            not when testing complex vectors:
            >>> y1 = deriv.range.element([1, 1])
            >>> y2 = deriv.range.element([1, -1])
            >>> adj(y1).inner(adj(y2)) # <M^* y1, M^* y2>
            (21+0j)
            >>> deriv(adj(y1)).inner(y2) # <M M^* y1, y2>
            21.0
            >>> x1 = deriv.domain.element([1j, 1j])
            >>> x2 = deriv.domain.element([1 + 1j, 1j])
            >>> deriv(x1).inner(deriv(x2)) # <M x1, M x2>
            28.0
            >>> adj(deriv(x1)).inner(x2) # <M^* M x1, x2>
            (28+4j)
            Notes
            -----
            The squared complex modulus derivative is given by
            .. math::
                &S'(x): X(\mathbb{C}) \to X(\mathbb{R}), \\
                &S'(x)(y) = \Re(x)\,\Re(y) + \Im(x)\,\Im(y).
            Thus, its adjoint can (formally) be identified as
            .. math::
                &S'(x)^*: X(\mathbb{R}) \to X(\mathbb{C}), \\
                &S'(x)^*(u) = (\Re(x)\,u,\ \Im(x)\,u).
            The operator :math:`A = S'(x)` has the weak adjointness
            property
            .. math::
                \langle A^* y_1,\ A^* y_2 \rangle_{X(\mathbb{C})} =
                \langle AA^* y_1,\ y_2 \rangle_{X(\mathbb{R})},
            but in general,
            .. math::
                \langle A x,\ y \rangle_{X(\mathbb{R})} \neq
                \langle x,\ A^* y \rangle_{X(\mathbb{C})},
            in particular
            .. math::
                \langle A x_1,\ A x_2 \rangle_{X(\mathbb{R})} \neq
                \langle A^*A x_1,\ x_2 \rangle_{X(\mathbb{C})}.
            """
            # Reference so the adjoint's adjoint can return the derivative.
            deriv = self
            class ComplexModulusSquaredDerivAdj(Operator):
                def _call(self, u, out):
                    """Implement ``self(u, out)``."""
                    out.assign(x)  # start from x = Re(x) + i Im(x)
                    out.real *= u  # Re(out) <- Re(x) * u
                    out.imag *= u  # Im(out) <- Im(x) * u
                    return out
                @property
                def adjoint(self):
                    """Adjoint in the "C = R^2" sense."""
                    return deriv
            # Linear only when the domain is real (C identified with R^2).
            return ComplexModulusSquaredDerivAdj(
                deriv.range, deriv.domain, linear=deriv.domain.is_real)
    # Linear only when the domain is real (C identified with R^2).
    return ComplexModulusSquaredDerivative(op.domain, op.range,
                                           linear=op.domain.is_real)
constant[Return the derivative operator in the "C = R^2" sense.
The returned operator (``self``) is the derivative of the
operator variant where the complex domain is reinterpreted as
a product of two real spaces.
Parameters
----------
x : `domain` element
Point in which to take the derivative.
Examples
--------
>>> c2 = odl.cn(2)
>>> op = odl.ComplexModulusSquared(c2)
>>> op([3 + 4j, 2])
rn(2).element([ 25., 4.])
>>> deriv = op.derivative([3 + 4j, 2])
>>> deriv.domain
cn(2)
>>> deriv.range
rn(2)
>>> deriv([2 + 1j, 4j]) # [(3*2 + 4*1) / 5, (2*0 + 0*4) / 2]
rn(2).element([ 10., 0.])
Notes
-----
The derivative of the squared complex modulus
.. math::
&S: X(\mathbb{C}) \to X(\mathbb{R}), \\
&S(x) = \Re(x)^2 + \Im(x)^2,
with :math:`X(\mathbb{F}) = \mathbb{F}^n` or
:math:`L^2(\Omega, \mathbb{F})`, is given as
.. math::
&S'(x): X(\mathbb{C}) \to X(\mathbb{R}), \\
&S'(x)(y) = \Re(x)\,\Re(y) + \Im(x)\,\Im(y).
It is linear when identifying :math:`\mathbb{C}` with
:math:`\mathbb{R}^2`, but not complex-linear.
]
variable[op] assign[=] name[self]
variable[x] assign[=] call[name[self].domain.element, parameter[name[x]]]
class class[ComplexModulusSquaredDerivative, parameter[]] begin[:]
constant[Derivative of the squared complex modulus operator.]
def function[_call, parameter[self, y, out]]:
constant[Return ``self(y)``.]
call[name[x].real.multiply, parameter[name[y].real]]
<ast.AugAssign object at 0x7da18bc739d0>
return[name[out]]
def function[adjoint, parameter[self]]:
constant[Adjoint in the "C = R^2" sense.
Adjoint of the derivative:
Examples
--------
>>> c2 = odl.cn(2)
>>> op = odl.ComplexModulusSquared(c2)
>>> deriv = op.derivative([3 + 4j, 2])
>>> adj = deriv.adjoint
>>> adj.domain
rn(2)
>>> adj.range
cn(2)
>>> adj([2, 1]) # [2*(3 + 4j), 1*2]
cn(2).element([ 6.+8.j, 2.+0.j])
Adjointness only holds in the weaker sense that inner products
are the same when testing with vectors from the real space, but
not when testing complex vectors:
>>> y1 = deriv.range.element([1, 1])
>>> y2 = deriv.range.element([1, -1])
>>> adj(y1).inner(adj(y2)) # <M^* y1, M^* y2>
(21+0j)
>>> deriv(adj(y1)).inner(y2) # <M M^* y1, y2>
21.0
>>> x1 = deriv.domain.element([1j, 1j])
>>> x2 = deriv.domain.element([1 + 1j, 1j])
>>> deriv(x1).inner(deriv(x2)) # <M x1, M x2>
28.0
>>> adj(deriv(x1)).inner(x2) # <M^* M x1, x2>
(28+4j)
Notes
-----
The squared complex modulus derivative is given by
.. math::
&S'(x): X(\mathbb{C}) \to X(\mathbb{R}), \\
&S'(x)(y) = \Re(x)\,\Re(y) + \Im(x)\,\Im(y).
Thus, its adjoint can (formally) be identified as
.. math::
&S'(x)^*: X(\mathbb{R}) \to X(\mathbb{C}), \\
&S'(x)^*(u) = (\Re(x)\,u,\ \Im(x)\,u).
The operator :math:`A = S'(x)` has the weak adjointness
property
.. math::
\langle A^* y_1,\ A^* y_2 \rangle_{X(\mathbb{C})} =
\langle AA^* y_1,\ y_2 \rangle_{X(\mathbb{R})},
but in general,
.. math::
\langle A x,\ y \rangle_{X(\mathbb{R})} \neq
\langle x,\ A^* y \rangle_{X(\mathbb{C})},
in particular
.. math::
\langle A x_1,\ A x_2 \rangle_{X(\mathbb{R})} \neq
\langle A^*A x_1,\ x_2 \rangle_{X(\mathbb{C})}.
]
variable[deriv] assign[=] name[self]
class class[ComplexModulusSquaredDerivAdj, parameter[]] begin[:]
def function[_call, parameter[self, u, out]]:
constant[Implement ``self(u, out)``.]
call[name[out].assign, parameter[name[x]]]
<ast.AugAssign object at 0x7da18bc72aa0>
<ast.AugAssign object at 0x7da18bc70370>
return[name[out]]
def function[adjoint, parameter[self]]:
constant[Adjoint in the "C = R^2" sense.]
return[name[deriv]]
return[call[name[ComplexModulusSquaredDerivAdj], parameter[name[deriv].range, name[deriv].domain]]]
return[call[name[ComplexModulusSquaredDerivative], parameter[name[op].domain, name[op].range]]] | keyword[def] identifier[derivative] ( identifier[self] , identifier[x] ):
literal[string]
identifier[op] = identifier[self]
identifier[x] = identifier[self] . identifier[domain] . identifier[element] ( identifier[x] )
keyword[class] identifier[ComplexModulusSquaredDerivative] ( identifier[Operator] ):
literal[string]
keyword[def] identifier[_call] ( identifier[self] , identifier[y] , identifier[out] ):
literal[string]
identifier[x] . identifier[real] . identifier[multiply] ( identifier[y] . identifier[real] , identifier[out] = identifier[out] )
identifier[out] += identifier[x] . identifier[imag] * identifier[y] . identifier[imag]
keyword[return] identifier[out]
@ identifier[property]
keyword[def] identifier[adjoint] ( identifier[self] ):
literal[string]
identifier[deriv] = identifier[self]
keyword[class] identifier[ComplexModulusSquaredDerivAdj] ( identifier[Operator] ):
keyword[def] identifier[_call] ( identifier[self] , identifier[u] , identifier[out] ):
literal[string]
identifier[out] . identifier[assign] ( identifier[x] )
identifier[out] . identifier[real] *= identifier[u]
identifier[out] . identifier[imag] *= identifier[u]
keyword[return] identifier[out]
@ identifier[property]
keyword[def] identifier[adjoint] ( identifier[self] ):
literal[string]
keyword[return] identifier[deriv]
keyword[return] identifier[ComplexModulusSquaredDerivAdj] (
identifier[deriv] . identifier[range] , identifier[deriv] . identifier[domain] , identifier[linear] = identifier[deriv] . identifier[domain] . identifier[is_real] )
keyword[return] identifier[ComplexModulusSquaredDerivative] ( identifier[op] . identifier[domain] , identifier[op] . identifier[range] ,
identifier[linear] = identifier[op] . identifier[domain] . identifier[is_real] ) | def derivative(self, x):
"""Return the derivative operator in the "C = R^2" sense.
The returned operator (``self``) is the derivative of the
operator variant where the complex domain is reinterpreted as
a product of two real spaces.
Parameters
----------
x : `domain` element
Point in which to take the derivative.
Examples
--------
>>> c2 = odl.cn(2)
>>> op = odl.ComplexModulusSquared(c2)
>>> op([3 + 4j, 2])
rn(2).element([ 25., 4.])
>>> deriv = op.derivative([3 + 4j, 2])
>>> deriv.domain
cn(2)
>>> deriv.range
rn(2)
>>> deriv([2 + 1j, 4j]) # [(3*2 + 4*1) / 5, (2*0 + 0*4) / 2]
rn(2).element([ 10., 0.])
Notes
-----
The derivative of the squared complex modulus
.. math::
&S: X(\\mathbb{C}) \\to X(\\mathbb{R}), \\\\
&S(x) = \\Re(x)^2 + \\Im(x)^2,
with :math:`X(\\mathbb{F}) = \\mathbb{F}^n` or
:math:`L^2(\\Omega, \\mathbb{F})`, is given as
.. math::
&S'(x): X(\\mathbb{C}) \\to X(\\mathbb{R}), \\\\
&S'(x)(y) = \\Re(x)\\,\\Re(y) + \\Im(x)\\,\\Im(y).
It is linear when identifying :math:`\\mathbb{C}` with
:math:`\\mathbb{R}^2`, but not complex-linear.
"""
op = self
x = self.domain.element(x)
class ComplexModulusSquaredDerivative(Operator):
"""Derivative of the squared complex modulus operator."""
def _call(self, y, out):
"""Return ``self(y)``."""
x.real.multiply(y.real, out=out)
out += x.imag * y.imag
return out
@property
def adjoint(self):
"""Adjoint in the "C = R^2" sense.
Adjoint of the derivative:
Examples
--------
>>> c2 = odl.cn(2)
>>> op = odl.ComplexModulusSquared(c2)
>>> deriv = op.derivative([3 + 4j, 2])
>>> adj = deriv.adjoint
>>> adj.domain
rn(2)
>>> adj.range
cn(2)
>>> adj([2, 1]) # [2*(3 + 4j), 1*2]
cn(2).element([ 6.+8.j, 2.+0.j])
Adjointness only holds in the weaker sense that inner products
are the same when testing with vectors from the real space, but
not when testing complex vectors:
>>> y1 = deriv.range.element([1, 1])
>>> y2 = deriv.range.element([1, -1])
>>> adj(y1).inner(adj(y2)) # <M^* y1, M^* y2>
(21+0j)
>>> deriv(adj(y1)).inner(y2) # <M M^* y1, y2>
21.0
>>> x1 = deriv.domain.element([1j, 1j])
>>> x2 = deriv.domain.element([1 + 1j, 1j])
>>> deriv(x1).inner(deriv(x2)) # <M x1, M x2>
28.0
>>> adj(deriv(x1)).inner(x2) # <M^* M x1, x2>
(28+4j)
Notes
-----
The squared complex modulus derivative is given by
.. math::
&S'(x): X(\\mathbb{C}) \\to X(\\mathbb{R}), \\\\
&S'(x)(y) = \\Re(x)\\,\\Re(y) + \\Im(x)\\,\\Im(y).
Thus, its adjoint can (formally) be identified as
.. math::
&S'(x)^*: X(\\mathbb{R}) \\to X(\\mathbb{C}), \\\\
&S'(x)^*(u) = (\\Re(x)\\,u,\\ \\Im(x)\\,u).
The operator :math:`A = S'(x)` has the weak adjointness
property
.. math::
\\langle A^* y_1,\\ A^* y_2 \\rangle_{X(\\mathbb{C})} =
\\langle AA^* y_1,\\ y_2 \\rangle_{X(\\mathbb{R})},
but in general,
.. math::
\\langle A x,\\ y \\rangle_{X(\\mathbb{R})} \\neq
\\langle x,\\ A^* y \\rangle_{X(\\mathbb{C})},
in particular
.. math::
\\langle A x_1,\\ A x_2 \\rangle_{X(\\mathbb{R})} \\neq
\\langle A^*A x_1,\\ x_2 \\rangle_{X(\\mathbb{C})}.
"""
deriv = self
class ComplexModulusSquaredDerivAdj(Operator):
def _call(self, u, out):
"""Implement ``self(u, out)``."""
out.assign(x)
out.real *= u
out.imag *= u
return out
@property
def adjoint(self):
"""Adjoint in the "C = R^2" sense."""
return deriv
return ComplexModulusSquaredDerivAdj(deriv.range, deriv.domain, linear=deriv.domain.is_real)
return ComplexModulusSquaredDerivative(op.domain, op.range, linear=op.domain.is_real) |
def insert(self, var, value, index=None):
    """Insert ``value`` into the list stored under ``var``.

    When ``index`` is omitted the value is appended at the end of the
    list. Raises ``KeyError`` if the stored value is not a list.
    """
    target = self.__get(var)
    if not isinstance(target, list):
        raise KeyError("%s: is not a list" % var)
    # An absent index means "append", i.e. insert at the current length.
    target.insert(len(target) if index is None else index, value)
    if self.auto_save:
        self.save()
self.save() | def function[insert, parameter[self, var, value, index]]:
constant[Insert at the index.
If the index is not provided appends to the end of the list.
]
variable[current] assign[=] call[name[self].__get, parameter[name[var]]]
if <ast.UnaryOp object at 0x7da1b1142110> begin[:]
<ast.Raise object at 0x7da1b1141ab0>
if compare[name[index] is constant[None]] begin[:]
call[name[current].append, parameter[name[value]]]
if name[self].auto_save begin[:]
call[name[self].save, parameter[]] | keyword[def] identifier[insert] ( identifier[self] , identifier[var] , identifier[value] , identifier[index] = keyword[None] ):
literal[string]
identifier[current] = identifier[self] . identifier[__get] ( identifier[var] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[current] , identifier[list] ):
keyword[raise] identifier[KeyError] ( literal[string] % identifier[var] )
keyword[if] identifier[index] keyword[is] keyword[None] :
identifier[current] . identifier[append] ( identifier[value] )
keyword[else] :
identifier[current] . identifier[insert] ( identifier[index] , identifier[value] )
keyword[if] identifier[self] . identifier[auto_save] :
identifier[self] . identifier[save] () | def insert(self, var, value, index=None):
"""Insert at the index.
If the index is not provided appends to the end of the list.
"""
current = self.__get(var)
if not isinstance(current, list):
raise KeyError('%s: is not a list' % var) # depends on [control=['if'], data=[]]
if index is None:
current.append(value) # depends on [control=['if'], data=[]]
else:
current.insert(index, value)
if self.auto_save:
self.save() # depends on [control=['if'], data=[]] |
def _monitor_for_zero_connected_peers(self):
    """Watch for total loss of peer connectivity.

    A few polling cycles with zero peers are tolerated, giving pending or
    queued connections a chance to finish, before the node is restarted.
    """
    # Anything connected or still queued means we are not fully isolated.
    if len(self.Peers) != 0 or len(self.connection_queue) != 0:
        return
    if self.peer_zero_count > 2:
        logger.debug("Peer count 0 exceeded max retries threshold, restarting...")
        self.Restart()
        return
    logger.debug(
        f"Peer count is 0, allow for retries or queued connections to be established {self.peer_zero_count}")
    self.peer_zero_count += 1
constant[
Track if we lost connection to all peers.
Give some retries threshold to allow peers that are in the process of connecting or in the queue to be connected to run
]
if <ast.BoolOp object at 0x7da1b22aeef0> begin[:]
if compare[name[self].peer_zero_count greater[>] constant[2]] begin[:]
call[name[logger].debug, parameter[constant[Peer count 0 exceeded max retries threshold, restarting...]]]
call[name[self].Restart, parameter[]] | keyword[def] identifier[_monitor_for_zero_connected_peers] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[Peers] )== literal[int] keyword[and] identifier[len] ( identifier[self] . identifier[connection_queue] )== literal[int] :
keyword[if] identifier[self] . identifier[peer_zero_count] > literal[int] :
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[Restart] ()
keyword[else] :
identifier[logger] . identifier[debug] (
literal[string] )
identifier[self] . identifier[peer_zero_count] += literal[int] | def _monitor_for_zero_connected_peers(self):
"""
Track if we lost connection to all peers.
Give some retries threshold to allow peers that are in the process of connecting or in the queue to be connected to run
"""
if len(self.Peers) == 0 and len(self.connection_queue) == 0:
if self.peer_zero_count > 2:
logger.debug('Peer count 0 exceeded max retries threshold, restarting...')
self.Restart() # depends on [control=['if'], data=[]]
else:
logger.debug(f'Peer count is 0, allow for retries or queued connections to be established {self.peer_zero_count}')
self.peer_zero_count += 1 # depends on [control=['if'], data=[]] |
def computeMD5(filepath, relativepath = ""):
    '''Computes an MD5 checksum.

    Small files (< 2 MiB) are hashed in-process; larger files are handed to
    the ``md5sum`` binary, which was measured to be faster once the
    subprocess-spawning overhead is amortized (times converged around 2 MB
    on the tested CentOS 5.4 host).

    @param filepath: path of the file to hash.
    @param relativepath: directory prefix joined onto the file name in the
        returned string (md5sum-style "<digest> <path>" output).
    @return: the string ``"<hexdigest> <relativepath/filename>"``.
    @raise Exception: if ``md5sum`` writes anything to stderr.
    '''
    import hashlib  # stdlib replacement for the long-deprecated ``md5`` module

    filename = os.path.basename(filepath)
    sz = os.path.getsize(filepath)
    if sz < 2 * 1024 * 1024:
        digest = hashlib.md5()
        # ``with`` guarantees the handle is closed (the original leaked it).
        with open(filepath, 'rb') as handle:
            # Read in 64 KiB chunks until EOF to bound memory use.
            for chunk in iter(lambda: handle.read(65536), b''):
                digest.update(chunk)
        checksum = digest.hexdigest()
    else:
        p = subprocess.Popen(["md5sum", filepath],
                             stdout = subprocess.PIPE, stderr = subprocess.PIPE)
        stdoutdata, stderrdata = p.communicate()
        if stderrdata:
            raise Exception(stderrdata)
        # communicate() yields bytes on Python 3; decode before splitting so
        # the "%s" formatting below does not embed a b'...' repr.
        fields = stdoutdata.decode('utf-8', 'replace').split()
        checksum = fields[0]
        filename = os.path.basename(fields[1])
    return "%s %s" % (checksum, os.path.join(relativepath, filename))
constant[Computes an MD5 checksum.
Depending on the file size, we either run the computation in Python or spawn a subprocess.
The implementation is slower in Python than the tested OS but there is an overhead associated with the spawning.
On my one-machine test (CentOS release 5.4 final on the webserver), @2MB was where the times converged.
]
variable[filename] assign[=] call[name[os].path.basename, parameter[name[filepath]]]
variable[checksum] assign[=] constant[None]
variable[sz] assign[=] call[name[os].path.getsize, parameter[name[filepath]]]
if compare[name[sz] less[<] binary_operation[binary_operation[constant[2] * constant[1024]] * constant[1024]]] begin[:]
variable[checksum] assign[=] call[name[md5].new, parameter[]]
variable[F] assign[=] call[name[open], parameter[name[filepath], constant[rb]]]
while constant[True] begin[:]
variable[bytes] assign[=] call[name[F].read, parameter[constant[65536]]]
if compare[call[name[len], parameter[name[bytes]]] equal[==] constant[0]] begin[:]
break
call[name[checksum].update, parameter[name[bytes]]]
variable[checksum] assign[=] call[name[checksum].hexdigest, parameter[]]
return[binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bc72dd0>, <ast.Call object at 0x7da18bc70c40>]]]] | keyword[def] identifier[computeMD5] ( identifier[filepath] , identifier[relativepath] = literal[string] ):
literal[string]
identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[filepath] )
identifier[checksum] = keyword[None]
identifier[sz] = identifier[os] . identifier[path] . identifier[getsize] ( identifier[filepath] )
keyword[if] identifier[sz] < literal[int] * literal[int] * literal[int] :
identifier[checksum] = identifier[md5] . identifier[new] ()
identifier[F] = identifier[open] ( identifier[filepath] , literal[string] )
keyword[while] keyword[True] :
identifier[bytes] = identifier[F] . identifier[read] ( literal[int] )
keyword[if] identifier[len] ( identifier[bytes] )== literal[int] :
keyword[break]
identifier[checksum] . identifier[update] ( identifier[bytes] )
identifier[checksum] = identifier[checksum] . identifier[hexdigest] ()
keyword[else] :
identifier[p] = identifier[subprocess] . identifier[Popen] ([ literal[string] , identifier[filepath] ], identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[PIPE] )
identifier[stdoutdata] , identifier[stderrdata] = identifier[p] . identifier[communicate] ()
keyword[if] identifier[stderrdata] :
keyword[raise] identifier[Exception] ( identifier[stderrdata] )
identifier[stdoutdata] = identifier[stdoutdata] . identifier[split] ()
identifier[checksum] = identifier[stdoutdata] [ literal[int] ]
identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[stdoutdata] [ literal[int] ])
keyword[return] literal[string] %( identifier[checksum] , identifier[os] . identifier[path] . identifier[join] ( identifier[relativepath] , identifier[filename] )) | def computeMD5(filepath, relativepath=''):
"""Computes an MD5 checksum.
Depending on the file size, we either run the computation in Python or spawn a subprocess.
The implementation is slower in Python than the tested OS but there is an overhead associated with the spawning.
On my one-machine test (CentOS release 5.4 final on the webserver), @2MB was where the times converged.
"""
filename = os.path.basename(filepath)
checksum = None
sz = os.path.getsize(filepath)
if sz < 2 * 1024 * 1024:
checksum = md5.new()
F = open(filepath, 'rb')
while True:
bytes = F.read(65536)
if len(bytes) == 0:
break # end of file # depends on [control=['if'], data=[]]
checksum.update(bytes) # depends on [control=['while'], data=[]]
checksum = checksum.hexdigest() # depends on [control=['if'], data=[]]
else:
p = subprocess.Popen(['md5sum', filepath], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdoutdata, stderrdata) = p.communicate()
if stderrdata:
raise Exception(stderrdata) # depends on [control=['if'], data=[]]
stdoutdata = stdoutdata.split()
checksum = stdoutdata[0]
filename = os.path.basename(stdoutdata[1])
return '%s %s' % (checksum, os.path.join(relativepath, filename)) |
def get_or_create_user(self, username, password):
'''
Get or create the given user
'''
# Get the groups for this user
info = self.get_ad_info(username, password)
self.debug("INFO found: {}".format(info))
# Find the user
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User(username=username)
# Update user
user.first_name = info.get('first_name', '')
user.last_name = info.get('last_name', '')
user.email = info.get('email', '')
# Check if the user is in the Administrators groups
is_admin = False
for domain in info['groups']:
if 'Domain Admins' in info['groups'][domain]:
is_admin = True
break
# Set the user permissions
user.is_staff = is_admin
user.is_superuser = is_admin
# Refresh the password
user.set_password(password)
# Validate the selected user and gotten information
user = self.validate(user, info)
if user:
self.debug("User got validated!")
# Autosave the user until this point
user.save()
# Synchronize user
self.synchronize(user, info)
else:
self.debug("User didn't pass validation!")
# Finally return user
return user | def function[get_or_create_user, parameter[self, username, password]]:
constant[
Get or create the given user
]
variable[info] assign[=] call[name[self].get_ad_info, parameter[name[username], name[password]]]
call[name[self].debug, parameter[call[constant[INFO found: {}].format, parameter[name[info]]]]]
<ast.Try object at 0x7da1b0ebfee0>
name[user].first_name assign[=] call[name[info].get, parameter[constant[first_name], constant[]]]
name[user].last_name assign[=] call[name[info].get, parameter[constant[last_name], constant[]]]
name[user].email assign[=] call[name[info].get, parameter[constant[email], constant[]]]
variable[is_admin] assign[=] constant[False]
for taget[name[domain]] in starred[call[name[info]][constant[groups]]] begin[:]
if compare[constant[Domain Admins] in call[call[name[info]][constant[groups]]][name[domain]]] begin[:]
variable[is_admin] assign[=] constant[True]
break
name[user].is_staff assign[=] name[is_admin]
name[user].is_superuser assign[=] name[is_admin]
call[name[user].set_password, parameter[name[password]]]
variable[user] assign[=] call[name[self].validate, parameter[name[user], name[info]]]
if name[user] begin[:]
call[name[self].debug, parameter[constant[User got validated!]]]
call[name[user].save, parameter[]]
call[name[self].synchronize, parameter[name[user], name[info]]]
return[name[user]] | keyword[def] identifier[get_or_create_user] ( identifier[self] , identifier[username] , identifier[password] ):
literal[string]
identifier[info] = identifier[self] . identifier[get_ad_info] ( identifier[username] , identifier[password] )
identifier[self] . identifier[debug] ( literal[string] . identifier[format] ( identifier[info] ))
keyword[try] :
identifier[user] = identifier[User] . identifier[objects] . identifier[get] ( identifier[username] = identifier[username] )
keyword[except] identifier[User] . identifier[DoesNotExist] :
identifier[user] = identifier[User] ( identifier[username] = identifier[username] )
identifier[user] . identifier[first_name] = identifier[info] . identifier[get] ( literal[string] , literal[string] )
identifier[user] . identifier[last_name] = identifier[info] . identifier[get] ( literal[string] , literal[string] )
identifier[user] . identifier[email] = identifier[info] . identifier[get] ( literal[string] , literal[string] )
identifier[is_admin] = keyword[False]
keyword[for] identifier[domain] keyword[in] identifier[info] [ literal[string] ]:
keyword[if] literal[string] keyword[in] identifier[info] [ literal[string] ][ identifier[domain] ]:
identifier[is_admin] = keyword[True]
keyword[break]
identifier[user] . identifier[is_staff] = identifier[is_admin]
identifier[user] . identifier[is_superuser] = identifier[is_admin]
identifier[user] . identifier[set_password] ( identifier[password] )
identifier[user] = identifier[self] . identifier[validate] ( identifier[user] , identifier[info] )
keyword[if] identifier[user] :
identifier[self] . identifier[debug] ( literal[string] )
identifier[user] . identifier[save] ()
identifier[self] . identifier[synchronize] ( identifier[user] , identifier[info] )
keyword[else] :
identifier[self] . identifier[debug] ( literal[string] )
keyword[return] identifier[user] | def get_or_create_user(self, username, password):
"""
Get or create the given user
"""
# Get the groups for this user
info = self.get_ad_info(username, password)
self.debug('INFO found: {}'.format(info))
# Find the user
try:
user = User.objects.get(username=username) # depends on [control=['try'], data=[]]
except User.DoesNotExist:
user = User(username=username) # depends on [control=['except'], data=[]]
# Update user
user.first_name = info.get('first_name', '')
user.last_name = info.get('last_name', '')
user.email = info.get('email', '')
# Check if the user is in the Administrators groups
is_admin = False
for domain in info['groups']:
if 'Domain Admins' in info['groups'][domain]:
is_admin = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['domain']]
# Set the user permissions
user.is_staff = is_admin
user.is_superuser = is_admin
# Refresh the password
user.set_password(password)
# Validate the selected user and gotten information
user = self.validate(user, info)
if user:
self.debug('User got validated!')
# Autosave the user until this point
user.save()
# Synchronize user
self.synchronize(user, info) # depends on [control=['if'], data=[]]
else:
self.debug("User didn't pass validation!")
# Finally return user
return user |
def to_phy(self):
"""Convert this to the standard :class:`pyparser.capture.common.PhyInfo`
class.
"""
kwargs = {}
for attr in ['signal', 'noise', 'freq_mhz', 'fcs_error', 'rate', 'mcs',
'len', 'caplen', 'epoch_ts', 'end_epoch_ts']:
kwargs[attr] = getattr(self, attr, None)
kwargs['has_fcs'] = True
return PhyInfo(**kwargs) | def function[to_phy, parameter[self]]:
constant[Convert this to the standard :class:`pyparser.capture.common.PhyInfo`
class.
]
variable[kwargs] assign[=] dictionary[[], []]
for taget[name[attr]] in starred[list[[<ast.Constant object at 0x7da204566e30>, <ast.Constant object at 0x7da204567a00>, <ast.Constant object at 0x7da204567640>, <ast.Constant object at 0x7da204564f40>, <ast.Constant object at 0x7da204566a10>, <ast.Constant object at 0x7da204565930>, <ast.Constant object at 0x7da204565630>, <ast.Constant object at 0x7da2045642e0>, <ast.Constant object at 0x7da204565690>, <ast.Constant object at 0x7da204564310>]]] begin[:]
call[name[kwargs]][name[attr]] assign[=] call[name[getattr], parameter[name[self], name[attr], constant[None]]]
call[name[kwargs]][constant[has_fcs]] assign[=] constant[True]
return[call[name[PhyInfo], parameter[]]] | keyword[def] identifier[to_phy] ( identifier[self] ):
literal[string]
identifier[kwargs] ={}
keyword[for] identifier[attr] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[kwargs] [ identifier[attr] ]= identifier[getattr] ( identifier[self] , identifier[attr] , keyword[None] )
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[return] identifier[PhyInfo] (** identifier[kwargs] ) | def to_phy(self):
"""Convert this to the standard :class:`pyparser.capture.common.PhyInfo`
class.
"""
kwargs = {}
for attr in ['signal', 'noise', 'freq_mhz', 'fcs_error', 'rate', 'mcs', 'len', 'caplen', 'epoch_ts', 'end_epoch_ts']:
kwargs[attr] = getattr(self, attr, None) # depends on [control=['for'], data=['attr']]
kwargs['has_fcs'] = True
return PhyInfo(**kwargs) |
def fail_if_not_in_zset(self, key, member, client=None):
"""
Fails with an error containing the string '<FAIL_IF_NOT_IN_ZSET>' if
the given ``member`` is not in the ZSET ``key``. This can be used in
a pipeline to assert that the member is in the ZSET and cancel the
execution otherwise.
"""
self._fail_if_not_in_zset(keys=[key], args=[member], client=client) | def function[fail_if_not_in_zset, parameter[self, key, member, client]]:
constant[
Fails with an error containing the string '<FAIL_IF_NOT_IN_ZSET>' if
the given ``member`` is not in the ZSET ``key``. This can be used in
a pipeline to assert that the member is in the ZSET and cancel the
execution otherwise.
]
call[name[self]._fail_if_not_in_zset, parameter[]] | keyword[def] identifier[fail_if_not_in_zset] ( identifier[self] , identifier[key] , identifier[member] , identifier[client] = keyword[None] ):
literal[string]
identifier[self] . identifier[_fail_if_not_in_zset] ( identifier[keys] =[ identifier[key] ], identifier[args] =[ identifier[member] ], identifier[client] = identifier[client] ) | def fail_if_not_in_zset(self, key, member, client=None):
"""
Fails with an error containing the string '<FAIL_IF_NOT_IN_ZSET>' if
the given ``member`` is not in the ZSET ``key``. This can be used in
a pipeline to assert that the member is in the ZSET and cancel the
execution otherwise.
"""
self._fail_if_not_in_zset(keys=[key], args=[member], client=client) |
def default(cls):
"Make the current foreground color the default."
wAttributes = cls._get_text_attributes()
wAttributes &= ~win32.FOREGROUND_MASK
wAttributes |= win32.FOREGROUND_GREY
wAttributes &= ~win32.FOREGROUND_INTENSITY
cls._set_text_attributes(wAttributes) | def function[default, parameter[cls]]:
constant[Make the current foreground color the default.]
variable[wAttributes] assign[=] call[name[cls]._get_text_attributes, parameter[]]
<ast.AugAssign object at 0x7da1b08da4d0>
<ast.AugAssign object at 0x7da1b08da920>
<ast.AugAssign object at 0x7da18dc07430>
call[name[cls]._set_text_attributes, parameter[name[wAttributes]]] | keyword[def] identifier[default] ( identifier[cls] ):
literal[string]
identifier[wAttributes] = identifier[cls] . identifier[_get_text_attributes] ()
identifier[wAttributes] &=~ identifier[win32] . identifier[FOREGROUND_MASK]
identifier[wAttributes] |= identifier[win32] . identifier[FOREGROUND_GREY]
identifier[wAttributes] &=~ identifier[win32] . identifier[FOREGROUND_INTENSITY]
identifier[cls] . identifier[_set_text_attributes] ( identifier[wAttributes] ) | def default(cls):
"""Make the current foreground color the default."""
wAttributes = cls._get_text_attributes()
wAttributes &= ~win32.FOREGROUND_MASK
wAttributes |= win32.FOREGROUND_GREY
wAttributes &= ~win32.FOREGROUND_INTENSITY
cls._set_text_attributes(wAttributes) |
def get_ratetimestamp(self, base, code):
"""Return rate timestamp as a datetime/date or None"""
self.get_latestcurrencyrates(base)
try:
return datetime.fromtimestamp(self.rates["timestamp"])
except KeyError:
return None | def function[get_ratetimestamp, parameter[self, base, code]]:
constant[Return rate timestamp as a datetime/date or None]
call[name[self].get_latestcurrencyrates, parameter[name[base]]]
<ast.Try object at 0x7da18f721f60> | keyword[def] identifier[get_ratetimestamp] ( identifier[self] , identifier[base] , identifier[code] ):
literal[string]
identifier[self] . identifier[get_latestcurrencyrates] ( identifier[base] )
keyword[try] :
keyword[return] identifier[datetime] . identifier[fromtimestamp] ( identifier[self] . identifier[rates] [ literal[string] ])
keyword[except] identifier[KeyError] :
keyword[return] keyword[None] | def get_ratetimestamp(self, base, code):
"""Return rate timestamp as a datetime/date or None"""
self.get_latestcurrencyrates(base)
try:
return datetime.fromtimestamp(self.rates['timestamp']) # depends on [control=['try'], data=[]]
except KeyError:
return None # depends on [control=['except'], data=[]] |
def apply_scopes(self):
"""
Get the underlying query builder instance with applied global scopes.
:type: Builder
"""
if not self._scopes:
return self
builder = copy.copy(self)
query = builder.get_query()
# We will keep track of how many wheres are on the query before running the
# scope so that we can properly group the added scope constraints in the
# query as their own isolated nested where statement and avoid issues.
original_where_count = len(query.wheres)
where_counts = [0, original_where_count]
for scope in self._scopes.values():
self._apply_scope(scope, builder)
# Again, we will keep track of the count each time we add where clauses so that
# we will properly isolate each set of scope constraints inside of their own
# nested where clause to avoid any conflicts or issues with logical order.
where_counts.append(len(query.wheres))
if self._should_nest_wheres_for_scope(query, original_where_count):
self._nest_wheres_for_scope(query, Collection(where_counts).unique().all())
return builder | def function[apply_scopes, parameter[self]]:
constant[
Get the underlying query builder instance with applied global scopes.
:type: Builder
]
if <ast.UnaryOp object at 0x7da1b17d4640> begin[:]
return[name[self]]
variable[builder] assign[=] call[name[copy].copy, parameter[name[self]]]
variable[query] assign[=] call[name[builder].get_query, parameter[]]
variable[original_where_count] assign[=] call[name[len], parameter[name[query].wheres]]
variable[where_counts] assign[=] list[[<ast.Constant object at 0x7da1b17d7a90>, <ast.Name object at 0x7da1b17d71f0>]]
for taget[name[scope]] in starred[call[name[self]._scopes.values, parameter[]]] begin[:]
call[name[self]._apply_scope, parameter[name[scope], name[builder]]]
call[name[where_counts].append, parameter[call[name[len], parameter[name[query].wheres]]]]
if call[name[self]._should_nest_wheres_for_scope, parameter[name[query], name[original_where_count]]] begin[:]
call[name[self]._nest_wheres_for_scope, parameter[name[query], call[call[call[name[Collection], parameter[name[where_counts]]].unique, parameter[]].all, parameter[]]]]
return[name[builder]] | keyword[def] identifier[apply_scopes] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_scopes] :
keyword[return] identifier[self]
identifier[builder] = identifier[copy] . identifier[copy] ( identifier[self] )
identifier[query] = identifier[builder] . identifier[get_query] ()
identifier[original_where_count] = identifier[len] ( identifier[query] . identifier[wheres] )
identifier[where_counts] =[ literal[int] , identifier[original_where_count] ]
keyword[for] identifier[scope] keyword[in] identifier[self] . identifier[_scopes] . identifier[values] ():
identifier[self] . identifier[_apply_scope] ( identifier[scope] , identifier[builder] )
identifier[where_counts] . identifier[append] ( identifier[len] ( identifier[query] . identifier[wheres] ))
keyword[if] identifier[self] . identifier[_should_nest_wheres_for_scope] ( identifier[query] , identifier[original_where_count] ):
identifier[self] . identifier[_nest_wheres_for_scope] ( identifier[query] , identifier[Collection] ( identifier[where_counts] ). identifier[unique] (). identifier[all] ())
keyword[return] identifier[builder] | def apply_scopes(self):
"""
Get the underlying query builder instance with applied global scopes.
:type: Builder
"""
if not self._scopes:
return self # depends on [control=['if'], data=[]]
builder = copy.copy(self)
query = builder.get_query()
# We will keep track of how many wheres are on the query before running the
# scope so that we can properly group the added scope constraints in the
# query as their own isolated nested where statement and avoid issues.
original_where_count = len(query.wheres)
where_counts = [0, original_where_count]
for scope in self._scopes.values():
self._apply_scope(scope, builder)
# Again, we will keep track of the count each time we add where clauses so that
# we will properly isolate each set of scope constraints inside of their own
# nested where clause to avoid any conflicts or issues with logical order.
where_counts.append(len(query.wheres)) # depends on [control=['for'], data=['scope']]
if self._should_nest_wheres_for_scope(query, original_where_count):
self._nest_wheres_for_scope(query, Collection(where_counts).unique().all()) # depends on [control=['if'], data=[]]
return builder |
def _get_environ(environ):
# type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
"""
Returns our whitelisted environment variables.
"""
keys = ["SERVER_NAME", "SERVER_PORT"]
if _should_send_default_pii():
# Add all three headers here to make debugging of proxy setup easier.
keys += ["REMOTE_ADDR", "HTTP_X_FORWARDED_FOR", "HTTP_X_REAL_IP"]
for key in keys:
if key in environ:
yield key, environ[key] | def function[_get_environ, parameter[environ]]:
constant[
Returns our whitelisted environment variables.
]
variable[keys] assign[=] list[[<ast.Constant object at 0x7da1b18a1bd0>, <ast.Constant object at 0x7da1b18a3190>]]
if call[name[_should_send_default_pii], parameter[]] begin[:]
<ast.AugAssign object at 0x7da1b18a14e0>
for taget[name[key]] in starred[name[keys]] begin[:]
if compare[name[key] in name[environ]] begin[:]
<ast.Yield object at 0x7da1b18a0550> | keyword[def] identifier[_get_environ] ( identifier[environ] ):
literal[string]
identifier[keys] =[ literal[string] , literal[string] ]
keyword[if] identifier[_should_send_default_pii] ():
identifier[keys] +=[ literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[key] keyword[in] identifier[keys] :
keyword[if] identifier[key] keyword[in] identifier[environ] :
keyword[yield] identifier[key] , identifier[environ] [ identifier[key] ] | def _get_environ(environ):
# type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
'\n Returns our whitelisted environment variables.\n '
keys = ['SERVER_NAME', 'SERVER_PORT']
if _should_send_default_pii():
# Add all three headers here to make debugging of proxy setup easier.
keys += ['REMOTE_ADDR', 'HTTP_X_FORWARDED_FOR', 'HTTP_X_REAL_IP'] # depends on [control=['if'], data=[]]
for key in keys:
if key in environ:
yield (key, environ[key]) # depends on [control=['if'], data=['key', 'environ']] # depends on [control=['for'], data=['key']] |
def show_mesh(mesh):
r"""
Visualizes the mesh of a region as obtained by ``get_mesh`` function in
the ``metrics`` submodule.
Parameters
----------
mesh : tuple
A mesh returned by ``skimage.measure.marching_cubes``
Returns
-------
fig : Matplotlib figure
A handle to a matplotlib 3D axis
"""
lim_max = sp.amax(mesh.verts, axis=0)
lim_min = sp.amin(mesh.verts, axis=0)
# Display resulting triangular mesh using Matplotlib.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# Fancy indexing: `verts[faces]` to generate a collection of triangles
mesh = Poly3DCollection(mesh.verts[mesh.faces])
mesh.set_edgecolor('k')
ax.add_collection3d(mesh)
ax.set_xlabel("x-axis")
ax.set_ylabel("y-axis")
ax.set_zlabel("z-axis")
ax.set_xlim(lim_min[0], lim_max[0])
ax.set_ylim(lim_min[1], lim_max[1])
ax.set_zlim(lim_min[2], lim_max[2])
return fig | def function[show_mesh, parameter[mesh]]:
constant[
Visualizes the mesh of a region as obtained by ``get_mesh`` function in
the ``metrics`` submodule.
Parameters
----------
mesh : tuple
A mesh returned by ``skimage.measure.marching_cubes``
Returns
-------
fig : Matplotlib figure
A handle to a matplotlib 3D axis
]
variable[lim_max] assign[=] call[name[sp].amax, parameter[name[mesh].verts]]
variable[lim_min] assign[=] call[name[sp].amin, parameter[name[mesh].verts]]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
variable[mesh] assign[=] call[name[Poly3DCollection], parameter[call[name[mesh].verts][name[mesh].faces]]]
call[name[mesh].set_edgecolor, parameter[constant[k]]]
call[name[ax].add_collection3d, parameter[name[mesh]]]
call[name[ax].set_xlabel, parameter[constant[x-axis]]]
call[name[ax].set_ylabel, parameter[constant[y-axis]]]
call[name[ax].set_zlabel, parameter[constant[z-axis]]]
call[name[ax].set_xlim, parameter[call[name[lim_min]][constant[0]], call[name[lim_max]][constant[0]]]]
call[name[ax].set_ylim, parameter[call[name[lim_min]][constant[1]], call[name[lim_max]][constant[1]]]]
call[name[ax].set_zlim, parameter[call[name[lim_min]][constant[2]], call[name[lim_max]][constant[2]]]]
return[name[fig]] | keyword[def] identifier[show_mesh] ( identifier[mesh] ):
literal[string]
identifier[lim_max] = identifier[sp] . identifier[amax] ( identifier[mesh] . identifier[verts] , identifier[axis] = literal[int] )
identifier[lim_min] = identifier[sp] . identifier[amin] ( identifier[mesh] . identifier[verts] , identifier[axis] = literal[int] )
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] , identifier[projection] = literal[string] )
identifier[mesh] = identifier[Poly3DCollection] ( identifier[mesh] . identifier[verts] [ identifier[mesh] . identifier[faces] ])
identifier[mesh] . identifier[set_edgecolor] ( literal[string] )
identifier[ax] . identifier[add_collection3d] ( identifier[mesh] )
identifier[ax] . identifier[set_xlabel] ( literal[string] )
identifier[ax] . identifier[set_ylabel] ( literal[string] )
identifier[ax] . identifier[set_zlabel] ( literal[string] )
identifier[ax] . identifier[set_xlim] ( identifier[lim_min] [ literal[int] ], identifier[lim_max] [ literal[int] ])
identifier[ax] . identifier[set_ylim] ( identifier[lim_min] [ literal[int] ], identifier[lim_max] [ literal[int] ])
identifier[ax] . identifier[set_zlim] ( identifier[lim_min] [ literal[int] ], identifier[lim_max] [ literal[int] ])
keyword[return] identifier[fig] | def show_mesh(mesh):
"""
Visualizes the mesh of a region as obtained by ``get_mesh`` function in
the ``metrics`` submodule.
Parameters
----------
mesh : tuple
A mesh returned by ``skimage.measure.marching_cubes``
Returns
-------
fig : Matplotlib figure
A handle to a matplotlib 3D axis
"""
lim_max = sp.amax(mesh.verts, axis=0)
lim_min = sp.amin(mesh.verts, axis=0)
# Display resulting triangular mesh using Matplotlib.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# Fancy indexing: `verts[faces]` to generate a collection of triangles
mesh = Poly3DCollection(mesh.verts[mesh.faces])
mesh.set_edgecolor('k')
ax.add_collection3d(mesh)
ax.set_xlabel('x-axis')
ax.set_ylabel('y-axis')
ax.set_zlabel('z-axis')
ax.set_xlim(lim_min[0], lim_max[0])
ax.set_ylim(lim_min[1], lim_max[1])
ax.set_zlim(lim_min[2], lim_max[2])
return fig |
def update_tag(self, name):
# The `_tag` is to avoid conflicts with MutableMapping.update.
"""
Update (i.e., rename) the tag
:param str name: the new name for the tag
:return: an updated `Tag` object
:rtype: Tag
:raises DOAPIError: if the API endpoint replies with an error
"""
api = self.doapi_manager
return api._tag(api.request(self.url, method='PUT',
data={"name": name})["tag"]) | def function[update_tag, parameter[self, name]]:
constant[
Update (i.e., rename) the tag
:param str name: the new name for the tag
:return: an updated `Tag` object
:rtype: Tag
:raises DOAPIError: if the API endpoint replies with an error
]
variable[api] assign[=] name[self].doapi_manager
return[call[name[api]._tag, parameter[call[call[name[api].request, parameter[name[self].url]]][constant[tag]]]]] | keyword[def] identifier[update_tag] ( identifier[self] , identifier[name] ):
literal[string]
identifier[api] = identifier[self] . identifier[doapi_manager]
keyword[return] identifier[api] . identifier[_tag] ( identifier[api] . identifier[request] ( identifier[self] . identifier[url] , identifier[method] = literal[string] ,
identifier[data] ={ literal[string] : identifier[name] })[ literal[string] ]) | def update_tag(self, name):
# The `_tag` is to avoid conflicts with MutableMapping.update.
'\n Update (i.e., rename) the tag\n\n :param str name: the new name for the tag\n :return: an updated `Tag` object\n :rtype: Tag\n :raises DOAPIError: if the API endpoint replies with an error\n '
api = self.doapi_manager
return api._tag(api.request(self.url, method='PUT', data={'name': name})['tag']) |
def _build_xpath_expr(attrs):
"""Build an xpath expression to simulate bs4's ability to pass in kwargs to
search for attributes when using the lxml parser.
Parameters
----------
attrs : dict
A dict of HTML attributes. These are NOT checked for validity.
Returns
-------
expr : unicode
An XPath expression that checks for the given HTML attributes.
"""
# give class attribute as class_ because class is a python keyword
if 'class_' in attrs:
attrs['class'] = attrs.pop('class_')
s = ["@{key}={val!r}".format(key=k, val=v) for k, v in attrs.items()]
return '[{expr}]'.format(expr=' and '.join(s)) | def function[_build_xpath_expr, parameter[attrs]]:
constant[Build an xpath expression to simulate bs4's ability to pass in kwargs to
search for attributes when using the lxml parser.
Parameters
----------
attrs : dict
A dict of HTML attributes. These are NOT checked for validity.
Returns
-------
expr : unicode
An XPath expression that checks for the given HTML attributes.
]
if compare[constant[class_] in name[attrs]] begin[:]
call[name[attrs]][constant[class]] assign[=] call[name[attrs].pop, parameter[constant[class_]]]
variable[s] assign[=] <ast.ListComp object at 0x7da18f722e30>
return[call[constant[[{expr}]].format, parameter[]]] | keyword[def] identifier[_build_xpath_expr] ( identifier[attrs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[attrs] :
identifier[attrs] [ literal[string] ]= identifier[attrs] . identifier[pop] ( literal[string] )
identifier[s] =[ literal[string] . identifier[format] ( identifier[key] = identifier[k] , identifier[val] = identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attrs] . identifier[items] ()]
keyword[return] literal[string] . identifier[format] ( identifier[expr] = literal[string] . identifier[join] ( identifier[s] )) | def _build_xpath_expr(attrs):
"""Build an xpath expression to simulate bs4's ability to pass in kwargs to
search for attributes when using the lxml parser.
Parameters
----------
attrs : dict
A dict of HTML attributes. These are NOT checked for validity.
Returns
-------
expr : unicode
An XPath expression that checks for the given HTML attributes.
"""
# give class attribute as class_ because class is a python keyword
if 'class_' in attrs:
attrs['class'] = attrs.pop('class_') # depends on [control=['if'], data=['attrs']]
s = ['@{key}={val!r}'.format(key=k, val=v) for (k, v) in attrs.items()]
return '[{expr}]'.format(expr=' and '.join(s)) |
def send_keyboard_input(text=None, key_list=None):
"""
Args:
text (None):
key_list (list):
References:
http://stackoverflow.com/questions/14788036/python-win32api-sendmesage
http://www.pinvoke.net/default.aspx/user32.sendinput
CommandLine:
python -m utool.util_cplat --test-send_keyboard_input
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_cplat import * # NOQA
>>> text = '%paste'
>>> result = send_keyboard_input('%paste')
>>> print(result)
"""
#key_mapping = {
# 'enter':
#}
if WIN32:
#raise NotImplementedError()
#import win32api
#import win32gui
#import win32con
#hwnd = win32gui.GetForegroundWindow()
#print('entering text into %r' % (win32gui.GetWindowText(hwnd ),))
#win32con.VK_RETURN
#def callback(hwnd, hwnds):
#if win32gui.IsWindowVisible(hwnd) and win32gui.IsWindowEnabled(hwnd):
#hwnds[win32gui.GetClassName(hwnd)] = hwnd
#return True
#hwnds = {}
#win32gui.EnumChildWindows(hwnd, callback, hwnds)
#for ord_char in map(ord, text):
#win32api.SendMessage(hwnd, win32con.WM_CHAR, ord_char, 0)
from utool._internal import win32_send_keys
pause = float(.05)
text = 'paste'
keys = text
kw = dict(with_spaces=False, with_tabs=True, with_newlines=False)
win32_send_keys.SendKeys(keys, pause=pause, turn_off_numlock=True, **kw)
#win32_send_keys
#import time
#keys_ = win32_send_keys.parse_keys(keys, **kw)
#for k in keys_:
# k.Run()
# time.sleep(pause)
else:
if key_list is None:
char_map = {
'%': 'shift+5'
}
key_list = [char_map.get(char, char) for char in text]
xdotool_args = ['xdotool', 'key'] + key_list
#, 'shift+5', 'p', 'a', 's', 't', 'e', 'enter']
cmd = ' '.join(xdotool_args)
print('Running: cmd=%r' % (cmd,))
print('+---')
print(cmd)
print('L___')
os.system(cmd) | def function[send_keyboard_input, parameter[text, key_list]]:
constant[
Args:
text (None):
key_list (list):
References:
http://stackoverflow.com/questions/14788036/python-win32api-sendmesage
http://www.pinvoke.net/default.aspx/user32.sendinput
CommandLine:
python -m utool.util_cplat --test-send_keyboard_input
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_cplat import * # NOQA
>>> text = '%paste'
>>> result = send_keyboard_input('%paste')
>>> print(result)
]
if name[WIN32] begin[:]
from relative_module[utool._internal] import module[win32_send_keys]
variable[pause] assign[=] call[name[float], parameter[constant[0.05]]]
variable[text] assign[=] constant[paste]
variable[keys] assign[=] name[text]
variable[kw] assign[=] call[name[dict], parameter[]]
call[name[win32_send_keys].SendKeys, parameter[name[keys]]] | keyword[def] identifier[send_keyboard_input] ( identifier[text] = keyword[None] , identifier[key_list] = keyword[None] ):
literal[string]
keyword[if] identifier[WIN32] :
keyword[from] identifier[utool] . identifier[_internal] keyword[import] identifier[win32_send_keys]
identifier[pause] = identifier[float] ( literal[int] )
identifier[text] = literal[string]
identifier[keys] = identifier[text]
identifier[kw] = identifier[dict] ( identifier[with_spaces] = keyword[False] , identifier[with_tabs] = keyword[True] , identifier[with_newlines] = keyword[False] )
identifier[win32_send_keys] . identifier[SendKeys] ( identifier[keys] , identifier[pause] = identifier[pause] , identifier[turn_off_numlock] = keyword[True] ,** identifier[kw] )
keyword[else] :
keyword[if] identifier[key_list] keyword[is] keyword[None] :
identifier[char_map] ={
literal[string] : literal[string]
}
identifier[key_list] =[ identifier[char_map] . identifier[get] ( identifier[char] , identifier[char] ) keyword[for] identifier[char] keyword[in] identifier[text] ]
identifier[xdotool_args] =[ literal[string] , literal[string] ]+ identifier[key_list]
identifier[cmd] = literal[string] . identifier[join] ( identifier[xdotool_args] )
identifier[print] ( literal[string] %( identifier[cmd] ,))
identifier[print] ( literal[string] )
identifier[print] ( identifier[cmd] )
identifier[print] ( literal[string] )
identifier[os] . identifier[system] ( identifier[cmd] ) | def send_keyboard_input(text=None, key_list=None):
"""
Args:
text (None):
key_list (list):
References:
http://stackoverflow.com/questions/14788036/python-win32api-sendmesage
http://www.pinvoke.net/default.aspx/user32.sendinput
CommandLine:
python -m utool.util_cplat --test-send_keyboard_input
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_cplat import * # NOQA
>>> text = '%paste'
>>> result = send_keyboard_input('%paste')
>>> print(result)
"""
#key_mapping = {
# 'enter':
#}
if WIN32:
#raise NotImplementedError()
#import win32api
#import win32gui
#import win32con
#hwnd = win32gui.GetForegroundWindow()
#print('entering text into %r' % (win32gui.GetWindowText(hwnd ),))
#win32con.VK_RETURN
#def callback(hwnd, hwnds):
#if win32gui.IsWindowVisible(hwnd) and win32gui.IsWindowEnabled(hwnd):
#hwnds[win32gui.GetClassName(hwnd)] = hwnd
#return True
#hwnds = {}
#win32gui.EnumChildWindows(hwnd, callback, hwnds)
#for ord_char in map(ord, text):
#win32api.SendMessage(hwnd, win32con.WM_CHAR, ord_char, 0)
from utool._internal import win32_send_keys
pause = float(0.05)
text = 'paste'
keys = text
kw = dict(with_spaces=False, with_tabs=True, with_newlines=False)
win32_send_keys.SendKeys(keys, pause=pause, turn_off_numlock=True, **kw) # depends on [control=['if'], data=[]]
else:
#win32_send_keys
#import time
#keys_ = win32_send_keys.parse_keys(keys, **kw)
#for k in keys_:
# k.Run()
# time.sleep(pause)
if key_list is None:
char_map = {'%': 'shift+5'}
key_list = [char_map.get(char, char) for char in text] # depends on [control=['if'], data=['key_list']]
xdotool_args = ['xdotool', 'key'] + key_list
#, 'shift+5', 'p', 'a', 's', 't', 'e', 'enter']
cmd = ' '.join(xdotool_args)
print('Running: cmd=%r' % (cmd,))
print('+---')
print(cmd)
print('L___')
os.system(cmd) |
def delta(self):
    """Return the batch's wall-clock duration in seconds.

    Only meaningful once the batch is complete; assumes ``self.batch``
    carries both ``CompletedDate`` and ``CreatedDate`` API timestamps —
    TODO confirm behavior when ``CompletedDate`` is absent.
    """
    completed = parse_api_datetime(self.batch["CompletedDate"])
    created = parse_api_datetime(self.batch["CreatedDate"])
    return (completed - created).total_seconds()
constant[ returns the time (in seconds) that the batch took, if complete ]
variable[completed_date] assign[=] call[name[parse_api_datetime], parameter[call[name[self].batch][constant[CompletedDate]]]]
variable[created_date] assign[=] call[name[parse_api_datetime], parameter[call[name[self].batch][constant[CreatedDate]]]]
variable[td] assign[=] binary_operation[name[completed_date] - name[created_date]]
return[call[name[td].total_seconds, parameter[]]] | keyword[def] identifier[delta] ( identifier[self] ):
literal[string]
identifier[completed_date] = identifier[parse_api_datetime] ( identifier[self] . identifier[batch] [ literal[string] ])
identifier[created_date] = identifier[parse_api_datetime] ( identifier[self] . identifier[batch] [ literal[string] ])
identifier[td] = identifier[completed_date] - identifier[created_date]
keyword[return] identifier[td] . identifier[total_seconds] () | def delta(self):
""" returns the time (in seconds) that the batch took, if complete """
completed_date = parse_api_datetime(self.batch['CompletedDate'])
created_date = parse_api_datetime(self.batch['CreatedDate'])
td = completed_date - created_date
return td.total_seconds() |
def replace_all(text, replace_dict):
    """
    Replace multiple strings in a text.

    .. note::
        Replacements are applied one after another, with no guarantee
        about the order in which they are made.

    :param text: Text to replace in.
    :param replace_dict: Dictionary mapping strings to replace with their
        substitution.
    :returns: Text after replacements.

    >>> replace_all("foo bar foo thing", {"foo": "oof", "bar": "rab"})
    'oof rab oof thing'
    """
    result = text
    for needle, substitute in replace_dict.items():
        result = result.replace(needle, substitute)
    return result
constant[
Replace multiple strings in a text.
.. note::
Replacements are made successively, without any warranty on the order in which they are made.
:param text: Text to replace in.
:param replace_dict: Dictionary mapping strings to replace with their substitution.
:returns: Text after replacements.
>>> replace_all("foo bar foo thing", {"foo": "oof", "bar": "rab"})
'oof rab oof thing'
]
for taget[tuple[[<ast.Name object at 0x7da1b2436ad0>, <ast.Name object at 0x7da1b24363b0>]]] in starred[call[name[replace_dict].items, parameter[]]] begin[:]
variable[text] assign[=] call[name[text].replace, parameter[name[i], name[j]]]
return[name[text]] | keyword[def] identifier[replace_all] ( identifier[text] , identifier[replace_dict] ):
literal[string]
keyword[for] identifier[i] , identifier[j] keyword[in] identifier[replace_dict] . identifier[items] ():
identifier[text] = identifier[text] . identifier[replace] ( identifier[i] , identifier[j] )
keyword[return] identifier[text] | def replace_all(text, replace_dict):
"""
Replace multiple strings in a text.
.. note::
Replacements are made successively, without any warranty on the order in which they are made.
:param text: Text to replace in.
:param replace_dict: Dictionary mapping strings to replace with their substitution.
:returns: Text after replacements.
>>> replace_all("foo bar foo thing", {"foo": "oof", "bar": "rab"})
'oof rab oof thing'
"""
for (i, j) in replace_dict.items():
text = text.replace(i, j) # depends on [control=['for'], data=[]]
return text |
def inv_cipher(rkey, ct, Nk=4):
    """AES decryption cipher.

    :param rkey: expanded key schedule, reshaped to 4*(Nr+1) words of 32 bits.
    :param ct: 128-bit ciphertext block.
    :param Nk: key length in 32-bit words — 4, 6 or 8 (AES-128/192/256).
    :returns: the decrypted 128-bit state.
    """
    assert Nk in {4, 6, 8}
    Nr = Nk + 6
    schedule = rkey.reshape(4 * (Nr + 1), 32)
    # Initial round: add the last round key.
    state = add_round_key(ct.reshape(128), schedule[4 * Nr:4 * (Nr + 1)])
    # Main rounds, walking the schedule backwards.
    for rnd in reversed(range(1, Nr)):
        state = inv_shift_rows(state)
        state = inv_sub_bytes(state)
        state = add_round_key(state, schedule[4 * rnd:4 * (rnd + 1)])
        state = inv_mix_columns(state)
    # Final round omits InvMixColumns, per the AES specification.
    state = inv_shift_rows(state)
    state = inv_sub_bytes(state)
    return add_round_key(state, schedule[0:4])
constant[AES decryption cipher.]
assert[compare[name[Nk] in <ast.Set object at 0x7da1b0d0e1a0>]]
variable[Nr] assign[=] binary_operation[name[Nk] + constant[6]]
variable[rkey] assign[=] call[name[rkey].reshape, parameter[binary_operation[constant[4] * binary_operation[name[Nr] + constant[1]]], constant[32]]]
variable[ct] assign[=] call[name[ct].reshape, parameter[constant[128]]]
variable[state] assign[=] call[name[add_round_key], parameter[name[ct], call[name[rkey]][<ast.Slice object at 0x7da1b0ebf550>]]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[Nr] - constant[1]], constant[0], <ast.UnaryOp object at 0x7da1b0ebd360>]]] begin[:]
variable[state] assign[=] call[name[inv_shift_rows], parameter[name[state]]]
variable[state] assign[=] call[name[inv_sub_bytes], parameter[name[state]]]
variable[state] assign[=] call[name[add_round_key], parameter[name[state], call[name[rkey]][<ast.Slice object at 0x7da1b0c666b0>]]]
variable[state] assign[=] call[name[inv_mix_columns], parameter[name[state]]]
variable[state] assign[=] call[name[inv_shift_rows], parameter[name[state]]]
variable[state] assign[=] call[name[inv_sub_bytes], parameter[name[state]]]
variable[state] assign[=] call[name[add_round_key], parameter[name[state], call[name[rkey]][<ast.Slice object at 0x7da1b0c66d10>]]]
return[name[state]] | keyword[def] identifier[inv_cipher] ( identifier[rkey] , identifier[ct] , identifier[Nk] = literal[int] ):
literal[string]
keyword[assert] identifier[Nk] keyword[in] { literal[int] , literal[int] , literal[int] }
identifier[Nr] = identifier[Nk] + literal[int]
identifier[rkey] = identifier[rkey] . identifier[reshape] ( literal[int] *( identifier[Nr] + literal[int] ), literal[int] )
identifier[ct] = identifier[ct] . identifier[reshape] ( literal[int] )
identifier[state] = identifier[add_round_key] ( identifier[ct] , identifier[rkey] [ literal[int] * identifier[Nr] : literal[int] *( identifier[Nr] + literal[int] )])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[Nr] - literal[int] , literal[int] ,- literal[int] ):
identifier[state] = identifier[inv_shift_rows] ( identifier[state] )
identifier[state] = identifier[inv_sub_bytes] ( identifier[state] )
identifier[state] = identifier[add_round_key] ( identifier[state] , identifier[rkey] [ literal[int] * identifier[i] : literal[int] *( identifier[i] + literal[int] )])
identifier[state] = identifier[inv_mix_columns] ( identifier[state] )
identifier[state] = identifier[inv_shift_rows] ( identifier[state] )
identifier[state] = identifier[inv_sub_bytes] ( identifier[state] )
identifier[state] = identifier[add_round_key] ( identifier[state] , identifier[rkey] [ literal[int] : literal[int] ])
keyword[return] identifier[state] | def inv_cipher(rkey, ct, Nk=4):
"""AES decryption cipher."""
assert Nk in {4, 6, 8}
Nr = Nk + 6
rkey = rkey.reshape(4 * (Nr + 1), 32)
ct = ct.reshape(128)
# first round
state = add_round_key(ct, rkey[4 * Nr:4 * (Nr + 1)])
for i in range(Nr - 1, 0, -1):
state = inv_shift_rows(state)
state = inv_sub_bytes(state)
state = add_round_key(state, rkey[4 * i:4 * (i + 1)])
state = inv_mix_columns(state) # depends on [control=['for'], data=['i']]
# final round
state = inv_shift_rows(state)
state = inv_sub_bytes(state)
state = add_round_key(state, rkey[0:4])
return state |
def StopPreviousService(self):
    """Stops the Windows service hosting the GRR process."""
    StopService(
        service_name=config.CONFIG["Nanny.service_name"],
        service_binary_name=config.CONFIG["Nanny.service_binary_name"])
    if not config.CONFIG["Client.fleetspeak_enabled"]:
        return
    StopService(service_name=config.CONFIG["Client.fleetspeak_service_name"])
    # Remove GRR's Fleetspeak config from the registry so Fleetspeak won't
    # try to restart GRR before installation completes successfully.
    key_path = config.CONFIG["Client.fleetspeak_unsigned_services_regkey"]
    regkey = OpenRegkey(key_path)
    client_name = config.CONFIG["Client.name"]
    try:
        winreg.DeleteValue(regkey, client_name)
        logging.info("Deleted value '%s' of key '%s'.", client_name, key_path)
    except OSError as e:
        # ENOENT just means GRR's config was never written to the registry;
        # any other error is a real failure and must propagate.
        if e.errno != errno.ENOENT:
            raise
constant[Stops the Windows service hosting the GRR process.]
call[name[StopService], parameter[]]
if <ast.UnaryOp object at 0x7da1b1c1ba00> begin[:]
return[None]
call[name[StopService], parameter[]]
variable[key_path] assign[=] call[name[config].CONFIG][constant[Client.fleetspeak_unsigned_services_regkey]]
variable[regkey] assign[=] call[name[OpenRegkey], parameter[name[key_path]]]
<ast.Try object at 0x7da1b1c1ac80> | keyword[def] identifier[StopPreviousService] ( identifier[self] ):
literal[string]
identifier[StopService] (
identifier[service_name] = identifier[config] . identifier[CONFIG] [ literal[string] ],
identifier[service_binary_name] = identifier[config] . identifier[CONFIG] [ literal[string] ])
keyword[if] keyword[not] identifier[config] . identifier[CONFIG] [ literal[string] ]:
keyword[return]
identifier[StopService] ( identifier[service_name] = identifier[config] . identifier[CONFIG] [ literal[string] ])
identifier[key_path] = identifier[config] . identifier[CONFIG] [ literal[string] ]
identifier[regkey] = identifier[OpenRegkey] ( identifier[key_path] )
keyword[try] :
identifier[winreg] . identifier[DeleteValue] ( identifier[regkey] , identifier[config] . identifier[CONFIG] [ literal[string] ])
identifier[logging] . identifier[info] ( literal[string] ,
identifier[config] . identifier[CONFIG] [ literal[string] ], identifier[key_path] )
keyword[except] identifier[OSError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[errno] != identifier[errno] . identifier[ENOENT] :
keyword[raise] | def StopPreviousService(self):
"""Stops the Windows service hosting the GRR process."""
StopService(service_name=config.CONFIG['Nanny.service_name'], service_binary_name=config.CONFIG['Nanny.service_binary_name'])
if not config.CONFIG['Client.fleetspeak_enabled']:
return # depends on [control=['if'], data=[]]
StopService(service_name=config.CONFIG['Client.fleetspeak_service_name'])
# Delete GRR's Fleetspeak config from the registry so Fleetspeak
# doesn't try to restart GRR unless/until installation completes
# successfully.
key_path = config.CONFIG['Client.fleetspeak_unsigned_services_regkey']
regkey = OpenRegkey(key_path)
try:
winreg.DeleteValue(regkey, config.CONFIG['Client.name'])
logging.info("Deleted value '%s' of key '%s'.", config.CONFIG['Client.name'], key_path) # depends on [control=['try'], data=[]]
except OSError as e:
# Windows will raise a no-such-file-or-directory error if
# GRR's config hasn't been written to the registry yet.
if e.errno != errno.ENOENT:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] |
def _percent_selection(self, non_empty_slices):
    """Choose slice indices at the sampler's percentage positions.

    Maps each percentage in ``self._sampler`` (0-100) onto an index into
    ``non_empty_slices``, rounding to the nearest integer.
    """
    count = len(non_empty_slices)
    scaled = self._sampler * count / 100
    return np.around(scaled).astype('int64')
constant[Chooses slices at a given percentage between the first and last non-empty slice.]
return[call[call[name[np].around, parameter[binary_operation[binary_operation[name[self]._sampler * call[name[len], parameter[name[non_empty_slices]]]] / constant[100]]]].astype, parameter[constant[int64]]]] | keyword[def] identifier[_percent_selection] ( identifier[self] , identifier[non_empty_slices] ):
literal[string]
keyword[return] identifier[np] . identifier[around] ( identifier[self] . identifier[_sampler] * identifier[len] ( identifier[non_empty_slices] )/ literal[int] ). identifier[astype] ( literal[string] ) | def _percent_selection(self, non_empty_slices):
"""Chooses slices at a given percentage between the first and last non-empty slice."""
return np.around(self._sampler * len(non_empty_slices) / 100).astype('int64') |
def result(self, timeout=None):
    """
    Waits up to timeout for the result of the threaded job.
    Returns immediately if the job has already been done.

    :param timeout: The maximum time to wait for a result (in seconds)
    :raise OSError: The timeout expired before the job finished
    :raise Exception: Re-raises the exception raised while executing
        the method
    """
    finished = self._done_event.wait(timeout)
    if not finished and not self._done_event.is_set():
        # Neither the wait nor a late completion flagged the event.
        raise OSError("Timeout raised")
    if self._exception is not None:
        raise self._exception
    return self._result
constant[
Waits up to timeout for the result the threaded job.
Returns immediately the result if the job has already been done.
:param timeout: The maximum time to wait for a result (in seconds)
:raise OSError: The timeout raised before the job finished
:raise Exception: Raises the exception that occurred executing
the method
]
if <ast.BoolOp object at 0x7da20c992fb0> begin[:]
if compare[name[self]._exception is_not constant[None]] begin[:]
<ast.Raise object at 0x7da2047e9480>
return[name[self]._result]
<ast.Raise object at 0x7da2047ea440> | keyword[def] identifier[result] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_done_event] . identifier[wait] ( identifier[timeout] ) keyword[or] identifier[self] . identifier[_done_event] . identifier[is_set] ():
keyword[if] identifier[self] . identifier[_exception] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[self] . identifier[_exception]
keyword[return] identifier[self] . identifier[_result]
keyword[raise] identifier[OSError] ( literal[string] ) | def result(self, timeout=None):
"""
Waits up to timeout for the result the threaded job.
Returns immediately the result if the job has already been done.
:param timeout: The maximum time to wait for a result (in seconds)
:raise OSError: The timeout raised before the job finished
:raise Exception: Raises the exception that occurred executing
the method
"""
if self._done_event.wait(timeout) or self._done_event.is_set():
if self._exception is not None:
raise self._exception # depends on [control=['if'], data=[]]
return self._result # depends on [control=['if'], data=[]]
raise OSError('Timeout raised') |
def objective_names(lang="en"):
    """This resource returns a list of the localized WvW objective names for
    the specified language.
    :param lang: The language to query the names for.
    :return: A dictionary mapping the objective Ids to the names.
    *Note that these are not the names displayed in the game, but rather the
    abstract type.*
    """
    params = {"lang": lang}
    cache_name = "objective_names.%(lang)s.json" % params
    data = get_cached("wvw/objective_names.json", cache_name, params=params)
    # Build the id -> name mapping directly; a dict comprehension avoids the
    # intermediate list of pairs that dict([...]) would create.
    return {objective["id"]: objective["name"] for objective in data}
constant[This resource returns a list of the localized WvW objective names for
the specified language.
:param lang: The language to query the names for.
:return: A dictionary mapping the objective Ids to the names.
*Note that these are not the names displayed in the game, but rather the
abstract type.*
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c795540>], [<ast.Name object at 0x7da20c795240>]]
variable[cache_name] assign[=] binary_operation[constant[objective_names.%(lang)s.json] <ast.Mod object at 0x7da2590d6920> name[params]]
variable[data] assign[=] call[name[get_cached], parameter[constant[wvw/objective_names.json], name[cache_name]]]
return[call[name[dict], parameter[<ast.ListComp object at 0x7da20c794be0>]]] | keyword[def] identifier[objective_names] ( identifier[lang] = literal[string] ):
literal[string]
identifier[params] ={ literal[string] : identifier[lang] }
identifier[cache_name] = literal[string] % identifier[params]
identifier[data] = identifier[get_cached] ( literal[string] , identifier[cache_name] , identifier[params] = identifier[params] )
keyword[return] identifier[dict] ([( identifier[objective] [ literal[string] ], identifier[objective] [ literal[string] ]) keyword[for] identifier[objective] keyword[in] identifier[data] ]) | def objective_names(lang='en'):
"""This resource returns a list of the localized WvW objective names for
the specified language.
:param lang: The language to query the names for.
:return: A dictionary mapping the objective Ids to the names.
*Note that these are not the names displayed in the game, but rather the
abstract type.*
"""
params = {'lang': lang}
cache_name = 'objective_names.%(lang)s.json' % params
data = get_cached('wvw/objective_names.json', cache_name, params=params)
return dict([(objective['id'], objective['name']) for objective in data]) |
def fraction_not_nans(curve):
    """
    Returns the fraction of the curve extents that are good (non-nan data).

    :param curve: 1-D array-like of numeric samples (must be non-empty,
        otherwise the division raises ZeroDivisionError).
    :returns: float in [0, 1] — the share of entries that are not NaN.
    """
    # Count NaNs on the boolean mask directly; np.extract would materialize
    # the extracted values just to take their length.
    return 1 - (np.count_nonzero(np.isnan(curve)) / len(curve))
constant[
Returns the fraction of the curve extents that are good (non-nan data).
]
return[binary_operation[constant[1] - binary_operation[call[name[len], parameter[call[name[np].extract, parameter[call[name[np].isnan, parameter[name[curve]]], name[curve]]]]] / call[name[len], parameter[name[curve]]]]]] | keyword[def] identifier[fraction_not_nans] ( identifier[curve] ):
literal[string]
keyword[return] literal[int] -( identifier[len] ( identifier[np] . identifier[extract] ( identifier[np] . identifier[isnan] ( identifier[curve] ), identifier[curve] ))/ identifier[len] ( identifier[curve] )) | def fraction_not_nans(curve):
"""
Returns the fraction of the curve extents that are good (non-nan data).
"""
return 1 - len(np.extract(np.isnan(curve), curve)) / len(curve) |
def status(Name,
           region=None, key=None, keyid=None, profile=None):
    '''
    Given a trail name describe its properties.
    Returns a dictionary of interesting properties.
    CLI Example:
    .. code-block:: bash
        salt myminion boto_cloudtrail.describe mytrail
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        trail = conn.get_trail_status(Name=Name)
        if not trail:
            return {'trail': None}
        # Project only the interesting status fields; .get() tolerates
        # fields missing from older API responses.
        keys = ('IsLogging', 'LatestDeliveryError', 'LatestNotificationError',
                'LatestDeliveryTime', 'LatestNotificationTime',
                'StartLoggingTime', 'StopLoggingTime',
                'LatestCloudWatchLogsDeliveryError',
                'LatestCloudWatchLogsDeliveryTime',
                'LatestDigestDeliveryTime', 'LatestDigestDeliveryError',
                'LatestDeliveryAttemptTime',
                'LatestNotificationAttemptTime',
                'LatestNotificationAttemptSucceeded',
                'LatestDeliveryAttemptSucceeded',
                'TimeLoggingStarted',
                'TimeLoggingStopped')
        return {'trail': {k: trail.get(k) for k in keys}}
    except ClientError as e:
        # Compute the error wrapper once; the original computed it, discarded
        # it, and recomputed it in the final return.
        err = __utils__['boto3.get_error'](e)
        if e.response.get('Error', {}).get('Code') == 'TrailNotFoundException':
            # A missing trail is reported as absent, not as an error.
            return {'trail': None}
        return {'error': err}
constant[
Given a trail name describe its properties.
Returns a dictionary of interesting properties.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.describe mytrail
]
<ast.Try object at 0x7da1b2044730> | keyword[def] identifier[status] ( identifier[Name] ,
identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
keyword[try] :
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
identifier[trail] = identifier[conn] . identifier[get_trail_status] ( identifier[Name] = identifier[Name] )
keyword[if] identifier[trail] :
identifier[keys] =( literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] ,
literal[string] ,
literal[string] , literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] )
keyword[return] { literal[string] : identifier[dict] ([( identifier[k] , identifier[trail] . identifier[get] ( identifier[k] )) keyword[for] identifier[k] keyword[in] identifier[keys] ])}
keyword[else] :
keyword[return] { literal[string] : keyword[None] }
keyword[except] identifier[ClientError] keyword[as] identifier[e] :
identifier[err] = identifier[__utils__] [ literal[string] ]( identifier[e] )
keyword[if] identifier[e] . identifier[response] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )== literal[string] :
keyword[return] { literal[string] : keyword[None] }
keyword[return] { literal[string] : identifier[__utils__] [ literal[string] ]( identifier[e] )} | def status(Name, region=None, key=None, keyid=None, profile=None):
"""
Given a trail name describe its properties.
Returns a dictionary of interesting properties.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.describe mytrail
"""
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
trail = conn.get_trail_status(Name=Name)
if trail:
keys = ('IsLogging', 'LatestDeliveryError', 'LatestNotificationError', 'LatestDeliveryTime', 'LatestNotificationTime', 'StartLoggingTime', 'StopLoggingTime', 'LatestCloudWatchLogsDeliveryError', 'LatestCloudWatchLogsDeliveryTime', 'LatestDigestDeliveryTime', 'LatestDigestDeliveryError', 'LatestDeliveryAttemptTime', 'LatestNotificationAttemptTime', 'LatestNotificationAttemptSucceeded', 'LatestDeliveryAttemptSucceeded', 'TimeLoggingStarted', 'TimeLoggingStopped')
return {'trail': dict([(k, trail.get(k)) for k in keys])} # depends on [control=['if'], data=[]]
else:
return {'trail': None} # depends on [control=['try'], data=[]]
except ClientError as e:
err = __utils__['boto3.get_error'](e)
if e.response.get('Error', {}).get('Code') == 'TrailNotFoundException':
return {'trail': None} # depends on [control=['if'], data=[]]
return {'error': __utils__['boto3.get_error'](e)} # depends on [control=['except'], data=['e']] |
def append_path(self, path):
    """Append :obj:`path` onto the current path.

    The path may be either the return value from one of :meth:`copy_path`
    or :meth:`copy_path_flat` or it may be constructed manually.

    :param path:
        An iterable of tuples in the same format as returned
        by :meth:`copy_path`.
    """
    # Both returned objects must stay referenced until
    # cairo.cairo_append_path() has finished, but not afterwards — the
    # second binding exists purely to keep its object alive for this scope.
    encoded, keep_alive = _encode_path(path)
    cairo.cairo_append_path(self._pointer, encoded)
    self._check_status()
constant[Append :obj:`path` onto the current path.
The path may be either the return value from one of :meth:`copy_path`
or :meth:`copy_path_flat` or it may be constructed manually.
:param path:
An iterable of tuples
in the same format as returned by :meth:`copy_path`.
]
<ast.Tuple object at 0x7da1b100d270> assign[=] call[name[_encode_path], parameter[name[path]]]
call[name[cairo].cairo_append_path, parameter[name[self]._pointer, name[path]]]
call[name[self]._check_status, parameter[]] | keyword[def] identifier[append_path] ( identifier[self] , identifier[path] ):
literal[string]
identifier[path] , identifier[_] = identifier[_encode_path] ( identifier[path] )
identifier[cairo] . identifier[cairo_append_path] ( identifier[self] . identifier[_pointer] , identifier[path] )
identifier[self] . identifier[_check_status] () | def append_path(self, path):
"""Append :obj:`path` onto the current path.
The path may be either the return value from one of :meth:`copy_path`
or :meth:`copy_path_flat` or it may be constructed manually.
:param path:
An iterable of tuples
in the same format as returned by :meth:`copy_path`.
"""
# Both objects need to stay alive
# until after cairo.cairo_append_path() is finished, but not after.
(path, _) = _encode_path(path)
cairo.cairo_append_path(self._pointer, path)
self._check_status() |
def _set_cngn_mon_dev(self, v, load=False):
    """
    Setter method for cngn_mon_dev, mapped from YANG variable /tm_state/cngn_mon_dev (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_cngn_mon_dev is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_cngn_mon_dev() directly.
    YANG Description: TM discard pkt config
    """
    # Presumably pyangbind's typed-wrapper hook: let the value coerce itself
    # before validation — TODO confirm against the pyangbind runtime.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the value in the generated container class; YANGDynClass
        # validates it against the schema metadata baked into this call.
        t = YANGDynClass(v,base=cngn_mon_dev.cngn_mon_dev, is_container='container', presence=False, yang_name="cngn-mon-dev", rest_name="cngn-mon-dev", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'sysdiag-cngn-mon-dev', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-sysdiag-operational', defining_module='brocade-sysdiag-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        # Re-raise in the machine-readable structure the bindings emit.
        raise ValueError({
            'error-string': """cngn_mon_dev must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=cngn_mon_dev.cngn_mon_dev, is_container='container', presence=False, yang_name="cngn-mon-dev", rest_name="cngn-mon-dev", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'sysdiag-cngn-mon-dev', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-sysdiag-operational', defining_module='brocade-sysdiag-operational', yang_type='container', is_config=False)""",
        })
    self.__cngn_mon_dev = t
    # Notify the parent tree of the change, when a hook is present.
    if hasattr(self, '_set'):
        self._set()
constant[
Setter method for cngn_mon_dev, mapped from YANG variable /tm_state/cngn_mon_dev (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_cngn_mon_dev is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_cngn_mon_dev() directly.
YANG Description: TM discard pkt config
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18dc041c0>
name[self].__cngn_mon_dev assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_cngn_mon_dev] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[cngn_mon_dev] . identifier[cngn_mon_dev] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[False] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__cngn_mon_dev] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_cngn_mon_dev(self, v, load=False):
"""
Setter method for cngn_mon_dev, mapped from YANG variable /tm_state/cngn_mon_dev (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_cngn_mon_dev is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_cngn_mon_dev() directly.
YANG Description: TM discard pkt config
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=cngn_mon_dev.cngn_mon_dev, is_container='container', presence=False, yang_name='cngn-mon-dev', rest_name='cngn-mon-dev', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'sysdiag-cngn-mon-dev', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-sysdiag-operational', defining_module='brocade-sysdiag-operational', yang_type='container', is_config=False) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'cngn_mon_dev must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=cngn_mon_dev.cngn_mon_dev, is_container=\'container\', presence=False, yang_name="cngn-mon-dev", rest_name="cngn-mon-dev", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'sysdiag-cngn-mon-dev\', u\'cli-suppress-show-path\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-sysdiag-operational\', defining_module=\'brocade-sysdiag-operational\', yang_type=\'container\', is_config=False)'}) # depends on [control=['except'], data=[]]
self.__cngn_mon_dev = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def check_keypoints(keypoints, rows, cols):
    """Validate every keypoint's boundaries are in range [0, 1)."""
    for keypoint in keypoints:
        check_keypoint(keypoint, rows, cols)
constant[Check if keypoints boundaries are in range [0, 1)]
for taget[name[kp]] in starred[name[keypoints]] begin[:]
call[name[check_keypoint], parameter[name[kp], name[rows], name[cols]]] | keyword[def] identifier[check_keypoints] ( identifier[keypoints] , identifier[rows] , identifier[cols] ):
literal[string]
keyword[for] identifier[kp] keyword[in] identifier[keypoints] :
identifier[check_keypoint] ( identifier[kp] , identifier[rows] , identifier[cols] ) | def check_keypoints(keypoints, rows, cols):
"""Check if keypoints boundaries are in range [0, 1)"""
for kp in keypoints:
check_keypoint(kp, rows, cols) # depends on [control=['for'], data=['kp']] |
def collapse_segments (path):
    """Remove all redundant segments from the given URL path.

    Precondition: path is an unquoted url path.
    Returns the normalized path string.
    """
    def _fixpoint(pattern, repl, p):
        # Re-apply the substitution until the path stops changing.
        # The replacements happen from left to right, so repeated
        # application is what collapses chained parent references.
        new = pattern.sub(repl, p)
        while new != p:
            p = new
            new = pattern.sub(repl, p)
        return p

    # replace backslashes
    # note: this is _against_ the specification (which would require
    # backslashes to be left alone, and finally quoted with '%5C')
    # But replacing has several positive effects:
    # - Prevents path attacks on Windows systems (using \.. parent refs)
    # - Fixes bad URLs where users used backslashes instead of slashes.
    #   This is a far more probable case than users having an intentional
    #   backslash in the path name.
    path = path.replace('\\', '/')
    # shrink multiple slashes to one slash
    path = _slashes_ro.sub("/", path)
    # collapse redundant path segments
    path = _thisdir_ro.sub("", path)
    path = _samedir_ro.sub("/", path)
    # collapse parent path segments (see _parentdir_ro)
    path = _fixpoint(_parentdir_ro, "/", path)
    # collapse parent path segments of relative paths
    # (ie. without leading slash)
    path = _fixpoint(_relparentdir_ro, "", path)
    return path
constant[Remove all redundant segments from the given URL path.
Precondition: path is an unquoted url path]
variable[path] assign[=] call[name[path].replace, parameter[constant[\], constant[/]]]
variable[path] assign[=] call[name[_slashes_ro].sub, parameter[constant[/], name[path]]]
variable[path] assign[=] call[name[_thisdir_ro].sub, parameter[constant[], name[path]]]
variable[path] assign[=] call[name[_samedir_ro].sub, parameter[constant[/], name[path]]]
variable[newpath] assign[=] call[name[_parentdir_ro].sub, parameter[constant[/], name[path]]]
while compare[name[newpath] not_equal[!=] name[path]] begin[:]
variable[path] assign[=] name[newpath]
variable[newpath] assign[=] call[name[_parentdir_ro].sub, parameter[constant[/], name[path]]]
variable[newpath] assign[=] call[name[_relparentdir_ro].sub, parameter[constant[], name[path]]]
while compare[name[newpath] not_equal[!=] name[path]] begin[:]
variable[path] assign[=] name[newpath]
variable[newpath] assign[=] call[name[_relparentdir_ro].sub, parameter[constant[], name[path]]]
return[name[path]] | keyword[def] identifier[collapse_segments] ( identifier[path] ):
literal[string]
identifier[path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] )
identifier[path] = identifier[_slashes_ro] . identifier[sub] ( literal[string] , identifier[path] )
identifier[path] = identifier[_thisdir_ro] . identifier[sub] ( literal[string] , identifier[path] )
identifier[path] = identifier[_samedir_ro] . identifier[sub] ( literal[string] , identifier[path] )
identifier[newpath] = identifier[_parentdir_ro] . identifier[sub] ( literal[string] , identifier[path] )
keyword[while] identifier[newpath] != identifier[path] :
identifier[path] = identifier[newpath]
identifier[newpath] = identifier[_parentdir_ro] . identifier[sub] ( literal[string] , identifier[path] )
identifier[newpath] = identifier[_relparentdir_ro] . identifier[sub] ( literal[string] , identifier[path] )
keyword[while] identifier[newpath] != identifier[path] :
identifier[path] = identifier[newpath]
identifier[newpath] = identifier[_relparentdir_ro] . identifier[sub] ( literal[string] , identifier[path] )
keyword[return] identifier[path] | def collapse_segments(path):
"""Remove all redundant segments from the given URL path.
Precondition: path is an unquoted url path"""
# replace backslashes
# note: this is _against_ the specification (which would require
# backslashes to be left alone, and finally quoted with '%5C')
# But replacing has several positive effects:
# - Prevents path attacks on Windows systems (using \.. parent refs)
# - Fixes bad URLs where users used backslashes instead of slashes.
# This is a far more probable case than users having an intentional
# backslash in the path name.
path = path.replace('\\', '/')
# shrink multiple slashes to one slash
path = _slashes_ro.sub('/', path)
# collapse redundant path segments
path = _thisdir_ro.sub('', path)
path = _samedir_ro.sub('/', path)
# collapse parent path segments
# note: here we exploit the fact that the replacements happen
# to be from left to right (see also _parentdir_ro above)
newpath = _parentdir_ro.sub('/', path)
while newpath != path:
path = newpath
newpath = _parentdir_ro.sub('/', path) # depends on [control=['while'], data=['newpath', 'path']]
# collapse parent path segments of relative paths
# (ie. without leading slash)
newpath = _relparentdir_ro.sub('', path)
while newpath != path:
path = newpath
newpath = _relparentdir_ro.sub('', path) # depends on [control=['while'], data=['newpath', 'path']]
return path |
def getDescriptor(self):
    """
    Return the currently active endpoint descriptor
    (depending on current USB speed).
    """
    # Allocate an empty descriptor, hand it to the ioctl, and return it.
    descriptor = USBEndpointDescriptor()
    self._ioctl(ENDPOINT_DESC, descriptor, True)
    return descriptor
constant[
Returns the currently active endpoint descriptor
(depending on current USB speed).
]
variable[result] assign[=] call[name[USBEndpointDescriptor], parameter[]]
call[name[self]._ioctl, parameter[name[ENDPOINT_DESC], name[result], constant[True]]]
return[name[result]] | keyword[def] identifier[getDescriptor] ( identifier[self] ):
literal[string]
identifier[result] = identifier[USBEndpointDescriptor] ()
identifier[self] . identifier[_ioctl] ( identifier[ENDPOINT_DESC] , identifier[result] , keyword[True] )
keyword[return] identifier[result] | def getDescriptor(self):
"""
Returns the currently active endpoint descriptor
(depending on current USB speed).
"""
result = USBEndpointDescriptor()
self._ioctl(ENDPOINT_DESC, result, True)
return result |
def process(self, context, data):
    """
    Default interface for microservices. Process the input data for
    the input context.

    Looks up the user in LDAP using an ordered list of configured
    identifier candidates and, from the first matching record,
    populates response attributes and the input for the NameID.
    """
    self.context = context

    # Find the entityID for the SP that initiated the flow.
    try:
        sp_entity_id = context.state.state_dict['SATOSA_BASE']['requester']
    except KeyError:
        satosa_logging(logger, logging.ERROR, "Unable to determine the entityID for the SP requester", context.state)
        return super().process(context, data)
    satosa_logging(logger, logging.DEBUG, "entityID for the SP requester is {}".format(sp_entity_id), context.state)

    # Get the configuration for the SP, falling back to the default.
    if sp_entity_id in self.config.keys():
        config = self.config[sp_entity_id]
    else:
        config = self.config['default']
    satosa_logging(logger, logging.DEBUG, "Using config {}".format(self._filter_config(config)), context.state)

    # Ignore this SP entirely if so configured.
    if config['ignore']:
        satosa_logging(logger, logging.INFO, "Ignoring SP {}".format(sp_entity_id), None)
        return super().process(context, data)

    # The list of values for the LDAP search filters that will be tried
    # in order to find the LDAP directory record for the user.
    filter_values = []

    # Loop over the configured list of identifiers from the IdP to consider
    # and find asserted values to construct the ordered list of values for
    # the LDAP search filters.
    for candidate in config['ordered_identifier_candidates']:
        value = self._construct_filter_value(candidate, data)
        # If we have constructed a non empty value then add it as the next
        # filter value to use when searching for the user record.
        if value:
            filter_values.append(value)
            satosa_logging(logger, logging.DEBUG, "Added search filter value {} to list of search filters".format(value), context.state)

    # The first LDAP record found using the ordered list of search filter
    # values will be the record used.
    record = None

    # BUG FIX: the original tracked the caught exception via
    # ``except ... as err`` and tested ``err`` in a ``finally`` block.
    # Python 3 deletes the ``as`` name when the except clause exits, so
    # the finally block raised NameError instead of falling back.
    # Track the error in a separate variable instead.
    err = None
    try:
        connection = config['connection']
        for filter_val in filter_values:
            if record:
                break
            search_filter = '({0}={1})'.format(config['ldap_identifier_attribute'], filter_val)
            satosa_logging(logger, logging.DEBUG, "Constructed search filter {}".format(search_filter), context.state)
            satosa_logging(logger, logging.DEBUG, "Querying LDAP server...", context.state)
            message_id = connection.search(config['search_base'], search_filter, attributes=config['search_return_attributes'].keys())
            responses = connection.get_response(message_id)[0]
            satosa_logging(logger, logging.DEBUG, "Done querying LDAP server", context.state)
            satosa_logging(logger, logging.DEBUG, "LDAP server returned {} records".format(len(responses)), context.state)
            # for now consider only the first record found (if any)
            if len(responses) > 0:
                if len(responses) > 1:
                    satosa_logging(logger, logging.WARN, "LDAP server returned {} records using search filter value {}".format(len(responses), filter_val), context.state)
                record = responses[0]
                break
    except LDAPException as error:
        satosa_logging(logger, logging.ERROR, "Caught LDAP exception: {}".format(error), context.state)
        err = error
    except LdapAttributeStoreError as error:
        satosa_logging(logger, logging.ERROR, "Caught LDAP Attribute Store exception: {}".format(error), context.state)
        err = error
    except Exception as error:
        satosa_logging(logger, logging.ERROR, "Caught unhandled exception: {}".format(error), context.state)
        err = error
    if err:
        # On any LDAP failure, hand off to the rest of the pipeline
        # without attributes from the directory.
        return super().process(context, data)

    # Before using a found record, if any, to populate attributes
    # clear any attributes incoming to this microservice if so configured.
    if config['clear_input_attributes']:
        satosa_logging(logger, logging.DEBUG, "Clearing values for these input attributes: {}".format(data.attributes), context.state)
        data.attributes = {}

    # Use a found record, if any, to populate attributes and input for NameID
    if record:
        satosa_logging(logger, logging.DEBUG, "Using record with DN {}".format(record["dn"]), context.state)
        satosa_logging(logger, logging.DEBUG, "Record with DN {} has attributes {}".format(record["dn"], record["attributes"]), context.state)

        # Populate attributes as configured.
        self._populate_attributes(config, record, context, data)

        # Populate input for NameID if configured. SATOSA core does the
        # hashing of input to create a persistent NameID.
        self._populate_input_for_name_id(config, record, context, data)
    else:
        satosa_logging(logger, logging.WARN, "No record found in LDAP so no attributes will be added", context.state)
        on_ldap_search_result_empty = config['on_ldap_search_result_empty']
        if on_ldap_search_result_empty:
            # Redirect to the configured URL with
            # the entityIDs for the target SP and IdP used by the user
            # as query string parameters (URL encoded).
            encoded_sp_entity_id = urllib.parse.quote_plus(sp_entity_id)
            encoded_idp_entity_id = urllib.parse.quote_plus(data.auth_info.issuer)
            url = "{}?sp={}&idp={}".format(on_ldap_search_result_empty, encoded_sp_entity_id, encoded_idp_entity_id)
            satosa_logging(logger, logging.INFO, "Redirecting to {}".format(url), context.state)
            return Redirect(url)

    satosa_logging(logger, logging.DEBUG, "Returning data.attributes {}".format(str(data.attributes)), context.state)
    return super().process(context, data)
constant[
Default interface for microservices. Process the input data for
the input context.
]
name[self].context assign[=] name[context]
<ast.Try object at 0x7da1b157bd60>
call[name[satosa_logging], parameter[name[logger], name[logging].DEBUG, call[constant[entityID for the SP requester is {}].format, parameter[name[sp_entity_id]]], name[context].state]]
if compare[name[sp_entity_id] in call[name[self].config.keys, parameter[]]] begin[:]
variable[config] assign[=] call[name[self].config][name[sp_entity_id]]
call[name[satosa_logging], parameter[name[logger], name[logging].DEBUG, call[constant[Using config {}].format, parameter[call[name[self]._filter_config, parameter[name[config]]]]], name[context].state]]
if call[name[config]][constant[ignore]] begin[:]
call[name[satosa_logging], parameter[name[logger], name[logging].INFO, call[constant[Ignoring SP {}].format, parameter[name[sp_entity_id]]], constant[None]]]
return[call[call[name[super], parameter[]].process, parameter[name[context], name[data]]]]
variable[filter_values] assign[=] list[[]]
for taget[name[candidate]] in starred[call[name[config]][constant[ordered_identifier_candidates]]] begin[:]
variable[value] assign[=] call[name[self]._construct_filter_value, parameter[name[candidate], name[data]]]
if name[value] begin[:]
call[name[filter_values].append, parameter[name[value]]]
call[name[satosa_logging], parameter[name[logger], name[logging].DEBUG, call[constant[Added search filter value {} to list of search filters].format, parameter[name[value]]], name[context].state]]
variable[record] assign[=] constant[None]
<ast.Try object at 0x7da1b157a110>
if call[name[config]][constant[clear_input_attributes]] begin[:]
call[name[satosa_logging], parameter[name[logger], name[logging].DEBUG, call[constant[Clearing values for these input attributes: {}].format, parameter[name[data].attributes]], name[context].state]]
name[data].attributes assign[=] dictionary[[], []]
if name[record] begin[:]
call[name[satosa_logging], parameter[name[logger], name[logging].DEBUG, call[constant[Using record with DN {}].format, parameter[call[name[record]][constant[dn]]]], name[context].state]]
call[name[satosa_logging], parameter[name[logger], name[logging].DEBUG, call[constant[Record with DN {} has attributes {}].format, parameter[call[name[record]][constant[dn]], call[name[record]][constant[attributes]]]], name[context].state]]
call[name[self]._populate_attributes, parameter[name[config], name[record], name[context], name[data]]]
call[name[self]._populate_input_for_name_id, parameter[name[config], name[record], name[context], name[data]]]
call[name[satosa_logging], parameter[name[logger], name[logging].DEBUG, call[constant[Returning data.attributes {}].format, parameter[call[name[str], parameter[name[data].attributes]]]], name[context].state]]
return[call[call[name[super], parameter[]].process, parameter[name[context], name[data]]]] | keyword[def] identifier[process] ( identifier[self] , identifier[context] , identifier[data] ):
literal[string]
identifier[self] . identifier[context] = identifier[context]
keyword[try] :
identifier[sp_entity_id] = identifier[context] . identifier[state] . identifier[state_dict] [ literal[string] ][ literal[string] ]
keyword[except] identifier[KeyError] keyword[as] identifier[err] :
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[ERROR] , literal[string] , identifier[context] . identifier[state] )
keyword[return] identifier[super] (). identifier[process] ( identifier[context] , identifier[data] )
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[sp_entity_id] ), identifier[context] . identifier[state] )
keyword[if] identifier[sp_entity_id] keyword[in] identifier[self] . identifier[config] . identifier[keys] ():
identifier[config] = identifier[self] . identifier[config] [ identifier[sp_entity_id] ]
keyword[else] :
identifier[config] = identifier[self] . identifier[config] [ literal[string] ]
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[self] . identifier[_filter_config] ( identifier[config] )), identifier[context] . identifier[state] )
keyword[if] identifier[config] [ literal[string] ]:
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[INFO] , literal[string] . identifier[format] ( identifier[sp_entity_id] ), keyword[None] )
keyword[return] identifier[super] (). identifier[process] ( identifier[context] , identifier[data] )
identifier[filter_values] =[]
keyword[for] identifier[candidate] keyword[in] identifier[config] [ literal[string] ]:
identifier[value] = identifier[self] . identifier[_construct_filter_value] ( identifier[candidate] , identifier[data] )
keyword[if] identifier[value] :
identifier[filter_values] . identifier[append] ( identifier[value] )
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[value] ), identifier[context] . identifier[state] )
identifier[record] = keyword[None]
keyword[try] :
identifier[connection] = identifier[config] [ literal[string] ]
keyword[for] identifier[filter_val] keyword[in] identifier[filter_values] :
keyword[if] identifier[record] :
keyword[break]
identifier[search_filter] = literal[string] . identifier[format] ( identifier[config] [ literal[string] ], identifier[filter_val] )
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[search_filter] ), identifier[context] . identifier[state] )
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] , identifier[context] . identifier[state] )
identifier[message_id] = identifier[connection] . identifier[search] ( identifier[config] [ literal[string] ], identifier[search_filter] , identifier[attributes] = identifier[config] [ literal[string] ]. identifier[keys] ())
identifier[responses] = identifier[connection] . identifier[get_response] ( identifier[message_id] )[ literal[int] ]
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] , identifier[context] . identifier[state] )
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[len] ( identifier[responses] )), identifier[context] . identifier[state] )
keyword[if] identifier[len] ( identifier[responses] )> literal[int] :
keyword[if] identifier[len] ( identifier[responses] )> literal[int] :
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[WARN] , literal[string] . identifier[format] ( identifier[len] ( identifier[responses] ), identifier[filter_val] ), identifier[context] . identifier[state] )
identifier[record] = identifier[responses] [ literal[int] ]
keyword[break]
keyword[except] identifier[LDAPException] keyword[as] identifier[err] :
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[ERROR] , literal[string] . identifier[format] ( identifier[err] ), identifier[context] . identifier[state] )
keyword[except] identifier[LdapAttributeStoreError] keyword[as] identifier[err] :
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[ERROR] , literal[string] . identifier[format] ( identifier[err] ), identifier[context] . identifier[state] )
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[ERROR] , literal[string] . identifier[format] ( identifier[err] ), identifier[context] . identifier[state] )
keyword[else] :
identifier[err] = keyword[None]
keyword[finally] :
keyword[if] identifier[err] :
keyword[return] identifier[super] (). identifier[process] ( identifier[context] , identifier[data] )
keyword[if] identifier[config] [ literal[string] ]:
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[data] . identifier[attributes] ), identifier[context] . identifier[state] )
identifier[data] . identifier[attributes] ={}
keyword[if] identifier[record] :
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[record] [ literal[string] ]), identifier[context] . identifier[state] )
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[record] [ literal[string] ], identifier[record] [ literal[string] ]), identifier[context] . identifier[state] )
identifier[self] . identifier[_populate_attributes] ( identifier[config] , identifier[record] , identifier[context] , identifier[data] )
identifier[self] . identifier[_populate_input_for_name_id] ( identifier[config] , identifier[record] , identifier[context] , identifier[data] )
keyword[else] :
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[WARN] , literal[string] , identifier[context] . identifier[state] )
identifier[on_ldap_search_result_empty] = identifier[config] [ literal[string] ]
keyword[if] identifier[on_ldap_search_result_empty] :
identifier[encoded_sp_entity_id] = identifier[urllib] . identifier[parse] . identifier[quote_plus] ( identifier[sp_entity_id] )
identifier[encoded_idp_entity_id] = identifier[urllib] . identifier[parse] . identifier[quote_plus] ( identifier[data] . identifier[auth_info] . identifier[issuer] )
identifier[url] = literal[string] . identifier[format] ( identifier[on_ldap_search_result_empty] , identifier[encoded_sp_entity_id] , identifier[encoded_idp_entity_id] )
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[INFO] , literal[string] . identifier[format] ( identifier[url] ), identifier[context] . identifier[state] )
keyword[return] identifier[Redirect] ( identifier[url] )
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[str] ( identifier[data] . identifier[attributes] )), identifier[context] . identifier[state] )
keyword[return] identifier[super] (). identifier[process] ( identifier[context] , identifier[data] ) | def process(self, context, data):
"""
Default interface for microservices. Process the input data for
the input context.
"""
self.context = context
# Find the entityID for the SP that initiated the flow.
try:
sp_entity_id = context.state.state_dict['SATOSA_BASE']['requester'] # depends on [control=['try'], data=[]]
except KeyError as err:
satosa_logging(logger, logging.ERROR, 'Unable to determine the entityID for the SP requester', context.state)
return super().process(context, data) # depends on [control=['except'], data=[]]
satosa_logging(logger, logging.DEBUG, 'entityID for the SP requester is {}'.format(sp_entity_id), context.state)
# Get the configuration for the SP.
if sp_entity_id in self.config.keys():
config = self.config[sp_entity_id] # depends on [control=['if'], data=['sp_entity_id']]
else:
config = self.config['default']
satosa_logging(logger, logging.DEBUG, 'Using config {}'.format(self._filter_config(config)), context.state)
# Ignore this SP entirely if so configured.
if config['ignore']:
satosa_logging(logger, logging.INFO, 'Ignoring SP {}'.format(sp_entity_id), None)
return super().process(context, data) # depends on [control=['if'], data=[]]
# The list of values for the LDAP search filters that will be tried in order to find the
# LDAP directory record for the user.
filter_values = []
# Loop over the configured list of identifiers from the IdP to consider and find
# asserted values to construct the ordered list of values for the LDAP search filters.
for candidate in config['ordered_identifier_candidates']:
value = self._construct_filter_value(candidate, data)
# If we have constructed a non empty value then add it as the next filter value
# to use when searching for the user record.
if value:
filter_values.append(value)
satosa_logging(logger, logging.DEBUG, 'Added search filter value {} to list of search filters'.format(value), context.state) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['candidate']]
# Initialize an empty LDAP record. The first LDAP record found using the ordered
# list of search filter values will be the record used.
record = None
try:
connection = config['connection']
for filter_val in filter_values:
if record:
break # depends on [control=['if'], data=[]]
search_filter = '({0}={1})'.format(config['ldap_identifier_attribute'], filter_val)
satosa_logging(logger, logging.DEBUG, 'Constructed search filter {}'.format(search_filter), context.state)
satosa_logging(logger, logging.DEBUG, 'Querying LDAP server...', context.state)
message_id = connection.search(config['search_base'], search_filter, attributes=config['search_return_attributes'].keys())
responses = connection.get_response(message_id)[0]
satosa_logging(logger, logging.DEBUG, 'Done querying LDAP server', context.state)
satosa_logging(logger, logging.DEBUG, 'LDAP server returned {} records'.format(len(responses)), context.state)
# for now consider only the first record found (if any)
if len(responses) > 0:
if len(responses) > 1:
satosa_logging(logger, logging.WARN, 'LDAP server returned {} records using search filter value {}'.format(len(responses), filter_val), context.state) # depends on [control=['if'], data=[]]
record = responses[0]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filter_val']] # depends on [control=['try'], data=[]]
except LDAPException as err:
satosa_logging(logger, logging.ERROR, 'Caught LDAP exception: {}'.format(err), context.state) # depends on [control=['except'], data=['err']]
except LdapAttributeStoreError as err:
satosa_logging(logger, logging.ERROR, 'Caught LDAP Attribute Store exception: {}'.format(err), context.state) # depends on [control=['except'], data=['err']]
except Exception as err:
satosa_logging(logger, logging.ERROR, 'Caught unhandled exception: {}'.format(err), context.state) # depends on [control=['except'], data=['err']]
else:
err = None
finally:
if err:
return super().process(context, data) # depends on [control=['if'], data=[]]
# Before using a found record, if any, to populate attributes
# clear any attributes incoming to this microservice if so configured.
if config['clear_input_attributes']:
satosa_logging(logger, logging.DEBUG, 'Clearing values for these input attributes: {}'.format(data.attributes), context.state)
data.attributes = {} # depends on [control=['if'], data=[]]
# Use a found record, if any, to populate attributes and input for NameID
if record:
satosa_logging(logger, logging.DEBUG, 'Using record with DN {}'.format(record['dn']), context.state)
satosa_logging(logger, logging.DEBUG, 'Record with DN {} has attributes {}'.format(record['dn'], record['attributes']), context.state)
# Populate attributes as configured.
self._populate_attributes(config, record, context, data)
# Populate input for NameID if configured. SATOSA core does the hashing of input
# to create a persistent NameID.
self._populate_input_for_name_id(config, record, context, data) # depends on [control=['if'], data=[]]
else:
satosa_logging(logger, logging.WARN, 'No record found in LDAP so no attributes will be added', context.state)
on_ldap_search_result_empty = config['on_ldap_search_result_empty']
if on_ldap_search_result_empty:
# Redirect to the configured URL with
# the entityIDs for the target SP and IdP used by the user
# as query string parameters (URL encoded).
encoded_sp_entity_id = urllib.parse.quote_plus(sp_entity_id)
encoded_idp_entity_id = urllib.parse.quote_plus(data.auth_info.issuer)
url = '{}?sp={}&idp={}'.format(on_ldap_search_result_empty, encoded_sp_entity_id, encoded_idp_entity_id)
satosa_logging(logger, logging.INFO, 'Redirecting to {}'.format(url), context.state)
return Redirect(url) # depends on [control=['if'], data=[]]
satosa_logging(logger, logging.DEBUG, 'Returning data.attributes {}'.format(str(data.attributes)), context.state)
return super().process(context, data) |
def confidence_interval_hazard_(self):
    """
    Confidence interval of the hazard.

    Delegates to the generic transform-bound helper with the fitted
    hazard, the configured alpha level, and the CI column labels.
    """
    bounds = self._compute_confidence_bounds_of_transform(
        self._hazard, self.alpha, self._ci_labels
    )
    return bounds
constant[
The confidence interval of the hazard.
]
return[call[name[self]._compute_confidence_bounds_of_transform, parameter[name[self]._hazard, name[self].alpha, name[self]._ci_labels]]] | keyword[def] identifier[confidence_interval_hazard_] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[_compute_confidence_bounds_of_transform] ( identifier[self] . identifier[_hazard] , identifier[self] . identifier[alpha] , identifier[self] . identifier[_ci_labels] ) | def confidence_interval_hazard_(self):
"""
The confidence interval of the hazard.
"""
return self._compute_confidence_bounds_of_transform(self._hazard, self.alpha, self._ci_labels) |
def cmd(send, msg, args):
    """Gets the weather.
    Syntax: {command} <[--date (date)] [--hour (hour)] (location)|--set (default)>
    Powered by Weather Underground, www.wunderground.com
    """
    apikey = args['config']['api']['weatherapikey']

    # Build the argument parser for the command string.
    parser = arguments.ArgParser(args['config'])
    parser.add_argument('--date', action=arguments.DateParser)
    parser.add_argument('--hour', type=int)
    parser.add_argument('--set', action='store_true')
    parser.add_argument('string', nargs='*')
    try:
        cmdargs = parser.parse_args(msg)
    except arguments.ArgumentException as e:
        send(str(e))
        return

    # The positional args come back as a list of tokens; join them into
    # a single location string.
    if isinstance(cmdargs.string, list):
        cmdargs.string = " ".join(cmdargs.string)

    # --set stores a default location for the caller and exits.
    if cmdargs.set:
        set_default(args['nick'], cmdargs.string, args['db'], send, apikey)
        return

    # BUG FIX: validate the full 0-23 range; the original only rejected
    # hours greater than 23 and silently accepted negative hours.
    if cmdargs.hour is not None and not 0 <= cmdargs.hour <= 23:
        send("Invalid Hour")
        cmdargs.hour = None

    nick = args['nick'] if args['name'] == 'weather' else '`bjones'

    # Fall back to the caller's stored default location.
    if not cmdargs.string:
        cmdargs.string = get_default(nick, args['db'], send, args['config'], args['source'])

    # Dispatch: hourly forecast, dated forecast, or current weather.
    if cmdargs.hour is not None:
        get_hourly(cmdargs, send, apikey)
    elif cmdargs.date:
        get_forecast(cmdargs, send, apikey)
    else:
        get_weather(cmdargs, send, apikey)
constant[Gets the weather.
Syntax: {command} <[--date (date)] [--hour (hour)] (location)|--set (default)>
Powered by Weather Underground, www.wunderground.com
]
variable[apikey] assign[=] call[call[call[name[args]][constant[config]]][constant[api]]][constant[weatherapikey]]
variable[parser] assign[=] call[name[arguments].ArgParser, parameter[call[name[args]][constant[config]]]]
call[name[parser].add_argument, parameter[constant[--date]]]
call[name[parser].add_argument, parameter[constant[--hour]]]
call[name[parser].add_argument, parameter[constant[--set]]]
call[name[parser].add_argument, parameter[constant[string]]]
<ast.Try object at 0x7da1b20f9420>
if call[name[isinstance], parameter[name[cmdargs].string, name[list]]] begin[:]
name[cmdargs].string assign[=] call[constant[ ].join, parameter[name[cmdargs].string]]
if name[cmdargs].set begin[:]
call[name[set_default], parameter[call[name[args]][constant[nick]], name[cmdargs].string, call[name[args]][constant[db]], name[send], name[apikey]]]
return[None]
if <ast.BoolOp object at 0x7da1b1ff86d0> begin[:]
call[name[send], parameter[constant[Invalid Hour]]]
name[cmdargs].hour assign[=] constant[None]
variable[nick] assign[=] <ast.IfExp object at 0x7da1b1ff8a90>
if <ast.UnaryOp object at 0x7da1b1ff8ca0> begin[:]
name[cmdargs].string assign[=] call[name[get_default], parameter[name[nick], call[name[args]][constant[db]], name[send], call[name[args]][constant[config]], call[name[args]][constant[source]]]]
if compare[name[cmdargs].hour is_not constant[None]] begin[:]
call[name[get_hourly], parameter[name[cmdargs], name[send], name[apikey]]] | keyword[def] identifier[cmd] ( identifier[send] , identifier[msg] , identifier[args] ):
literal[string]
identifier[apikey] = identifier[args] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[parser] = identifier[arguments] . identifier[ArgParser] ( identifier[args] [ literal[string] ])
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = identifier[arguments] . identifier[DateParser] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] )
keyword[try] :
identifier[cmdargs] = identifier[parser] . identifier[parse_args] ( identifier[msg] )
keyword[except] identifier[arguments] . identifier[ArgumentException] keyword[as] identifier[e] :
identifier[send] ( identifier[str] ( identifier[e] ))
keyword[return]
keyword[if] identifier[isinstance] ( identifier[cmdargs] . identifier[string] , identifier[list] ):
identifier[cmdargs] . identifier[string] = literal[string] . identifier[join] ( identifier[cmdargs] . identifier[string] )
keyword[if] identifier[cmdargs] . identifier[set] :
identifier[set_default] ( identifier[args] [ literal[string] ], identifier[cmdargs] . identifier[string] , identifier[args] [ literal[string] ], identifier[send] , identifier[apikey] )
keyword[return]
keyword[if] identifier[cmdargs] . identifier[hour] keyword[is] keyword[not] keyword[None] keyword[and] identifier[cmdargs] . identifier[hour] > literal[int] :
identifier[send] ( literal[string] )
identifier[cmdargs] . identifier[hour] = keyword[None]
identifier[nick] = identifier[args] [ literal[string] ] keyword[if] identifier[args] [ literal[string] ]== literal[string] keyword[else] literal[string]
keyword[if] keyword[not] identifier[cmdargs] . identifier[string] :
identifier[cmdargs] . identifier[string] = identifier[get_default] ( identifier[nick] , identifier[args] [ literal[string] ], identifier[send] , identifier[args] [ literal[string] ], identifier[args] [ literal[string] ])
keyword[if] identifier[cmdargs] . identifier[hour] keyword[is] keyword[not] keyword[None] :
identifier[get_hourly] ( identifier[cmdargs] , identifier[send] , identifier[apikey] )
keyword[elif] identifier[cmdargs] . identifier[date] :
identifier[get_forecast] ( identifier[cmdargs] , identifier[send] , identifier[apikey] )
keyword[else] :
identifier[get_weather] ( identifier[cmdargs] , identifier[send] , identifier[apikey] ) | def cmd(send, msg, args):
"""Gets the weather.
Syntax: {command} <[--date (date)] [--hour (hour)] (location)|--set (default)>
Powered by Weather Underground, www.wunderground.com
"""
apikey = args['config']['api']['weatherapikey']
parser = arguments.ArgParser(args['config'])
parser.add_argument('--date', action=arguments.DateParser)
parser.add_argument('--hour', type=int)
parser.add_argument('--set', action='store_true')
parser.add_argument('string', nargs='*')
try:
cmdargs = parser.parse_args(msg) # depends on [control=['try'], data=[]]
except arguments.ArgumentException as e:
send(str(e))
return # depends on [control=['except'], data=['e']]
if isinstance(cmdargs.string, list):
cmdargs.string = ' '.join(cmdargs.string) # depends on [control=['if'], data=[]]
if cmdargs.set:
set_default(args['nick'], cmdargs.string, args['db'], send, apikey)
return # depends on [control=['if'], data=[]]
if cmdargs.hour is not None and cmdargs.hour > 23:
send('Invalid Hour')
cmdargs.hour = None # depends on [control=['if'], data=[]]
nick = args['nick'] if args['name'] == 'weather' else '`bjones'
if not cmdargs.string:
cmdargs.string = get_default(nick, args['db'], send, args['config'], args['source']) # depends on [control=['if'], data=[]]
if cmdargs.hour is not None:
get_hourly(cmdargs, send, apikey) # depends on [control=['if'], data=[]]
elif cmdargs.date:
get_forecast(cmdargs, send, apikey) # depends on [control=['if'], data=[]]
else:
get_weather(cmdargs, send, apikey) |
def fit_cmd(argv=sys.argv[1:]): # pragma: no cover
"""\
Fit a model and save to database.
Will use 'dataset_loader_train', 'model', and 'model_perister' from
the configuration file, to load a dataset to train a model with, and
persist it.
Usage:
pld-fit [options]
Options:
-n --no-save Don't persist the fitted model to disk.
--no-activate Don't activate the fitted model.
--save-if-better-than=<k> Persist only if test score better than given
value.
-e --evaluate Evaluate fitted model on train and test set and
print out results.
-h --help Show this screen.
"""
arguments = docopt(fit_cmd.__doc__, argv=argv)
no_save = arguments['--no-save']
no_activate = arguments['--no-activate']
save_if_better_than = arguments['--save-if-better-than']
evaluate = arguments['--evaluate'] or bool(save_if_better_than)
if save_if_better_than is not None:
save_if_better_than = float(save_if_better_than)
initialize_config(__mode__='fit')
fit(
persist=not no_save,
activate=not no_activate,
evaluate=evaluate,
persist_if_better_than=save_if_better_than,
) | def function[fit_cmd, parameter[argv]]:
constant[Fit a model and save to database.
Will use 'dataset_loader_train', 'model', and 'model_perister' from
the configuration file, to load a dataset to train a model with, and
persist it.
Usage:
pld-fit [options]
Options:
-n --no-save Don't persist the fitted model to disk.
--no-activate Don't activate the fitted model.
--save-if-better-than=<k> Persist only if test score better than given
value.
-e --evaluate Evaluate fitted model on train and test set and
print out results.
-h --help Show this screen.
]
variable[arguments] assign[=] call[name[docopt], parameter[name[fit_cmd].__doc__]]
variable[no_save] assign[=] call[name[arguments]][constant[--no-save]]
variable[no_activate] assign[=] call[name[arguments]][constant[--no-activate]]
variable[save_if_better_than] assign[=] call[name[arguments]][constant[--save-if-better-than]]
variable[evaluate] assign[=] <ast.BoolOp object at 0x7da20c796b00>
if compare[name[save_if_better_than] is_not constant[None]] begin[:]
variable[save_if_better_than] assign[=] call[name[float], parameter[name[save_if_better_than]]]
call[name[initialize_config], parameter[]]
call[name[fit], parameter[]] | keyword[def] identifier[fit_cmd] ( identifier[argv] = identifier[sys] . identifier[argv] [ literal[int] :]):
literal[string]
identifier[arguments] = identifier[docopt] ( identifier[fit_cmd] . identifier[__doc__] , identifier[argv] = identifier[argv] )
identifier[no_save] = identifier[arguments] [ literal[string] ]
identifier[no_activate] = identifier[arguments] [ literal[string] ]
identifier[save_if_better_than] = identifier[arguments] [ literal[string] ]
identifier[evaluate] = identifier[arguments] [ literal[string] ] keyword[or] identifier[bool] ( identifier[save_if_better_than] )
keyword[if] identifier[save_if_better_than] keyword[is] keyword[not] keyword[None] :
identifier[save_if_better_than] = identifier[float] ( identifier[save_if_better_than] )
identifier[initialize_config] ( identifier[__mode__] = literal[string] )
identifier[fit] (
identifier[persist] = keyword[not] identifier[no_save] ,
identifier[activate] = keyword[not] identifier[no_activate] ,
identifier[evaluate] = identifier[evaluate] ,
identifier[persist_if_better_than] = identifier[save_if_better_than] ,
) | def fit_cmd(argv=sys.argv[1:]): # pragma: no cover
"Fit a model and save to database.\n\nWill use 'dataset_loader_train', 'model', and 'model_perister' from\nthe configuration file, to load a dataset to train a model with, and\npersist it.\n\nUsage:\n pld-fit [options]\n\nOptions:\n -n --no-save Don't persist the fitted model to disk.\n\n --no-activate Don't activate the fitted model.\n\n --save-if-better-than=<k> Persist only if test score better than given\n value.\n\n -e --evaluate Evaluate fitted model on train and test set and\n print out results.\n\n -h --help Show this screen.\n"
arguments = docopt(fit_cmd.__doc__, argv=argv)
no_save = arguments['--no-save']
no_activate = arguments['--no-activate']
save_if_better_than = arguments['--save-if-better-than']
evaluate = arguments['--evaluate'] or bool(save_if_better_than)
if save_if_better_than is not None:
save_if_better_than = float(save_if_better_than) # depends on [control=['if'], data=['save_if_better_than']]
initialize_config(__mode__='fit')
fit(persist=not no_save, activate=not no_activate, evaluate=evaluate, persist_if_better_than=save_if_better_than) |
def _pool_connect(self, agg):
""" `agg` should be (host, port)
Returns a live connection from the connection pool
"""
return self._pool.connect(agg[0], agg[1], self._user, self._password, self._database) | def function[_pool_connect, parameter[self, agg]]:
constant[ `agg` should be (host, port)
Returns a live connection from the connection pool
]
return[call[name[self]._pool.connect, parameter[call[name[agg]][constant[0]], call[name[agg]][constant[1]], name[self]._user, name[self]._password, name[self]._database]]] | keyword[def] identifier[_pool_connect] ( identifier[self] , identifier[agg] ):
literal[string]
keyword[return] identifier[self] . identifier[_pool] . identifier[connect] ( identifier[agg] [ literal[int] ], identifier[agg] [ literal[int] ], identifier[self] . identifier[_user] , identifier[self] . identifier[_password] , identifier[self] . identifier[_database] ) | def _pool_connect(self, agg):
""" `agg` should be (host, port)
Returns a live connection from the connection pool
"""
return self._pool.connect(agg[0], agg[1], self._user, self._password, self._database) |
def bot_has_role(item):
"""Similar to :func:`.has_role` except checks if the bot itself has the
role.
This check raises one of two special exceptions, :exc:`.BotMissingRole` if the bot
is missing the role, or :exc:`.NoPrivateMessage` if it is used in a private message.
Both inherit from :exc:`.CheckFailure`.
.. versionchanged:: 1.1.0
Raise :exc:`.BotMissingRole` or :exc:`.NoPrivateMessage`
instead of generic :exc:`.CheckFailure`
"""
def predicate(ctx):
ch = ctx.channel
if not isinstance(ch, discord.abc.GuildChannel):
raise NoPrivateMessage()
me = ch.guild.me
if isinstance(item, int):
role = discord.utils.get(me.roles, id=item)
else:
role = discord.utils.get(me.roles, name=item)
if role is None:
raise BotMissingRole(item)
return True
return check(predicate) | def function[bot_has_role, parameter[item]]:
constant[Similar to :func:`.has_role` except checks if the bot itself has the
role.
This check raises one of two special exceptions, :exc:`.BotMissingRole` if the bot
is missing the role, or :exc:`.NoPrivateMessage` if it is used in a private message.
Both inherit from :exc:`.CheckFailure`.
.. versionchanged:: 1.1.0
Raise :exc:`.BotMissingRole` or :exc:`.NoPrivateMessage`
instead of generic :exc:`.CheckFailure`
]
def function[predicate, parameter[ctx]]:
variable[ch] assign[=] name[ctx].channel
if <ast.UnaryOp object at 0x7da1b1eb9480> begin[:]
<ast.Raise object at 0x7da1b1eb9900>
variable[me] assign[=] name[ch].guild.me
if call[name[isinstance], parameter[name[item], name[int]]] begin[:]
variable[role] assign[=] call[name[discord].utils.get, parameter[name[me].roles]]
if compare[name[role] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1ff2320>
return[constant[True]]
return[call[name[check], parameter[name[predicate]]]] | keyword[def] identifier[bot_has_role] ( identifier[item] ):
literal[string]
keyword[def] identifier[predicate] ( identifier[ctx] ):
identifier[ch] = identifier[ctx] . identifier[channel]
keyword[if] keyword[not] identifier[isinstance] ( identifier[ch] , identifier[discord] . identifier[abc] . identifier[GuildChannel] ):
keyword[raise] identifier[NoPrivateMessage] ()
identifier[me] = identifier[ch] . identifier[guild] . identifier[me]
keyword[if] identifier[isinstance] ( identifier[item] , identifier[int] ):
identifier[role] = identifier[discord] . identifier[utils] . identifier[get] ( identifier[me] . identifier[roles] , identifier[id] = identifier[item] )
keyword[else] :
identifier[role] = identifier[discord] . identifier[utils] . identifier[get] ( identifier[me] . identifier[roles] , identifier[name] = identifier[item] )
keyword[if] identifier[role] keyword[is] keyword[None] :
keyword[raise] identifier[BotMissingRole] ( identifier[item] )
keyword[return] keyword[True]
keyword[return] identifier[check] ( identifier[predicate] ) | def bot_has_role(item):
"""Similar to :func:`.has_role` except checks if the bot itself has the
role.
This check raises one of two special exceptions, :exc:`.BotMissingRole` if the bot
is missing the role, or :exc:`.NoPrivateMessage` if it is used in a private message.
Both inherit from :exc:`.CheckFailure`.
.. versionchanged:: 1.1.0
Raise :exc:`.BotMissingRole` or :exc:`.NoPrivateMessage`
instead of generic :exc:`.CheckFailure`
"""
def predicate(ctx):
ch = ctx.channel
if not isinstance(ch, discord.abc.GuildChannel):
raise NoPrivateMessage() # depends on [control=['if'], data=[]]
me = ch.guild.me
if isinstance(item, int):
role = discord.utils.get(me.roles, id=item) # depends on [control=['if'], data=[]]
else:
role = discord.utils.get(me.roles, name=item)
if role is None:
raise BotMissingRole(item) # depends on [control=['if'], data=[]]
return True
return check(predicate) |
def delete_upload_id(cls, tables: I2B2Tables, upload_id: int) -> int:
"""
Delete all observation_fact records with the supplied upload_id
:param tables: i2b2 sql connection
:param upload_id: upload identifier to remove
:return: number or records that were deleted
"""
return cls._delete_upload_id(tables.crc_connection, tables.observation_fact, upload_id) | def function[delete_upload_id, parameter[cls, tables, upload_id]]:
constant[
Delete all observation_fact records with the supplied upload_id
:param tables: i2b2 sql connection
:param upload_id: upload identifier to remove
:return: number or records that were deleted
]
return[call[name[cls]._delete_upload_id, parameter[name[tables].crc_connection, name[tables].observation_fact, name[upload_id]]]] | keyword[def] identifier[delete_upload_id] ( identifier[cls] , identifier[tables] : identifier[I2B2Tables] , identifier[upload_id] : identifier[int] )-> identifier[int] :
literal[string]
keyword[return] identifier[cls] . identifier[_delete_upload_id] ( identifier[tables] . identifier[crc_connection] , identifier[tables] . identifier[observation_fact] , identifier[upload_id] ) | def delete_upload_id(cls, tables: I2B2Tables, upload_id: int) -> int:
"""
Delete all observation_fact records with the supplied upload_id
:param tables: i2b2 sql connection
:param upload_id: upload identifier to remove
:return: number or records that were deleted
"""
return cls._delete_upload_id(tables.crc_connection, tables.observation_fact, upload_id) |
def _interleave(a, b):
"""Interleave arrays a and b; b may have multiple columns and must be
shorter by 1.
"""
b = np.column_stack([b]) # Turn b into a column array.
nx, ny = b.shape
c = np.zeros((nx + 1, ny + 1))
c[:, 0] = a
c[:-1, 1:] = b
return c.ravel()[:-(c.shape[1] - 1)] | def function[_interleave, parameter[a, b]]:
constant[Interleave arrays a and b; b may have multiple columns and must be
shorter by 1.
]
variable[b] assign[=] call[name[np].column_stack, parameter[list[[<ast.Name object at 0x7da20e9571c0>]]]]
<ast.Tuple object at 0x7da20e956500> assign[=] name[b].shape
variable[c] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da20e9559c0>, <ast.BinOp object at 0x7da20e955ae0>]]]]
call[name[c]][tuple[[<ast.Slice object at 0x7da1b0e15b70>, <ast.Constant object at 0x7da1b0e15840>]]] assign[=] name[a]
call[name[c]][tuple[[<ast.Slice object at 0x7da1b0e155a0>, <ast.Slice object at 0x7da1b0e14ac0>]]] assign[=] name[b]
return[call[call[name[c].ravel, parameter[]]][<ast.Slice object at 0x7da1b0e17130>]] | keyword[def] identifier[_interleave] ( identifier[a] , identifier[b] ):
literal[string]
identifier[b] = identifier[np] . identifier[column_stack] ([ identifier[b] ])
identifier[nx] , identifier[ny] = identifier[b] . identifier[shape]
identifier[c] = identifier[np] . identifier[zeros] (( identifier[nx] + literal[int] , identifier[ny] + literal[int] ))
identifier[c] [:, literal[int] ]= identifier[a]
identifier[c] [:- literal[int] , literal[int] :]= identifier[b]
keyword[return] identifier[c] . identifier[ravel] ()[:-( identifier[c] . identifier[shape] [ literal[int] ]- literal[int] )] | def _interleave(a, b):
"""Interleave arrays a and b; b may have multiple columns and must be
shorter by 1.
"""
b = np.column_stack([b]) # Turn b into a column array.
(nx, ny) = b.shape
c = np.zeros((nx + 1, ny + 1))
c[:, 0] = a
c[:-1, 1:] = b
return c.ravel()[:-(c.shape[1] - 1)] |
def create(cls, propertyfile, allow_unknown):
"""
Create a Property instance by attempting to parse the given property file.
@param propertyfile: A file name of a property file
@param allow_unknown: Whether to accept unknown properties
"""
with open(propertyfile) as f:
content = f.read().strip()
# parse content for known properties
is_svcomp = False
known_properties = []
only_known_svcomp_property = True
if content == 'OBSERVER AUTOMATON' or content == 'SATISFIABLE':
known_properties = [_PROPERTY_NAMES[content]]
elif content.startswith('CHECK'):
is_svcomp = True
for line in filter(None, content.splitlines()):
if content.startswith('CHECK'):
# SV-COMP property, either a well-known one or a new one
props_in_line = [
prop for (substring, prop) in _PROPERTY_NAMES.items() if substring in line]
if len(props_in_line) == 1:
known_properties.append(props_in_line[0])
else:
only_known_svcomp_property = False
else:
# not actually an SV-COMP property file
is_svcomp = False
known_properties = []
break
# check if some known property content was found
subproperties = None
if only_known_svcomp_property and len(known_properties) == 1:
is_well_known = True
name = known_properties[0]
elif only_known_svcomp_property and set(known_properties) == _MEMSAFETY_SUBPROPERTIES:
is_well_known = True
name = _PROP_MEMSAFETY
subproperties = list(known_properties)
else:
if not allow_unknown:
raise BenchExecException(
'File "{0}" does not contain a known property.'.format(propertyfile))
is_well_known = False
name = os.path.splitext(os.path.basename(propertyfile))[0]
return cls(propertyfile, is_well_known, is_svcomp, name, subproperties) | def function[create, parameter[cls, propertyfile, allow_unknown]]:
constant[
Create a Property instance by attempting to parse the given property file.
@param propertyfile: A file name of a property file
@param allow_unknown: Whether to accept unknown properties
]
with call[name[open], parameter[name[propertyfile]]] begin[:]
variable[content] assign[=] call[call[name[f].read, parameter[]].strip, parameter[]]
variable[is_svcomp] assign[=] constant[False]
variable[known_properties] assign[=] list[[]]
variable[only_known_svcomp_property] assign[=] constant[True]
if <ast.BoolOp object at 0x7da18bcc8f70> begin[:]
variable[known_properties] assign[=] list[[<ast.Subscript object at 0x7da18bcc8d00>]]
variable[subproperties] assign[=] constant[None]
if <ast.BoolOp object at 0x7da18bccb940> begin[:]
variable[is_well_known] assign[=] constant[True]
variable[name] assign[=] call[name[known_properties]][constant[0]]
return[call[name[cls], parameter[name[propertyfile], name[is_well_known], name[is_svcomp], name[name], name[subproperties]]]] | keyword[def] identifier[create] ( identifier[cls] , identifier[propertyfile] , identifier[allow_unknown] ):
literal[string]
keyword[with] identifier[open] ( identifier[propertyfile] ) keyword[as] identifier[f] :
identifier[content] = identifier[f] . identifier[read] (). identifier[strip] ()
identifier[is_svcomp] = keyword[False]
identifier[known_properties] =[]
identifier[only_known_svcomp_property] = keyword[True]
keyword[if] identifier[content] == literal[string] keyword[or] identifier[content] == literal[string] :
identifier[known_properties] =[ identifier[_PROPERTY_NAMES] [ identifier[content] ]]
keyword[elif] identifier[content] . identifier[startswith] ( literal[string] ):
identifier[is_svcomp] = keyword[True]
keyword[for] identifier[line] keyword[in] identifier[filter] ( keyword[None] , identifier[content] . identifier[splitlines] ()):
keyword[if] identifier[content] . identifier[startswith] ( literal[string] ):
identifier[props_in_line] =[
identifier[prop] keyword[for] ( identifier[substring] , identifier[prop] ) keyword[in] identifier[_PROPERTY_NAMES] . identifier[items] () keyword[if] identifier[substring] keyword[in] identifier[line] ]
keyword[if] identifier[len] ( identifier[props_in_line] )== literal[int] :
identifier[known_properties] . identifier[append] ( identifier[props_in_line] [ literal[int] ])
keyword[else] :
identifier[only_known_svcomp_property] = keyword[False]
keyword[else] :
identifier[is_svcomp] = keyword[False]
identifier[known_properties] =[]
keyword[break]
identifier[subproperties] = keyword[None]
keyword[if] identifier[only_known_svcomp_property] keyword[and] identifier[len] ( identifier[known_properties] )== literal[int] :
identifier[is_well_known] = keyword[True]
identifier[name] = identifier[known_properties] [ literal[int] ]
keyword[elif] identifier[only_known_svcomp_property] keyword[and] identifier[set] ( identifier[known_properties] )== identifier[_MEMSAFETY_SUBPROPERTIES] :
identifier[is_well_known] = keyword[True]
identifier[name] = identifier[_PROP_MEMSAFETY]
identifier[subproperties] = identifier[list] ( identifier[known_properties] )
keyword[else] :
keyword[if] keyword[not] identifier[allow_unknown] :
keyword[raise] identifier[BenchExecException] (
literal[string] . identifier[format] ( identifier[propertyfile] ))
identifier[is_well_known] = keyword[False]
identifier[name] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[propertyfile] ))[ literal[int] ]
keyword[return] identifier[cls] ( identifier[propertyfile] , identifier[is_well_known] , identifier[is_svcomp] , identifier[name] , identifier[subproperties] ) | def create(cls, propertyfile, allow_unknown):
"""
Create a Property instance by attempting to parse the given property file.
@param propertyfile: A file name of a property file
@param allow_unknown: Whether to accept unknown properties
"""
with open(propertyfile) as f:
content = f.read().strip() # depends on [control=['with'], data=['f']]
# parse content for known properties
is_svcomp = False
known_properties = []
only_known_svcomp_property = True
if content == 'OBSERVER AUTOMATON' or content == 'SATISFIABLE':
known_properties = [_PROPERTY_NAMES[content]] # depends on [control=['if'], data=[]]
elif content.startswith('CHECK'):
is_svcomp = True
for line in filter(None, content.splitlines()):
if content.startswith('CHECK'):
# SV-COMP property, either a well-known one or a new one
props_in_line = [prop for (substring, prop) in _PROPERTY_NAMES.items() if substring in line]
if len(props_in_line) == 1:
known_properties.append(props_in_line[0]) # depends on [control=['if'], data=[]]
else:
only_known_svcomp_property = False # depends on [control=['if'], data=[]]
else:
# not actually an SV-COMP property file
is_svcomp = False
known_properties = []
break # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=[]]
# check if some known property content was found
subproperties = None
if only_known_svcomp_property and len(known_properties) == 1:
is_well_known = True
name = known_properties[0] # depends on [control=['if'], data=[]]
elif only_known_svcomp_property and set(known_properties) == _MEMSAFETY_SUBPROPERTIES:
is_well_known = True
name = _PROP_MEMSAFETY
subproperties = list(known_properties) # depends on [control=['if'], data=[]]
else:
if not allow_unknown:
raise BenchExecException('File "{0}" does not contain a known property.'.format(propertyfile)) # depends on [control=['if'], data=[]]
is_well_known = False
name = os.path.splitext(os.path.basename(propertyfile))[0]
return cls(propertyfile, is_well_known, is_svcomp, name, subproperties) |
def create(self, unique_name=values.unset, friendly_name=values.unset,
data_enabled=values.unset, data_limit=values.unset,
data_metering=values.unset, messaging_enabled=values.unset,
voice_enabled=values.unset, national_roaming_enabled=values.unset,
international_roaming=values.unset,
national_roaming_data_limit=values.unset,
international_roaming_data_limit=values.unset):
"""
Create a new RatePlanInstance
:param unicode unique_name: A user-provided string that uniquely identifies this resource as an alternative to the Sid.
:param unicode friendly_name: A user-provided string that identifies this resource.
:param bool data_enabled: Defines whether SIMs are capable of using GPRS/3G/LTE data connectivity.
:param unicode data_limit: Network-enforced limit specifying the total Megabytes of data usage allowed during one month on the home network.
:param unicode data_metering: The model by which to meter data usage, in accordance with the two available data metering models.
:param bool messaging_enabled: Defines whether SIMs are capable of making and sending and receiving SMS messages via either Commands or Programmable SMS APIs.
:param bool voice_enabled: Defines whether SIMs are capable of making and receiving voice calls.
:param bool national_roaming_enabled: Defines whether SIMs can roam onto other networks in the SIM's home country.
:param unicode international_roaming: The international_roaming
:param unicode national_roaming_data_limit: Network-enforced limit specifying the total Megabytes of national roaming data usage allowed during one month.
:param unicode international_roaming_data_limit: The international_roaming_data_limit
:returns: Newly created RatePlanInstance
:rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
"""
data = values.of({
'UniqueName': unique_name,
'FriendlyName': friendly_name,
'DataEnabled': data_enabled,
'DataLimit': data_limit,
'DataMetering': data_metering,
'MessagingEnabled': messaging_enabled,
'VoiceEnabled': voice_enabled,
'NationalRoamingEnabled': national_roaming_enabled,
'InternationalRoaming': serialize.map(international_roaming, lambda e: e),
'NationalRoamingDataLimit': national_roaming_data_limit,
'InternationalRoamingDataLimit': international_roaming_data_limit,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return RatePlanInstance(self._version, payload, ) | def function[create, parameter[self, unique_name, friendly_name, data_enabled, data_limit, data_metering, messaging_enabled, voice_enabled, national_roaming_enabled, international_roaming, national_roaming_data_limit, international_roaming_data_limit]]:
constant[
Create a new RatePlanInstance
:param unicode unique_name: A user-provided string that uniquely identifies this resource as an alternative to the Sid.
:param unicode friendly_name: A user-provided string that identifies this resource.
:param bool data_enabled: Defines whether SIMs are capable of using GPRS/3G/LTE data connectivity.
:param unicode data_limit: Network-enforced limit specifying the total Megabytes of data usage allowed during one month on the home network.
:param unicode data_metering: The model by which to meter data usage, in accordance with the two available data metering models.
:param bool messaging_enabled: Defines whether SIMs are capable of making and sending and receiving SMS messages via either Commands or Programmable SMS APIs.
:param bool voice_enabled: Defines whether SIMs are capable of making and receiving voice calls.
:param bool national_roaming_enabled: Defines whether SIMs can roam onto other networks in the SIM's home country.
:param unicode international_roaming: The international_roaming
:param unicode national_roaming_data_limit: Network-enforced limit specifying the total Megabytes of national roaming data usage allowed during one month.
:param unicode international_roaming_data_limit: The international_roaming_data_limit
:returns: Newly created RatePlanInstance
:rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
]
variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da20c993040>, <ast.Constant object at 0x7da20c992710>, <ast.Constant object at 0x7da20c990220>, <ast.Constant object at 0x7da20c990040>, <ast.Constant object at 0x7da20c9932e0>, <ast.Constant object at 0x7da20c990760>, <ast.Constant object at 0x7da20c992a40>, <ast.Constant object at 0x7da20c993cd0>, <ast.Constant object at 0x7da20c991390>, <ast.Constant object at 0x7da20c9914b0>, <ast.Constant object at 0x7da20c990370>], [<ast.Name object at 0x7da20c990ac0>, <ast.Name object at 0x7da20c993850>, <ast.Name object at 0x7da20c991690>, <ast.Name object at 0x7da20c992950>, <ast.Name object at 0x7da20c993340>, <ast.Name object at 0x7da20c990a90>, <ast.Name object at 0x7da20c993dc0>, <ast.Name object at 0x7da20c992bc0>, <ast.Call object at 0x7da20c990790>, <ast.Name object at 0x7da20c9924a0>, <ast.Name object at 0x7da20c9925f0>]]]]
variable[payload] assign[=] call[name[self]._version.create, parameter[constant[POST], name[self]._uri]]
return[call[name[RatePlanInstance], parameter[name[self]._version, name[payload]]]] | keyword[def] identifier[create] ( identifier[self] , identifier[unique_name] = identifier[values] . identifier[unset] , identifier[friendly_name] = identifier[values] . identifier[unset] ,
identifier[data_enabled] = identifier[values] . identifier[unset] , identifier[data_limit] = identifier[values] . identifier[unset] ,
identifier[data_metering] = identifier[values] . identifier[unset] , identifier[messaging_enabled] = identifier[values] . identifier[unset] ,
identifier[voice_enabled] = identifier[values] . identifier[unset] , identifier[national_roaming_enabled] = identifier[values] . identifier[unset] ,
identifier[international_roaming] = identifier[values] . identifier[unset] ,
identifier[national_roaming_data_limit] = identifier[values] . identifier[unset] ,
identifier[international_roaming_data_limit] = identifier[values] . identifier[unset] ):
literal[string]
identifier[data] = identifier[values] . identifier[of] ({
literal[string] : identifier[unique_name] ,
literal[string] : identifier[friendly_name] ,
literal[string] : identifier[data_enabled] ,
literal[string] : identifier[data_limit] ,
literal[string] : identifier[data_metering] ,
literal[string] : identifier[messaging_enabled] ,
literal[string] : identifier[voice_enabled] ,
literal[string] : identifier[national_roaming_enabled] ,
literal[string] : identifier[serialize] . identifier[map] ( identifier[international_roaming] , keyword[lambda] identifier[e] : identifier[e] ),
literal[string] : identifier[national_roaming_data_limit] ,
literal[string] : identifier[international_roaming_data_limit] ,
})
identifier[payload] = identifier[self] . identifier[_version] . identifier[create] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[data] = identifier[data] ,
)
keyword[return] identifier[RatePlanInstance] ( identifier[self] . identifier[_version] , identifier[payload] ,) | def create(self, unique_name=values.unset, friendly_name=values.unset, data_enabled=values.unset, data_limit=values.unset, data_metering=values.unset, messaging_enabled=values.unset, voice_enabled=values.unset, national_roaming_enabled=values.unset, international_roaming=values.unset, national_roaming_data_limit=values.unset, international_roaming_data_limit=values.unset):
"""
Create a new RatePlanInstance
:param unicode unique_name: A user-provided string that uniquely identifies this resource as an alternative to the Sid.
:param unicode friendly_name: A user-provided string that identifies this resource.
:param bool data_enabled: Defines whether SIMs are capable of using GPRS/3G/LTE data connectivity.
:param unicode data_limit: Network-enforced limit specifying the total Megabytes of data usage allowed during one month on the home network.
:param unicode data_metering: The model by which to meter data usage, in accordance with the two available data metering models.
:param bool messaging_enabled: Defines whether SIMs are capable of making and sending and receiving SMS messages via either Commands or Programmable SMS APIs.
:param bool voice_enabled: Defines whether SIMs are capable of making and receiving voice calls.
:param bool national_roaming_enabled: Defines whether SIMs can roam onto other networks in the SIM's home country.
:param unicode international_roaming: The international_roaming
:param unicode national_roaming_data_limit: Network-enforced limit specifying the total Megabytes of national roaming data usage allowed during one month.
:param unicode international_roaming_data_limit: The international_roaming_data_limit
:returns: Newly created RatePlanInstance
:rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
"""
data = values.of({'UniqueName': unique_name, 'FriendlyName': friendly_name, 'DataEnabled': data_enabled, 'DataLimit': data_limit, 'DataMetering': data_metering, 'MessagingEnabled': messaging_enabled, 'VoiceEnabled': voice_enabled, 'NationalRoamingEnabled': national_roaming_enabled, 'InternationalRoaming': serialize.map(international_roaming, lambda e: e), 'NationalRoamingDataLimit': national_roaming_data_limit, 'InternationalRoamingDataLimit': international_roaming_data_limit})
payload = self._version.create('POST', self._uri, data=data)
return RatePlanInstance(self._version, payload) |
def sqliteRowsToDicts(sqliteRows):
"""
Unpacks sqlite rows as returned by fetchall
into an array of simple dicts.
:param sqliteRows: array of rows returned from fetchall DB call
:return: array of dicts, keyed by the column names.
"""
return map(lambda r: dict(zip(r.keys(), r)), sqliteRows) | def function[sqliteRowsToDicts, parameter[sqliteRows]]:
constant[
Unpacks sqlite rows as returned by fetchall
into an array of simple dicts.
:param sqliteRows: array of rows returned from fetchall DB call
:return: array of dicts, keyed by the column names.
]
return[call[name[map], parameter[<ast.Lambda object at 0x7da1b26afdf0>, name[sqliteRows]]]] | keyword[def] identifier[sqliteRowsToDicts] ( identifier[sqliteRows] ):
literal[string]
keyword[return] identifier[map] ( keyword[lambda] identifier[r] : identifier[dict] ( identifier[zip] ( identifier[r] . identifier[keys] (), identifier[r] )), identifier[sqliteRows] ) | def sqliteRowsToDicts(sqliteRows):
"""
Unpacks sqlite rows as returned by fetchall
into an array of simple dicts.
:param sqliteRows: array of rows returned from fetchall DB call
:return: array of dicts, keyed by the column names.
"""
return map(lambda r: dict(zip(r.keys(), r)), sqliteRows) |
def _validate_authority_uri_abs_path(host, path):
"""Ensure that path in URL with authority starts with a leading slash.
Raise ValueError if not.
"""
if len(host) > 0 and len(path) > 0 and not path.startswith("/"):
raise ValueError(
"Path in a URL with authority " "should start with a slash ('/') if set"
) | def function[_validate_authority_uri_abs_path, parameter[host, path]]:
constant[Ensure that path in URL with authority starts with a leading slash.
Raise ValueError if not.
]
if <ast.BoolOp object at 0x7da18bc72b90> begin[:]
<ast.Raise object at 0x7da18bc70eb0> | keyword[def] identifier[_validate_authority_uri_abs_path] ( identifier[host] , identifier[path] ):
literal[string]
keyword[if] identifier[len] ( identifier[host] )> literal[int] keyword[and] identifier[len] ( identifier[path] )> literal[int] keyword[and] keyword[not] identifier[path] . identifier[startswith] ( literal[string] ):
keyword[raise] identifier[ValueError] (
literal[string] literal[string]
) | def _validate_authority_uri_abs_path(host, path):
"""Ensure that path in URL with authority starts with a leading slash.
Raise ValueError if not.
"""
if len(host) > 0 and len(path) > 0 and (not path.startswith('/')):
raise ValueError("Path in a URL with authority should start with a slash ('/') if set") # depends on [control=['if'], data=[]] |
def repr_part(self):
"""String usable in a space's ``__repr__`` method."""
optargs = [('weighting', array_str(self.array, nprint=10), ''),
('exponent', self.exponent, 2.0)]
return signature_string([], optargs, sep=',\n',
mod=[[], ['!s', ':.4']]) | def function[repr_part, parameter[self]]:
constant[String usable in a space's ``__repr__`` method.]
variable[optargs] assign[=] list[[<ast.Tuple object at 0x7da1b1eed720>, <ast.Tuple object at 0x7da1b1eed750>]]
return[call[name[signature_string], parameter[list[[]], name[optargs]]]] | keyword[def] identifier[repr_part] ( identifier[self] ):
literal[string]
identifier[optargs] =[( literal[string] , identifier[array_str] ( identifier[self] . identifier[array] , identifier[nprint] = literal[int] ), literal[string] ),
( literal[string] , identifier[self] . identifier[exponent] , literal[int] )]
keyword[return] identifier[signature_string] ([], identifier[optargs] , identifier[sep] = literal[string] ,
identifier[mod] =[[],[ literal[string] , literal[string] ]]) | def repr_part(self):
"""String usable in a space's ``__repr__`` method."""
optargs = [('weighting', array_str(self.array, nprint=10), ''), ('exponent', self.exponent, 2.0)]
return signature_string([], optargs, sep=',\n', mod=[[], ['!s', ':.4']]) |
def _elect_source_replication_group(
self,
over_replicated_rgs,
partition,
):
"""Decide source replication-group based as group with highest replica
count.
"""
return max(
over_replicated_rgs,
key=lambda rg: rg.count_replica(partition),
) | def function[_elect_source_replication_group, parameter[self, over_replicated_rgs, partition]]:
constant[Decide source replication-group based as group with highest replica
count.
]
return[call[name[max], parameter[name[over_replicated_rgs]]]] | keyword[def] identifier[_elect_source_replication_group] (
identifier[self] ,
identifier[over_replicated_rgs] ,
identifier[partition] ,
):
literal[string]
keyword[return] identifier[max] (
identifier[over_replicated_rgs] ,
identifier[key] = keyword[lambda] identifier[rg] : identifier[rg] . identifier[count_replica] ( identifier[partition] ),
) | def _elect_source_replication_group(self, over_replicated_rgs, partition):
"""Decide source replication-group based as group with highest replica
count.
"""
return max(over_replicated_rgs, key=lambda rg: rg.count_replica(partition)) |
def _expand_directories(paths):
"""Expand directory with all files it contains."""
for path in paths:
path_ = Path(path)
if path_.is_dir():
for expanded in path_.rglob('*'):
yield str(expanded)
else:
yield path | def function[_expand_directories, parameter[paths]]:
constant[Expand directory with all files it contains.]
for taget[name[path]] in starred[name[paths]] begin[:]
variable[path_] assign[=] call[name[Path], parameter[name[path]]]
if call[name[path_].is_dir, parameter[]] begin[:]
for taget[name[expanded]] in starred[call[name[path_].rglob, parameter[constant[*]]]] begin[:]
<ast.Yield object at 0x7da20c6a8a30> | keyword[def] identifier[_expand_directories] ( identifier[paths] ):
literal[string]
keyword[for] identifier[path] keyword[in] identifier[paths] :
identifier[path_] = identifier[Path] ( identifier[path] )
keyword[if] identifier[path_] . identifier[is_dir] ():
keyword[for] identifier[expanded] keyword[in] identifier[path_] . identifier[rglob] ( literal[string] ):
keyword[yield] identifier[str] ( identifier[expanded] )
keyword[else] :
keyword[yield] identifier[path] | def _expand_directories(paths):
"""Expand directory with all files it contains."""
for path in paths:
path_ = Path(path)
if path_.is_dir():
for expanded in path_.rglob('*'):
yield str(expanded) # depends on [control=['for'], data=['expanded']] # depends on [control=['if'], data=[]]
else:
yield path # depends on [control=['for'], data=['path']] |
def forces(self, sets_of_forces):
"""Set forces in displacement dataset.
Parameters
----------
sets_of_forces : array_like
A set of atomic forces in displaced supercells. The order of
displaced supercells has to match with that in displacement
dataset.
shape=(displaced supercells, atoms in supercell, 3), dtype='double'
[[[f_1x, f_1y, f_1z], [f_2x, f_2y, f_2z], ...], # first supercell
[[f_1x, f_1y, f_1z], [f_2x, f_2y, f_2z], ...], # second supercell
...
]
"""
if 'first_atoms' in self._displacement_dataset:
for disp, forces in zip(self._displacement_dataset['first_atoms'],
sets_of_forces):
disp['forces'] = forces
elif 'forces' in self._displacement_dataset:
forces = np.array(sets_of_forces, dtype='double', order='C')
self._displacement_dataset['forces'] = forces | def function[forces, parameter[self, sets_of_forces]]:
constant[Set forces in displacement dataset.
Parameters
----------
sets_of_forces : array_like
A set of atomic forces in displaced supercells. The order of
displaced supercells has to match with that in displacement
dataset.
shape=(displaced supercells, atoms in supercell, 3), dtype='double'
[[[f_1x, f_1y, f_1z], [f_2x, f_2y, f_2z], ...], # first supercell
[[f_1x, f_1y, f_1z], [f_2x, f_2y, f_2z], ...], # second supercell
...
]
]
if compare[constant[first_atoms] in name[self]._displacement_dataset] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20cabcd60>, <ast.Name object at 0x7da18fe925c0>]]] in starred[call[name[zip], parameter[call[name[self]._displacement_dataset][constant[first_atoms]], name[sets_of_forces]]]] begin[:]
call[name[disp]][constant[forces]] assign[=] name[forces] | keyword[def] identifier[forces] ( identifier[self] , identifier[sets_of_forces] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_displacement_dataset] :
keyword[for] identifier[disp] , identifier[forces] keyword[in] identifier[zip] ( identifier[self] . identifier[_displacement_dataset] [ literal[string] ],
identifier[sets_of_forces] ):
identifier[disp] [ literal[string] ]= identifier[forces]
keyword[elif] literal[string] keyword[in] identifier[self] . identifier[_displacement_dataset] :
identifier[forces] = identifier[np] . identifier[array] ( identifier[sets_of_forces] , identifier[dtype] = literal[string] , identifier[order] = literal[string] )
identifier[self] . identifier[_displacement_dataset] [ literal[string] ]= identifier[forces] | def forces(self, sets_of_forces):
"""Set forces in displacement dataset.
Parameters
----------
sets_of_forces : array_like
A set of atomic forces in displaced supercells. The order of
displaced supercells has to match with that in displacement
dataset.
shape=(displaced supercells, atoms in supercell, 3), dtype='double'
[[[f_1x, f_1y, f_1z], [f_2x, f_2y, f_2z], ...], # first supercell
[[f_1x, f_1y, f_1z], [f_2x, f_2y, f_2z], ...], # second supercell
...
]
"""
if 'first_atoms' in self._displacement_dataset:
for (disp, forces) in zip(self._displacement_dataset['first_atoms'], sets_of_forces):
disp['forces'] = forces # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif 'forces' in self._displacement_dataset:
forces = np.array(sets_of_forces, dtype='double', order='C')
self._displacement_dataset['forces'] = forces # depends on [control=['if'], data=[]] |
def print_paths(self):
"""Returns a string suitable for printing of the search path"""
# Uses a list to get the order right
ret = []
for i in self._get_paths():
if i not in ret:
ret.append(i)
return os.pathsep.join(ret) | def function[print_paths, parameter[self]]:
constant[Returns a string suitable for printing of the search path]
variable[ret] assign[=] list[[]]
for taget[name[i]] in starred[call[name[self]._get_paths, parameter[]]] begin[:]
if compare[name[i] <ast.NotIn object at 0x7da2590d7190> name[ret]] begin[:]
call[name[ret].append, parameter[name[i]]]
return[call[name[os].pathsep.join, parameter[name[ret]]]] | keyword[def] identifier[print_paths] ( identifier[self] ):
literal[string]
identifier[ret] =[]
keyword[for] identifier[i] keyword[in] identifier[self] . identifier[_get_paths] ():
keyword[if] identifier[i] keyword[not] keyword[in] identifier[ret] :
identifier[ret] . identifier[append] ( identifier[i] )
keyword[return] identifier[os] . identifier[pathsep] . identifier[join] ( identifier[ret] ) | def print_paths(self):
"""Returns a string suitable for printing of the search path"""
# Uses a list to get the order right
ret = []
for i in self._get_paths():
if i not in ret:
ret.append(i) # depends on [control=['if'], data=['i', 'ret']] # depends on [control=['for'], data=['i']]
return os.pathsep.join(ret) |
def delta_e_cie2000(lab_color_vector, lab_color_matrix, Kl=1, Kc=1, Kh=1):
"""
Calculates the Delta E (CIE2000) of two colors.
"""
L, a, b = lab_color_vector
avg_Lp = (L + lab_color_matrix[:, 0]) / 2.0
C1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
C2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
avg_C1_C2 = (C1 + C2) / 2.0
G = 0.5 * (1 - numpy.sqrt(numpy.power(avg_C1_C2, 7.0) / (numpy.power(avg_C1_C2, 7.0) + numpy.power(25.0, 7.0))))
a1p = (1.0 + G) * a
a2p = (1.0 + G) * lab_color_matrix[:, 1]
C1p = numpy.sqrt(numpy.power(a1p, 2) + numpy.power(b, 2))
C2p = numpy.sqrt(numpy.power(a2p, 2) + numpy.power(lab_color_matrix[:, 2], 2))
avg_C1p_C2p = (C1p + C2p) / 2.0
h1p = numpy.degrees(numpy.arctan2(b, a1p))
h1p += (h1p < 0) * 360
h2p = numpy.degrees(numpy.arctan2(lab_color_matrix[:, 2], a2p))
h2p += (h2p < 0) * 360
avg_Hp = (((numpy.fabs(h1p - h2p) > 180) * 360) + h1p + h2p) / 2.0
T = 1 - 0.17 * numpy.cos(numpy.radians(avg_Hp - 30)) + \
0.24 * numpy.cos(numpy.radians(2 * avg_Hp)) + \
0.32 * numpy.cos(numpy.radians(3 * avg_Hp + 6)) - \
0.2 * numpy.cos(numpy.radians(4 * avg_Hp - 63))
diff_h2p_h1p = h2p - h1p
delta_hp = diff_h2p_h1p + (numpy.fabs(diff_h2p_h1p) > 180) * 360
delta_hp -= (h2p > h1p) * 720
delta_Lp = lab_color_matrix[:, 0] - L
delta_Cp = C2p - C1p
delta_Hp = 2 * numpy.sqrt(C2p * C1p) * numpy.sin(numpy.radians(delta_hp) / 2.0)
S_L = 1 + ((0.015 * numpy.power(avg_Lp - 50, 2)) / numpy.sqrt(20 + numpy.power(avg_Lp - 50, 2.0)))
S_C = 1 + 0.045 * avg_C1p_C2p
S_H = 1 + 0.015 * avg_C1p_C2p * T
delta_ro = 30 * numpy.exp(-(numpy.power(((avg_Hp - 275) / 25), 2.0)))
R_C = numpy.sqrt((numpy.power(avg_C1p_C2p, 7.0)) / (numpy.power(avg_C1p_C2p, 7.0) + numpy.power(25.0, 7.0)))
R_T = -2 * R_C * numpy.sin(2 * numpy.radians(delta_ro))
return numpy.sqrt(
numpy.power(delta_Lp / (S_L * Kl), 2) +
numpy.power(delta_Cp / (S_C * Kc), 2) +
numpy.power(delta_Hp / (S_H * Kh), 2) +
R_T * (delta_Cp / (S_C * Kc)) * (delta_Hp / (S_H * Kh))) | def function[delta_e_cie2000, parameter[lab_color_vector, lab_color_matrix, Kl, Kc, Kh]]:
constant[
Calculates the Delta E (CIE2000) of two colors.
]
<ast.Tuple object at 0x7da207f98cd0> assign[=] name[lab_color_vector]
variable[avg_Lp] assign[=] binary_operation[binary_operation[name[L] + call[name[lab_color_matrix]][tuple[[<ast.Slice object at 0x7da207f992a0>, <ast.Constant object at 0x7da207f9a980>]]]] / constant[2.0]]
variable[C1] assign[=] call[name[numpy].sqrt, parameter[call[name[numpy].sum, parameter[call[name[numpy].power, parameter[call[name[lab_color_vector]][<ast.Slice object at 0x7da2054a6710>], constant[2]]]]]]]
variable[C2] assign[=] call[name[numpy].sqrt, parameter[call[name[numpy].sum, parameter[call[name[numpy].power, parameter[call[name[lab_color_matrix]][tuple[[<ast.Slice object at 0x7da2054a4e80>, <ast.Slice object at 0x7da2054a5540>]]], constant[2]]]]]]]
variable[avg_C1_C2] assign[=] binary_operation[binary_operation[name[C1] + name[C2]] / constant[2.0]]
variable[G] assign[=] binary_operation[constant[0.5] * binary_operation[constant[1] - call[name[numpy].sqrt, parameter[binary_operation[call[name[numpy].power, parameter[name[avg_C1_C2], constant[7.0]]] / binary_operation[call[name[numpy].power, parameter[name[avg_C1_C2], constant[7.0]]] + call[name[numpy].power, parameter[constant[25.0], constant[7.0]]]]]]]]]
variable[a1p] assign[=] binary_operation[binary_operation[constant[1.0] + name[G]] * name[a]]
variable[a2p] assign[=] binary_operation[binary_operation[constant[1.0] + name[G]] * call[name[lab_color_matrix]][tuple[[<ast.Slice object at 0x7da2054a6890>, <ast.Constant object at 0x7da2054a5840>]]]]
variable[C1p] assign[=] call[name[numpy].sqrt, parameter[binary_operation[call[name[numpy].power, parameter[name[a1p], constant[2]]] + call[name[numpy].power, parameter[name[b], constant[2]]]]]]
variable[C2p] assign[=] call[name[numpy].sqrt, parameter[binary_operation[call[name[numpy].power, parameter[name[a2p], constant[2]]] + call[name[numpy].power, parameter[call[name[lab_color_matrix]][tuple[[<ast.Slice object at 0x7da2054a4b20>, <ast.Constant object at 0x7da2054a5780>]]], constant[2]]]]]]
variable[avg_C1p_C2p] assign[=] binary_operation[binary_operation[name[C1p] + name[C2p]] / constant[2.0]]
variable[h1p] assign[=] call[name[numpy].degrees, parameter[call[name[numpy].arctan2, parameter[name[b], name[a1p]]]]]
<ast.AugAssign object at 0x7da2054a5000>
variable[h2p] assign[=] call[name[numpy].degrees, parameter[call[name[numpy].arctan2, parameter[call[name[lab_color_matrix]][tuple[[<ast.Slice object at 0x7da207f993f0>, <ast.Constant object at 0x7da207f992d0>]]], name[a2p]]]]]
<ast.AugAssign object at 0x7da207f99510>
variable[avg_Hp] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[compare[call[name[numpy].fabs, parameter[binary_operation[name[h1p] - name[h2p]]]] greater[>] constant[180]] * constant[360]] + name[h1p]] + name[h2p]] / constant[2.0]]
variable[T] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[1] - binary_operation[constant[0.17] * call[name[numpy].cos, parameter[call[name[numpy].radians, parameter[binary_operation[name[avg_Hp] - constant[30]]]]]]]] + binary_operation[constant[0.24] * call[name[numpy].cos, parameter[call[name[numpy].radians, parameter[binary_operation[constant[2] * name[avg_Hp]]]]]]]] + binary_operation[constant[0.32] * call[name[numpy].cos, parameter[call[name[numpy].radians, parameter[binary_operation[binary_operation[constant[3] * name[avg_Hp]] + constant[6]]]]]]]] - binary_operation[constant[0.2] * call[name[numpy].cos, parameter[call[name[numpy].radians, parameter[binary_operation[binary_operation[constant[4] * name[avg_Hp]] - constant[63]]]]]]]]
variable[diff_h2p_h1p] assign[=] binary_operation[name[h2p] - name[h1p]]
variable[delta_hp] assign[=] binary_operation[name[diff_h2p_h1p] + binary_operation[compare[call[name[numpy].fabs, parameter[name[diff_h2p_h1p]]] greater[>] constant[180]] * constant[360]]]
<ast.AugAssign object at 0x7da207f99810>
variable[delta_Lp] assign[=] binary_operation[call[name[lab_color_matrix]][tuple[[<ast.Slice object at 0x7da207f99960>, <ast.Constant object at 0x7da207f9bc10>]]] - name[L]]
variable[delta_Cp] assign[=] binary_operation[name[C2p] - name[C1p]]
variable[delta_Hp] assign[=] binary_operation[binary_operation[constant[2] * call[name[numpy].sqrt, parameter[binary_operation[name[C2p] * name[C1p]]]]] * call[name[numpy].sin, parameter[binary_operation[call[name[numpy].radians, parameter[name[delta_hp]]] / constant[2.0]]]]]
variable[S_L] assign[=] binary_operation[constant[1] + binary_operation[binary_operation[constant[0.015] * call[name[numpy].power, parameter[binary_operation[name[avg_Lp] - constant[50]], constant[2]]]] / call[name[numpy].sqrt, parameter[binary_operation[constant[20] + call[name[numpy].power, parameter[binary_operation[name[avg_Lp] - constant[50]], constant[2.0]]]]]]]]
variable[S_C] assign[=] binary_operation[constant[1] + binary_operation[constant[0.045] * name[avg_C1p_C2p]]]
variable[S_H] assign[=] binary_operation[constant[1] + binary_operation[binary_operation[constant[0.015] * name[avg_C1p_C2p]] * name[T]]]
variable[delta_ro] assign[=] binary_operation[constant[30] * call[name[numpy].exp, parameter[<ast.UnaryOp object at 0x7da18f58de10>]]]
variable[R_C] assign[=] call[name[numpy].sqrt, parameter[binary_operation[call[name[numpy].power, parameter[name[avg_C1p_C2p], constant[7.0]]] / binary_operation[call[name[numpy].power, parameter[name[avg_C1p_C2p], constant[7.0]]] + call[name[numpy].power, parameter[constant[25.0], constant[7.0]]]]]]]
variable[R_T] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18f58e620> * name[R_C]] * call[name[numpy].sin, parameter[binary_operation[constant[2] * call[name[numpy].radians, parameter[name[delta_ro]]]]]]]
return[call[name[numpy].sqrt, parameter[binary_operation[binary_operation[binary_operation[call[name[numpy].power, parameter[binary_operation[name[delta_Lp] / binary_operation[name[S_L] * name[Kl]]], constant[2]]] + call[name[numpy].power, parameter[binary_operation[name[delta_Cp] / binary_operation[name[S_C] * name[Kc]]], constant[2]]]] + call[name[numpy].power, parameter[binary_operation[name[delta_Hp] / binary_operation[name[S_H] * name[Kh]]], constant[2]]]] + binary_operation[binary_operation[name[R_T] * binary_operation[name[delta_Cp] / binary_operation[name[S_C] * name[Kc]]]] * binary_operation[name[delta_Hp] / binary_operation[name[S_H] * name[Kh]]]]]]]] | keyword[def] identifier[delta_e_cie2000] ( identifier[lab_color_vector] , identifier[lab_color_matrix] , identifier[Kl] = literal[int] , identifier[Kc] = literal[int] , identifier[Kh] = literal[int] ):
literal[string]
identifier[L] , identifier[a] , identifier[b] = identifier[lab_color_vector]
identifier[avg_Lp] =( identifier[L] + identifier[lab_color_matrix] [:, literal[int] ])/ literal[int]
identifier[C1] = identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[sum] ( identifier[numpy] . identifier[power] ( identifier[lab_color_vector] [ literal[int] :], literal[int] )))
identifier[C2] = identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[sum] ( identifier[numpy] . identifier[power] ( identifier[lab_color_matrix] [:, literal[int] :], literal[int] ), identifier[axis] = literal[int] ))
identifier[avg_C1_C2] =( identifier[C1] + identifier[C2] )/ literal[int]
identifier[G] = literal[int] *( literal[int] - identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[power] ( identifier[avg_C1_C2] , literal[int] )/( identifier[numpy] . identifier[power] ( identifier[avg_C1_C2] , literal[int] )+ identifier[numpy] . identifier[power] ( literal[int] , literal[int] ))))
identifier[a1p] =( literal[int] + identifier[G] )* identifier[a]
identifier[a2p] =( literal[int] + identifier[G] )* identifier[lab_color_matrix] [:, literal[int] ]
identifier[C1p] = identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[power] ( identifier[a1p] , literal[int] )+ identifier[numpy] . identifier[power] ( identifier[b] , literal[int] ))
identifier[C2p] = identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[power] ( identifier[a2p] , literal[int] )+ identifier[numpy] . identifier[power] ( identifier[lab_color_matrix] [:, literal[int] ], literal[int] ))
identifier[avg_C1p_C2p] =( identifier[C1p] + identifier[C2p] )/ literal[int]
identifier[h1p] = identifier[numpy] . identifier[degrees] ( identifier[numpy] . identifier[arctan2] ( identifier[b] , identifier[a1p] ))
identifier[h1p] +=( identifier[h1p] < literal[int] )* literal[int]
identifier[h2p] = identifier[numpy] . identifier[degrees] ( identifier[numpy] . identifier[arctan2] ( identifier[lab_color_matrix] [:, literal[int] ], identifier[a2p] ))
identifier[h2p] +=( identifier[h2p] < literal[int] )* literal[int]
identifier[avg_Hp] =((( identifier[numpy] . identifier[fabs] ( identifier[h1p] - identifier[h2p] )> literal[int] )* literal[int] )+ identifier[h1p] + identifier[h2p] )/ literal[int]
identifier[T] = literal[int] - literal[int] * identifier[numpy] . identifier[cos] ( identifier[numpy] . identifier[radians] ( identifier[avg_Hp] - literal[int] ))+ literal[int] * identifier[numpy] . identifier[cos] ( identifier[numpy] . identifier[radians] ( literal[int] * identifier[avg_Hp] ))+ literal[int] * identifier[numpy] . identifier[cos] ( identifier[numpy] . identifier[radians] ( literal[int] * identifier[avg_Hp] + literal[int] ))- literal[int] * identifier[numpy] . identifier[cos] ( identifier[numpy] . identifier[radians] ( literal[int] * identifier[avg_Hp] - literal[int] ))
identifier[diff_h2p_h1p] = identifier[h2p] - identifier[h1p]
identifier[delta_hp] = identifier[diff_h2p_h1p] +( identifier[numpy] . identifier[fabs] ( identifier[diff_h2p_h1p] )> literal[int] )* literal[int]
identifier[delta_hp] -=( identifier[h2p] > identifier[h1p] )* literal[int]
identifier[delta_Lp] = identifier[lab_color_matrix] [:, literal[int] ]- identifier[L]
identifier[delta_Cp] = identifier[C2p] - identifier[C1p]
identifier[delta_Hp] = literal[int] * identifier[numpy] . identifier[sqrt] ( identifier[C2p] * identifier[C1p] )* identifier[numpy] . identifier[sin] ( identifier[numpy] . identifier[radians] ( identifier[delta_hp] )/ literal[int] )
identifier[S_L] = literal[int] +(( literal[int] * identifier[numpy] . identifier[power] ( identifier[avg_Lp] - literal[int] , literal[int] ))/ identifier[numpy] . identifier[sqrt] ( literal[int] + identifier[numpy] . identifier[power] ( identifier[avg_Lp] - literal[int] , literal[int] )))
identifier[S_C] = literal[int] + literal[int] * identifier[avg_C1p_C2p]
identifier[S_H] = literal[int] + literal[int] * identifier[avg_C1p_C2p] * identifier[T]
identifier[delta_ro] = literal[int] * identifier[numpy] . identifier[exp] (-( identifier[numpy] . identifier[power] ((( identifier[avg_Hp] - literal[int] )/ literal[int] ), literal[int] )))
identifier[R_C] = identifier[numpy] . identifier[sqrt] (( identifier[numpy] . identifier[power] ( identifier[avg_C1p_C2p] , literal[int] ))/( identifier[numpy] . identifier[power] ( identifier[avg_C1p_C2p] , literal[int] )+ identifier[numpy] . identifier[power] ( literal[int] , literal[int] )))
identifier[R_T] =- literal[int] * identifier[R_C] * identifier[numpy] . identifier[sin] ( literal[int] * identifier[numpy] . identifier[radians] ( identifier[delta_ro] ))
keyword[return] identifier[numpy] . identifier[sqrt] (
identifier[numpy] . identifier[power] ( identifier[delta_Lp] /( identifier[S_L] * identifier[Kl] ), literal[int] )+
identifier[numpy] . identifier[power] ( identifier[delta_Cp] /( identifier[S_C] * identifier[Kc] ), literal[int] )+
identifier[numpy] . identifier[power] ( identifier[delta_Hp] /( identifier[S_H] * identifier[Kh] ), literal[int] )+
identifier[R_T] *( identifier[delta_Cp] /( identifier[S_C] * identifier[Kc] ))*( identifier[delta_Hp] /( identifier[S_H] * identifier[Kh] ))) | def delta_e_cie2000(lab_color_vector, lab_color_matrix, Kl=1, Kc=1, Kh=1):
"""
Calculates the Delta E (CIE2000) of two colors.
"""
(L, a, b) = lab_color_vector
avg_Lp = (L + lab_color_matrix[:, 0]) / 2.0
C1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
C2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
avg_C1_C2 = (C1 + C2) / 2.0
G = 0.5 * (1 - numpy.sqrt(numpy.power(avg_C1_C2, 7.0) / (numpy.power(avg_C1_C2, 7.0) + numpy.power(25.0, 7.0))))
a1p = (1.0 + G) * a
a2p = (1.0 + G) * lab_color_matrix[:, 1]
C1p = numpy.sqrt(numpy.power(a1p, 2) + numpy.power(b, 2))
C2p = numpy.sqrt(numpy.power(a2p, 2) + numpy.power(lab_color_matrix[:, 2], 2))
avg_C1p_C2p = (C1p + C2p) / 2.0
h1p = numpy.degrees(numpy.arctan2(b, a1p))
h1p += (h1p < 0) * 360
h2p = numpy.degrees(numpy.arctan2(lab_color_matrix[:, 2], a2p))
h2p += (h2p < 0) * 360
avg_Hp = ((numpy.fabs(h1p - h2p) > 180) * 360 + h1p + h2p) / 2.0
T = 1 - 0.17 * numpy.cos(numpy.radians(avg_Hp - 30)) + 0.24 * numpy.cos(numpy.radians(2 * avg_Hp)) + 0.32 * numpy.cos(numpy.radians(3 * avg_Hp + 6)) - 0.2 * numpy.cos(numpy.radians(4 * avg_Hp - 63))
diff_h2p_h1p = h2p - h1p
delta_hp = diff_h2p_h1p + (numpy.fabs(diff_h2p_h1p) > 180) * 360
delta_hp -= (h2p > h1p) * 720
delta_Lp = lab_color_matrix[:, 0] - L
delta_Cp = C2p - C1p
delta_Hp = 2 * numpy.sqrt(C2p * C1p) * numpy.sin(numpy.radians(delta_hp) / 2.0)
S_L = 1 + 0.015 * numpy.power(avg_Lp - 50, 2) / numpy.sqrt(20 + numpy.power(avg_Lp - 50, 2.0))
S_C = 1 + 0.045 * avg_C1p_C2p
S_H = 1 + 0.015 * avg_C1p_C2p * T
delta_ro = 30 * numpy.exp(-numpy.power((avg_Hp - 275) / 25, 2.0))
R_C = numpy.sqrt(numpy.power(avg_C1p_C2p, 7.0) / (numpy.power(avg_C1p_C2p, 7.0) + numpy.power(25.0, 7.0)))
R_T = -2 * R_C * numpy.sin(2 * numpy.radians(delta_ro))
return numpy.sqrt(numpy.power(delta_Lp / (S_L * Kl), 2) + numpy.power(delta_Cp / (S_C * Kc), 2) + numpy.power(delta_Hp / (S_H * Kh), 2) + R_T * (delta_Cp / (S_C * Kc)) * (delta_Hp / (S_H * Kh))) |
def make_file_name(self, template_name, template_extension=None):
"""
Generate and return the file name for the given template name.
Arguments:
template_extension: defaults to the instance's extension.
"""
file_name = template_name
if template_extension is None:
template_extension = self.template_extension
if template_extension is not False:
file_name += os.path.extsep + template_extension
return file_name | def function[make_file_name, parameter[self, template_name, template_extension]]:
constant[
Generate and return the file name for the given template name.
Arguments:
template_extension: defaults to the instance's extension.
]
variable[file_name] assign[=] name[template_name]
if compare[name[template_extension] is constant[None]] begin[:]
variable[template_extension] assign[=] name[self].template_extension
if compare[name[template_extension] is_not constant[False]] begin[:]
<ast.AugAssign object at 0x7da20c6e6950>
return[name[file_name]] | keyword[def] identifier[make_file_name] ( identifier[self] , identifier[template_name] , identifier[template_extension] = keyword[None] ):
literal[string]
identifier[file_name] = identifier[template_name]
keyword[if] identifier[template_extension] keyword[is] keyword[None] :
identifier[template_extension] = identifier[self] . identifier[template_extension]
keyword[if] identifier[template_extension] keyword[is] keyword[not] keyword[False] :
identifier[file_name] += identifier[os] . identifier[path] . identifier[extsep] + identifier[template_extension]
keyword[return] identifier[file_name] | def make_file_name(self, template_name, template_extension=None):
"""
Generate and return the file name for the given template name.
Arguments:
template_extension: defaults to the instance's extension.
"""
file_name = template_name
if template_extension is None:
template_extension = self.template_extension # depends on [control=['if'], data=['template_extension']]
if template_extension is not False:
file_name += os.path.extsep + template_extension # depends on [control=['if'], data=['template_extension']]
return file_name |
def happens(intervals: Iterable[float], name: Optional[str] = None) -> Callable:
"""
Decorator used to set up a process that adds a new instance of another process at intervals dictated by the given
sequence (which may be infinite).
Example: the following program runs process named `my_process` 5 times, each time spaced by 2.0 time units.
```
from itertools import repeat
sim = Simulator()
log = []
@happens(repeat(2.0, 5))
def my_process(the_log):
the_log.append(now())
sim.add(my_process, log)
sim.run()
print(str(log)) # Expect: [2.0, 4.0, 6.0, 8.0, 10.0]
```
"""
def hook(event: Callable):
def make_happen(*args_event: Any, **kwargs_event: Any) -> None:
if name is not None:
local.name = cast(str, name)
for interval in intervals:
advance(interval)
add(event, *args_event, **kwargs_event)
return make_happen
return hook | def function[happens, parameter[intervals, name]]:
constant[
Decorator used to set up a process that adds a new instance of another process at intervals dictated by the given
sequence (which may be infinite).
Example: the following program runs process named `my_process` 5 times, each time spaced by 2.0 time units.
```
from itertools import repeat
sim = Simulator()
log = []
@happens(repeat(2.0, 5))
def my_process(the_log):
the_log.append(now())
sim.add(my_process, log)
sim.run()
print(str(log)) # Expect: [2.0, 4.0, 6.0, 8.0, 10.0]
```
]
def function[hook, parameter[event]]:
def function[make_happen, parameter[]]:
if compare[name[name] is_not constant[None]] begin[:]
name[local].name assign[=] call[name[cast], parameter[name[str], name[name]]]
for taget[name[interval]] in starred[name[intervals]] begin[:]
call[name[advance], parameter[name[interval]]]
call[name[add], parameter[name[event], <ast.Starred object at 0x7da1b0416530>]]
return[name[make_happen]]
return[name[hook]] | keyword[def] identifier[happens] ( identifier[intervals] : identifier[Iterable] [ identifier[float] ], identifier[name] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[Callable] :
literal[string]
keyword[def] identifier[hook] ( identifier[event] : identifier[Callable] ):
keyword[def] identifier[make_happen] (* identifier[args_event] : identifier[Any] ,** identifier[kwargs_event] : identifier[Any] )-> keyword[None] :
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[local] . identifier[name] = identifier[cast] ( identifier[str] , identifier[name] )
keyword[for] identifier[interval] keyword[in] identifier[intervals] :
identifier[advance] ( identifier[interval] )
identifier[add] ( identifier[event] ,* identifier[args_event] ,** identifier[kwargs_event] )
keyword[return] identifier[make_happen]
keyword[return] identifier[hook] | def happens(intervals: Iterable[float], name: Optional[str]=None) -> Callable:
"""
Decorator used to set up a process that adds a new instance of another process at intervals dictated by the given
sequence (which may be infinite).
Example: the following program runs process named `my_process` 5 times, each time spaced by 2.0 time units.
```
from itertools import repeat
sim = Simulator()
log = []
@happens(repeat(2.0, 5))
def my_process(the_log):
the_log.append(now())
sim.add(my_process, log)
sim.run()
print(str(log)) # Expect: [2.0, 4.0, 6.0, 8.0, 10.0]
```
"""
def hook(event: Callable):
def make_happen(*args_event: Any, **kwargs_event: Any) -> None:
if name is not None:
local.name = cast(str, name) # depends on [control=['if'], data=['name']]
for interval in intervals:
advance(interval)
add(event, *args_event, **kwargs_event) # depends on [control=['for'], data=['interval']]
return make_happen
return hook |
def cdist_sq_periodic(ra, rb, L):
"""Return the squared distance between each point in on set,
and every point in a second set, in periodic space.
Parameters
----------
ra, rb: float array-like, shape (n, d) and (m, d) in d dimensions.
Two sets of points.
L: float array, shape (d,)
System lengths.
Returns
-------
cdist_sq: float array-like, shape (n, m, d)
cdist_sq[i, j] is the squared distance between point j and point i.
"""
return np.sum(np.square(csep_periodic(ra, rb, L)), axis=-1) | def function[cdist_sq_periodic, parameter[ra, rb, L]]:
constant[Return the squared distance between each point in on set,
and every point in a second set, in periodic space.
Parameters
----------
ra, rb: float array-like, shape (n, d) and (m, d) in d dimensions.
Two sets of points.
L: float array, shape (d,)
System lengths.
Returns
-------
cdist_sq: float array-like, shape (n, m, d)
cdist_sq[i, j] is the squared distance between point j and point i.
]
return[call[name[np].sum, parameter[call[name[np].square, parameter[call[name[csep_periodic], parameter[name[ra], name[rb], name[L]]]]]]]] | keyword[def] identifier[cdist_sq_periodic] ( identifier[ra] , identifier[rb] , identifier[L] ):
literal[string]
keyword[return] identifier[np] . identifier[sum] ( identifier[np] . identifier[square] ( identifier[csep_periodic] ( identifier[ra] , identifier[rb] , identifier[L] )), identifier[axis] =- literal[int] ) | def cdist_sq_periodic(ra, rb, L):
"""Return the squared distance between each point in on set,
and every point in a second set, in periodic space.
Parameters
----------
ra, rb: float array-like, shape (n, d) and (m, d) in d dimensions.
Two sets of points.
L: float array, shape (d,)
System lengths.
Returns
-------
cdist_sq: float array-like, shape (n, m, d)
cdist_sq[i, j] is the squared distance between point j and point i.
"""
return np.sum(np.square(csep_periodic(ra, rb, L)), axis=-1) |
def _base_environ(self, **request):
    """Build the base WSGI environ dict for a simulated request.

    Starts from a minimal valid WSGI environ (PEP 3333), plus:
      - HTTP_COOKIE: serialised from self.cookies, for cookie support;
      - REMOTE_ADDR: often useful, see #8551.
    The base values are then overridden first by self.defaults and
    finally by the caller's **request keyword arguments, so explicit
    arguments take highest precedence.

    :param request: keyword overrides merged into the environ last.
    :returns: a fresh environ dict the caller may mutate freely.
    """
    # See http://www.python.org/dev/peps/pep-3333/#environ-variables
    # NOTE(review): the str(...) wrappers presumably force native-str
    # keys/values under Python 2 with unicode_literals — confirm before
    # removing them.
    environ = {
        'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
        'PATH_INFO': str('/'),
        'REMOTE_ADDR': str('127.0.0.1'),
        'REQUEST_METHOD': str('GET'),
        'SCRIPT_NAME': str(''),
        'SERVER_NAME': str('localhost'),
        'SERVER_PORT': str('8000'),
        'SERVER_PROTOCOL': str('HTTP/1.1'),
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': str('http'),
        'wsgi.input': FakePayload(b''),
        'wsgi.errors': self.errors,
        'wsgi.multiprocess': True,
        'wsgi.multithread': True,
        'wsgi.run_once': False,
    }
    environ.update(self.defaults)
    environ.update(request)
    return environ | def function[_base_environ, parameter[self]]:
constant[
Override the default values for the wsgi environment variables.
]
variable[environ] assign[=] dictionary[[<ast.Constant object at 0x7da1b0ea2770>, <ast.Constant object at 0x7da1b0ea20b0>, <ast.Constant object at 0x7da1b0ea07c0>, <ast.Constant object at 0x7da1b0ea0fa0>, <ast.Constant object at 0x7da1b0ea05e0>, <ast.Constant object at 0x7da1b0ea04f0>, <ast.Constant object at 0x7da1b0ea0b80>, <ast.Constant object at 0x7da1b0ea24d0>, <ast.Constant object at 0x7da1b0ea0cd0>, <ast.Constant object at 0x7da1b0ea1e70>, <ast.Constant object at 0x7da1b0ea0250>, <ast.Constant object at 0x7da1b0ea27d0>, <ast.Constant object at 0x7da1b0ea1cf0>, <ast.Constant object at 0x7da1b0ea0b50>, <ast.Constant object at 0x7da1b0ea1180>], [<ast.Call object at 0x7da1b0ea2320>, <ast.Call object at 0x7da1b0ea1000>, <ast.Call object at 0x7da1b0ea1960>, <ast.Call object at 0x7da1b0ea00a0>, <ast.Call object at 0x7da1b0ea0610>, <ast.Call object at 0x7da1b0ea2560>, <ast.Call object at 0x7da1b0ea0460>, <ast.Call object at 0x7da1b0ea1540>, <ast.Tuple object at 0x7da1b0ea16f0>, <ast.Call object at 0x7da1b0ea1cc0>, <ast.Call object at 0x7da1b0ea1420>, <ast.Attribute object at 0x7da1b0ea1ea0>, <ast.Constant object at 0x7da1b0ea0940>, <ast.Constant object at 0x7da1b0ea1c30>, <ast.Constant object at 0x7da1b0ea18d0>]]
call[name[environ].update, parameter[name[self].defaults]]
call[name[environ].update, parameter[name[request]]]
return[name[environ]] | keyword[def] identifier[_base_environ] ( identifier[self] ,** identifier[request] ):
literal[string]
identifier[environ] ={
literal[string] : identifier[self] . identifier[cookies] . identifier[output] ( identifier[header] = literal[string] , identifier[sep] = literal[string] ),
literal[string] : identifier[str] ( literal[string] ),
literal[string] : identifier[str] ( literal[string] ),
literal[string] : identifier[str] ( literal[string] ),
literal[string] : identifier[str] ( literal[string] ),
literal[string] : identifier[str] ( literal[string] ),
literal[string] : identifier[str] ( literal[string] ),
literal[string] : identifier[str] ( literal[string] ),
literal[string] :( literal[int] , literal[int] ),
literal[string] : identifier[str] ( literal[string] ),
literal[string] : identifier[FakePayload] ( literal[string] ),
literal[string] : identifier[self] . identifier[errors] ,
literal[string] : keyword[True] ,
literal[string] : keyword[True] ,
literal[string] : keyword[False] ,
}
identifier[environ] . identifier[update] ( identifier[self] . identifier[defaults] )
identifier[environ] . identifier[update] ( identifier[request] )
keyword[return] identifier[environ] | def _base_environ(self, **request):
"""
Override the default values for the wsgi environment variables.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See http://www.python.org/dev/peps/pep-3333/#environ-variables
environ = {'HTTP_COOKIE': self.cookies.output(header='', sep='; '), 'PATH_INFO': str('/'), 'REMOTE_ADDR': str('127.0.0.1'), 'REQUEST_METHOD': str('GET'), 'SCRIPT_NAME': str(''), 'SERVER_NAME': str('localhost'), 'SERVER_PORT': str('8000'), 'SERVER_PROTOCOL': str('HTTP/1.1'), 'wsgi.version': (1, 0), 'wsgi.url_scheme': str('http'), 'wsgi.input': FakePayload(b''), 'wsgi.errors': self.errors, 'wsgi.multiprocess': True, 'wsgi.multithread': True, 'wsgi.run_once': False}
environ.update(self.defaults)
environ.update(request)
return environ |
def create_stream(self, unique_id=0, listener=None, timeout=11, buffer_size=1024, description='BetfairSocket',
host=None):
"""
Creates BetfairStream.
:param dict unique_id: Id used to start unique id's of the stream (+1 before every request)
:param resources.Listener listener: Listener class to use
:param float timeout: Socket timeout
:param int buffer_size: Socket buffer size
:param str description: Betfair stream description
:param str host: Host endpoint (prod (default) or integration)
:rtype: BetfairStream
"""
listener = listener if listener else BaseListener()
return BetfairStream(
unique_id,
listener,
app_key=self.client.app_key,
session_token=self.client.session_token,
timeout=timeout,
buffer_size=buffer_size,
description=description,
host=host,
) | def function[create_stream, parameter[self, unique_id, listener, timeout, buffer_size, description, host]]:
constant[
Creates BetfairStream.
:param dict unique_id: Id used to start unique id's of the stream (+1 before every request)
:param resources.Listener listener: Listener class to use
:param float timeout: Socket timeout
:param int buffer_size: Socket buffer size
:param str description: Betfair stream description
:param str host: Host endpoint (prod (default) or integration)
:rtype: BetfairStream
]
variable[listener] assign[=] <ast.IfExp object at 0x7da1b17fbe20>
return[call[name[BetfairStream], parameter[name[unique_id], name[listener]]]] | keyword[def] identifier[create_stream] ( identifier[self] , identifier[unique_id] = literal[int] , identifier[listener] = keyword[None] , identifier[timeout] = literal[int] , identifier[buffer_size] = literal[int] , identifier[description] = literal[string] ,
identifier[host] = keyword[None] ):
literal[string]
identifier[listener] = identifier[listener] keyword[if] identifier[listener] keyword[else] identifier[BaseListener] ()
keyword[return] identifier[BetfairStream] (
identifier[unique_id] ,
identifier[listener] ,
identifier[app_key] = identifier[self] . identifier[client] . identifier[app_key] ,
identifier[session_token] = identifier[self] . identifier[client] . identifier[session_token] ,
identifier[timeout] = identifier[timeout] ,
identifier[buffer_size] = identifier[buffer_size] ,
identifier[description] = identifier[description] ,
identifier[host] = identifier[host] ,
) | def create_stream(self, unique_id=0, listener=None, timeout=11, buffer_size=1024, description='BetfairSocket', host=None):
"""
Creates BetfairStream.
:param dict unique_id: Id used to start unique id's of the stream (+1 before every request)
:param resources.Listener listener: Listener class to use
:param float timeout: Socket timeout
:param int buffer_size: Socket buffer size
:param str description: Betfair stream description
:param str host: Host endpoint (prod (default) or integration)
:rtype: BetfairStream
"""
listener = listener if listener else BaseListener()
return BetfairStream(unique_id, listener, app_key=self.client.app_key, session_token=self.client.session_token, timeout=timeout, buffer_size=buffer_size, description=description, host=host) |
def ensureVisible(self, viewType):
"""
Find and switch to the first tab of the specified view type. If the
type does not exist, add it.
:param viewType | <subclass of XView>
:return <XView> || None
"""
# make sure we're not trying to switch to the same type
view = self.currentView()
if type(view) == viewType:
return view
self.blockSignals(True)
self.setUpdatesEnabled(False)
for i in xrange(self.count()):
widget = self.widget(i)
if type(widget) == viewType:
self.setCurrentIndex(i)
view = widget
break
else:
view = self.addView(viewType)
self.blockSignals(False)
self.setUpdatesEnabled(True)
return view | def function[ensureVisible, parameter[self, viewType]]:
constant[
Find and switch to the first tab of the specified view type. If the
type does not exist, add it.
:param viewType | <subclass of XView>
:return <XView> || None
]
variable[view] assign[=] call[name[self].currentView, parameter[]]
if compare[call[name[type], parameter[name[view]]] equal[==] name[viewType]] begin[:]
return[name[view]]
call[name[self].blockSignals, parameter[constant[True]]]
call[name[self].setUpdatesEnabled, parameter[constant[False]]]
for taget[name[i]] in starred[call[name[xrange], parameter[call[name[self].count, parameter[]]]]] begin[:]
variable[widget] assign[=] call[name[self].widget, parameter[name[i]]]
if compare[call[name[type], parameter[name[widget]]] equal[==] name[viewType]] begin[:]
call[name[self].setCurrentIndex, parameter[name[i]]]
variable[view] assign[=] name[widget]
break
call[name[self].blockSignals, parameter[constant[False]]]
call[name[self].setUpdatesEnabled, parameter[constant[True]]]
return[name[view]] | keyword[def] identifier[ensureVisible] ( identifier[self] , identifier[viewType] ):
literal[string]
identifier[view] = identifier[self] . identifier[currentView] ()
keyword[if] identifier[type] ( identifier[view] )== identifier[viewType] :
keyword[return] identifier[view]
identifier[self] . identifier[blockSignals] ( keyword[True] )
identifier[self] . identifier[setUpdatesEnabled] ( keyword[False] )
keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[self] . identifier[count] ()):
identifier[widget] = identifier[self] . identifier[widget] ( identifier[i] )
keyword[if] identifier[type] ( identifier[widget] )== identifier[viewType] :
identifier[self] . identifier[setCurrentIndex] ( identifier[i] )
identifier[view] = identifier[widget]
keyword[break]
keyword[else] :
identifier[view] = identifier[self] . identifier[addView] ( identifier[viewType] )
identifier[self] . identifier[blockSignals] ( keyword[False] )
identifier[self] . identifier[setUpdatesEnabled] ( keyword[True] )
keyword[return] identifier[view] | def ensureVisible(self, viewType):
"""
Find and switch to the first tab of the specified view type. If the
type does not exist, add it.
:param viewType | <subclass of XView>
:return <XView> || None
"""
# make sure we're not trying to switch to the same type
view = self.currentView()
if type(view) == viewType:
return view # depends on [control=['if'], data=[]]
self.blockSignals(True)
self.setUpdatesEnabled(False)
for i in xrange(self.count()):
widget = self.widget(i)
if type(widget) == viewType:
self.setCurrentIndex(i)
view = widget
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
else:
view = self.addView(viewType)
self.blockSignals(False)
self.setUpdatesEnabled(True)
return view |
def format_string(string, context):
"""String-template format a string:
>>> format_string('$foo and ${foo}s', dict(foo=42))
'42 and 42s'
This does not do any attribute lookup etc. For more advanced string
formattings have a look at the `werkzeug.template` module.
:param string: the format string.
:param context: a dict with the variables to insert.
"""
def lookup_arg(match):
x = context[match.group(1) or match.group(2)]
if not isinstance(x, string_types):
x = type(string)(x)
return x
return _format_re.sub(lookup_arg, string) | def function[format_string, parameter[string, context]]:
constant[String-template format a string:
>>> format_string('$foo and ${foo}s', dict(foo=42))
'42 and 42s'
This does not do any attribute lookup etc. For more advanced string
formattings have a look at the `werkzeug.template` module.
:param string: the format string.
:param context: a dict with the variables to insert.
]
def function[lookup_arg, parameter[match]]:
variable[x] assign[=] call[name[context]][<ast.BoolOp object at 0x7da204621b70>]
if <ast.UnaryOp object at 0x7da204621420> begin[:]
variable[x] assign[=] call[call[name[type], parameter[name[string]]], parameter[name[x]]]
return[name[x]]
return[call[name[_format_re].sub, parameter[name[lookup_arg], name[string]]]] | keyword[def] identifier[format_string] ( identifier[string] , identifier[context] ):
literal[string]
keyword[def] identifier[lookup_arg] ( identifier[match] ):
identifier[x] = identifier[context] [ identifier[match] . identifier[group] ( literal[int] ) keyword[or] identifier[match] . identifier[group] ( literal[int] )]
keyword[if] keyword[not] identifier[isinstance] ( identifier[x] , identifier[string_types] ):
identifier[x] = identifier[type] ( identifier[string] )( identifier[x] )
keyword[return] identifier[x]
keyword[return] identifier[_format_re] . identifier[sub] ( identifier[lookup_arg] , identifier[string] ) | def format_string(string, context):
"""String-template format a string:
>>> format_string('$foo and ${foo}s', dict(foo=42))
'42 and 42s'
This does not do any attribute lookup etc. For more advanced string
formattings have a look at the `werkzeug.template` module.
:param string: the format string.
:param context: a dict with the variables to insert.
"""
def lookup_arg(match):
x = context[match.group(1) or match.group(2)]
if not isinstance(x, string_types):
x = type(string)(x) # depends on [control=['if'], data=[]]
return x
return _format_re.sub(lookup_arg, string) |
def initUI(self):
#self.setMinimumSize(WIDTH,HEIGTH)
#self.setMaximumSize(WIDTH,HEIGTH)
'''Radio buttons for Original/RGB/HSV/YUV images'''
self.origButton = QRadioButton("Original")
self.rgbButton = QRadioButton("RGB")
self.hsvButton = QRadioButton("HSV")
self.yuvButton = QRadioButton("YUV")
'''HSV status scheck'''
self.hsvCheck = QCheckBox('HSV Color Disc')
self.hsvCheck.setObjectName("hsvCheck")
self.hsvCheck.stateChanged.connect(self.showHSVWidget)
'''Signals for toggled radio buttons'''
self.origButton.toggled.connect(lambda:self.origButtonState())
self.rgbButton.toggled.connect(lambda:self.rgbButtonState())
self.hsvButton.toggled.connect(lambda:self.hsvButtonState())
self.yuvButton.toggled.connect(lambda:self.yuvButtonState())
self.origButton.setChecked(True)
'''Main layout of the widget will contain several vertical layouts'''
self.gLayout = QGridLayout(self)
self.gLayout.setObjectName("gLayout")
''' Vertical Layout for radio buttons '''
self.radio1Layout = QHBoxLayout()
self.radio2Layout = QVBoxLayout()
self.radio3Layout = QVBoxLayout()
self.radioLayout = QVBoxLayout()
self.radioLayout.setObjectName("radioLayout")
self.radio1Layout.addWidget(self.origButton)
self.radio1Layout.addWidget(self.rgbButton)
self.radio1Layout.addWidget(self.hsvButton)
self.radio1Layout.addWidget(self.yuvButton)
self.radio1Layout.addWidget(self.hsvCheck)
#self.radio1Layout.addLayout(self.radio2Layout)
#self.radio1Layout.addLayout(self.radio3Layout)
self.radioLayout.addLayout(self.radio1Layout)
self.vSpacer = QSpacerItem(10, 10, QSizePolicy.Ignored, QSizePolicy.Ignored);
self.radioLayout.addItem(self.vSpacer)
hmin,smin,vmin = HSVMIN
hmax,smax,vmax = HSVMAX
''' Vertical Layout for HMIN Slider'''
self.hminLayout = QVBoxLayout()
self.hminLayout.setObjectName("hminLayout")
self.hminLabel = QLabel("HMin")
self.hminValue = QLineEdit(str(hmin),self)
self.hminValue.setValidator(QIntValidator(hmin, hmax, self));
self.hminValue.setFixedWidth(40)
self.hminValue.setFixedHeight(27)
self.hminValue.setAlignment(Qt.AlignCenter);
self.hminSlider = QSlider(Qt.Vertical)
self.hminSlider.setMinimum(hmin)
self.hminSlider.setMaximum(hmax)
self.hminSlider.setValue(hmin)
self.hminLayout.addWidget(self.hminLabel, Qt.AlignCenter)
self.hminLayout.addWidget(self.hminValue,Qt.AlignCenter)
self.hminLayout.addWidget(self.hminSlider)
''' Vertical Layout for HMAX Slider'''
self.hmaxLayout = QVBoxLayout()
self.hmaxLayout.setObjectName("hmaxLayout")
self.hmaxLabel = QLabel("HMax")
self.hmaxValue = QLineEdit(str(hmax),self)
self.hmaxValue.setValidator(QIntValidator(hmin, hmax, self));
self.hmaxValue.setFixedWidth(40)
self.hmaxValue.setFixedHeight(27)
self.hmaxValue.setAlignment(Qt.AlignCenter);
self.hmaxSlider = QSlider(Qt.Vertical)
self.hmaxSlider.setMinimum(hmin)
self.hmaxSlider.setMaximum(hmax)
self.hmaxSlider.setValue(hmax)
self.hmaxLayout.addWidget(self.hmaxLabel)
self.hmaxLayout.addWidget(self.hmaxValue)
self.hmaxLayout.addWidget(self.hmaxSlider)
''' Vertical Layout for SMIN Slider'''
self.sminLayout = QVBoxLayout()
self.sminLayout.setObjectName("sminLayout")
self.sminLabel = QLabel("SMin")
self.sminValue = QLineEdit(str(smin),self)
self.sminValue.setValidator(QIntValidator(smin, smax, self));
self.sminValue.setFixedWidth(40)
self.sminValue.setFixedHeight(27)
self.sminValue.setAlignment(Qt.AlignCenter);
self.sminSlider = QSlider(Qt.Vertical)
self.sminSlider.setMinimum(smin)
self.sminSlider.setMaximum(smax)
self.sminSlider.setValue(smin)
self.sminLayout.addWidget(self.sminLabel)
self.sminLayout.addWidget(self.sminValue)
self.sminLayout.addWidget(self.sminSlider)
''' Vertical Layout for SMAX Slider'''
self.smaxLayout = QVBoxLayout()
self.smaxLayout.setObjectName("smaxLayout")
self.smaxLabel = QLabel("SMax")
self.smaxValue = QLineEdit(str(smax),self)
self.smaxValue.setValidator(QIntValidator(smin, smax, self));
self.smaxValue.setFixedWidth(40)
self.smaxValue.setFixedHeight(27)
self.smaxValue.setAlignment(Qt.AlignCenter);
self.smaxSlider = QSlider(Qt.Vertical)
self.smaxSlider.setMinimum(smin)
self.smaxSlider.setMaximum(smax)
self.smaxSlider.setValue(smax)
self.smaxLayout.addWidget(self.smaxLabel)
self.smaxLayout.addWidget(self.smaxValue)
self.smaxLayout.addWidget(self.smaxSlider)
''' Vertical Layout for VMIN Slider'''
self.vminLayout = QVBoxLayout()
self.vminLayout.setObjectName("vminLayout")
self.vminLabel = QLabel("VMin")
self.vminValue = QLineEdit(str(vmin),self)
self.vminValue.setValidator(QIntValidator(vmin, vmax, self));
self.vminValue.setFixedWidth(40)
self.vminValue.setFixedHeight(27)
self.vminValue.setAlignment(Qt.AlignCenter);
self.vminSlider = QSlider(Qt.Vertical)
self.vminSlider.setMinimum(vmin)
self.vminSlider.setMaximum(vmax)
self.vminSlider.setValue(vmin)
self.vminLayout.addWidget(self.vminLabel)
self.vminLayout.addWidget(self.vminValue)
self.vminLayout.addWidget(self.vminSlider)
''' Vertical Layout for VMAX Slider'''
self.vmaxLayout = QVBoxLayout()
self.vmaxLayout.setObjectName("vmaxLayout")
self.vmaxLabel = QLabel("VMax")
self.vmaxValue = QLineEdit(str(vmax),self)
self.vmaxValue.setValidator(QIntValidator(vmin, vmax, self));
self.vmaxValue.setFixedWidth(40)
self.vmaxValue.setFixedHeight(27)
self.vmaxValue.setAlignment(Qt.AlignCenter);
self.vmaxSlider = QSlider(Qt.Vertical)
self.vmaxSlider.setMinimum(vmin)
self.vmaxSlider.setMaximum(vmax)
self.vmaxSlider.setValue(vmax)
self.vmaxLayout.addWidget(self.vmaxLabel)
self.vmaxLayout.addWidget(self.vmaxValue)
self.vmaxLayout.addWidget(self.vmaxSlider)
'''Adding all the vertical layouts to the main horizontal layout'''
self.gLayout.addLayout(self.radioLayout,1,0,1,6,Qt.AlignCenter)
self.gLayout.addLayout(self.hminLayout,2,0,Qt.AlignCenter)
self.gLayout.addLayout(self.hmaxLayout,2,1,Qt.AlignCenter)
self.gLayout.addLayout(self.sminLayout,2,2,Qt.AlignCenter)
self.gLayout.addLayout(self.smaxLayout,2,3,Qt.AlignCenter)
self.gLayout.addLayout(self.vminLayout,2,4,Qt.AlignCenter)
self.gLayout.addLayout(self.vmaxLayout,2,5,Qt.AlignCenter)
self.setLayout(self.gLayout)
'''Signals for sliders value changes'''
self.hminSlider.valueChanged.connect(self.changeHmin)
self.hmaxSlider.valueChanged.connect(self.changeHmax)
self.sminSlider.valueChanged.connect(self.changeSmin)
self.smaxSlider.valueChanged.connect(self.changeSmax)
self.vminSlider.valueChanged.connect(self.changeVmin)
self.vmaxSlider.valueChanged.connect(self.changeVmax)
self.hminValue.textChanged.connect(self.changeHmin2)
self.hmaxValue.textChanged.connect(self.changeHmax2)
self.sminValue.textChanged.connect(self.changeSmin2)
self.smaxValue.textChanged.connect(self.changeSmax2)
self.vminValue.textChanged.connect(self.changeVmin2)
self.vmaxValue.textChanged.connect(self.changeVmax2) | def function[initUI, parameter[self]]:
constant[Radio buttons for Original/RGB/HSV/YUV images]
name[self].origButton assign[=] call[name[QRadioButton], parameter[constant[Original]]]
name[self].rgbButton assign[=] call[name[QRadioButton], parameter[constant[RGB]]]
name[self].hsvButton assign[=] call[name[QRadioButton], parameter[constant[HSV]]]
name[self].yuvButton assign[=] call[name[QRadioButton], parameter[constant[YUV]]]
constant[HSV status scheck]
name[self].hsvCheck assign[=] call[name[QCheckBox], parameter[constant[HSV Color Disc]]]
call[name[self].hsvCheck.setObjectName, parameter[constant[hsvCheck]]]
call[name[self].hsvCheck.stateChanged.connect, parameter[name[self].showHSVWidget]]
constant[Signals for toggled radio buttons]
call[name[self].origButton.toggled.connect, parameter[<ast.Lambda object at 0x7da20c992e30>]]
call[name[self].rgbButton.toggled.connect, parameter[<ast.Lambda object at 0x7da20c9915a0>]]
call[name[self].hsvButton.toggled.connect, parameter[<ast.Lambda object at 0x7da18ede6bc0>]]
call[name[self].yuvButton.toggled.connect, parameter[<ast.Lambda object at 0x7da18ede7ac0>]]
call[name[self].origButton.setChecked, parameter[constant[True]]]
constant[Main layout of the widget will contain several vertical layouts]
name[self].gLayout assign[=] call[name[QGridLayout], parameter[name[self]]]
call[name[self].gLayout.setObjectName, parameter[constant[gLayout]]]
constant[ Vertical Layout for radio buttons ]
name[self].radio1Layout assign[=] call[name[QHBoxLayout], parameter[]]
name[self].radio2Layout assign[=] call[name[QVBoxLayout], parameter[]]
name[self].radio3Layout assign[=] call[name[QVBoxLayout], parameter[]]
name[self].radioLayout assign[=] call[name[QVBoxLayout], parameter[]]
call[name[self].radioLayout.setObjectName, parameter[constant[radioLayout]]]
call[name[self].radio1Layout.addWidget, parameter[name[self].origButton]]
call[name[self].radio1Layout.addWidget, parameter[name[self].rgbButton]]
call[name[self].radio1Layout.addWidget, parameter[name[self].hsvButton]]
call[name[self].radio1Layout.addWidget, parameter[name[self].yuvButton]]
call[name[self].radio1Layout.addWidget, parameter[name[self].hsvCheck]]
call[name[self].radioLayout.addLayout, parameter[name[self].radio1Layout]]
name[self].vSpacer assign[=] call[name[QSpacerItem], parameter[constant[10], constant[10], name[QSizePolicy].Ignored, name[QSizePolicy].Ignored]]
call[name[self].radioLayout.addItem, parameter[name[self].vSpacer]]
<ast.Tuple object at 0x7da18ede7220> assign[=] name[HSVMIN]
<ast.Tuple object at 0x7da18ede4670> assign[=] name[HSVMAX]
constant[ Vertical Layout for HMIN Slider]
name[self].hminLayout assign[=] call[name[QVBoxLayout], parameter[]]
call[name[self].hminLayout.setObjectName, parameter[constant[hminLayout]]]
name[self].hminLabel assign[=] call[name[QLabel], parameter[constant[HMin]]]
name[self].hminValue assign[=] call[name[QLineEdit], parameter[call[name[str], parameter[name[hmin]]], name[self]]]
call[name[self].hminValue.setValidator, parameter[call[name[QIntValidator], parameter[name[hmin], name[hmax], name[self]]]]]
call[name[self].hminValue.setFixedWidth, parameter[constant[40]]]
call[name[self].hminValue.setFixedHeight, parameter[constant[27]]]
call[name[self].hminValue.setAlignment, parameter[name[Qt].AlignCenter]]
name[self].hminSlider assign[=] call[name[QSlider], parameter[name[Qt].Vertical]]
call[name[self].hminSlider.setMinimum, parameter[name[hmin]]]
call[name[self].hminSlider.setMaximum, parameter[name[hmax]]]
call[name[self].hminSlider.setValue, parameter[name[hmin]]]
call[name[self].hminLayout.addWidget, parameter[name[self].hminLabel, name[Qt].AlignCenter]]
call[name[self].hminLayout.addWidget, parameter[name[self].hminValue, name[Qt].AlignCenter]]
call[name[self].hminLayout.addWidget, parameter[name[self].hminSlider]]
constant[ Vertical Layout for HMAX Slider]
name[self].hmaxLayout assign[=] call[name[QVBoxLayout], parameter[]]
call[name[self].hmaxLayout.setObjectName, parameter[constant[hmaxLayout]]]
name[self].hmaxLabel assign[=] call[name[QLabel], parameter[constant[HMax]]]
name[self].hmaxValue assign[=] call[name[QLineEdit], parameter[call[name[str], parameter[name[hmax]]], name[self]]]
call[name[self].hmaxValue.setValidator, parameter[call[name[QIntValidator], parameter[name[hmin], name[hmax], name[self]]]]]
call[name[self].hmaxValue.setFixedWidth, parameter[constant[40]]]
call[name[self].hmaxValue.setFixedHeight, parameter[constant[27]]]
call[name[self].hmaxValue.setAlignment, parameter[name[Qt].AlignCenter]]
name[self].hmaxSlider assign[=] call[name[QSlider], parameter[name[Qt].Vertical]]
call[name[self].hmaxSlider.setMinimum, parameter[name[hmin]]]
call[name[self].hmaxSlider.setMaximum, parameter[name[hmax]]]
call[name[self].hmaxSlider.setValue, parameter[name[hmax]]]
call[name[self].hmaxLayout.addWidget, parameter[name[self].hmaxLabel]]
call[name[self].hmaxLayout.addWidget, parameter[name[self].hmaxValue]]
call[name[self].hmaxLayout.addWidget, parameter[name[self].hmaxSlider]]
constant[ Vertical Layout for SMIN Slider]
name[self].sminLayout assign[=] call[name[QVBoxLayout], parameter[]]
call[name[self].sminLayout.setObjectName, parameter[constant[sminLayout]]]
name[self].sminLabel assign[=] call[name[QLabel], parameter[constant[SMin]]]
name[self].sminValue assign[=] call[name[QLineEdit], parameter[call[name[str], parameter[name[smin]]], name[self]]]
call[name[self].sminValue.setValidator, parameter[call[name[QIntValidator], parameter[name[smin], name[smax], name[self]]]]]
call[name[self].sminValue.setFixedWidth, parameter[constant[40]]]
call[name[self].sminValue.setFixedHeight, parameter[constant[27]]]
call[name[self].sminValue.setAlignment, parameter[name[Qt].AlignCenter]]
name[self].sminSlider assign[=] call[name[QSlider], parameter[name[Qt].Vertical]]
call[name[self].sminSlider.setMinimum, parameter[name[smin]]]
call[name[self].sminSlider.setMaximum, parameter[name[smax]]]
call[name[self].sminSlider.setValue, parameter[name[smin]]]
call[name[self].sminLayout.addWidget, parameter[name[self].sminLabel]]
call[name[self].sminLayout.addWidget, parameter[name[self].sminValue]]
call[name[self].sminLayout.addWidget, parameter[name[self].sminSlider]]
constant[ Vertical Layout for SMAX Slider]
name[self].smaxLayout assign[=] call[name[QVBoxLayout], parameter[]]
call[name[self].smaxLayout.setObjectName, parameter[constant[smaxLayout]]]
name[self].smaxLabel assign[=] call[name[QLabel], parameter[constant[SMax]]]
name[self].smaxValue assign[=] call[name[QLineEdit], parameter[call[name[str], parameter[name[smax]]], name[self]]]
call[name[self].smaxValue.setValidator, parameter[call[name[QIntValidator], parameter[name[smin], name[smax], name[self]]]]]
call[name[self].smaxValue.setFixedWidth, parameter[constant[40]]]
call[name[self].smaxValue.setFixedHeight, parameter[constant[27]]]
call[name[self].smaxValue.setAlignment, parameter[name[Qt].AlignCenter]]
name[self].smaxSlider assign[=] call[name[QSlider], parameter[name[Qt].Vertical]]
call[name[self].smaxSlider.setMinimum, parameter[name[smin]]]
call[name[self].smaxSlider.setMaximum, parameter[name[smax]]]
call[name[self].smaxSlider.setValue, parameter[name[smax]]]
call[name[self].smaxLayout.addWidget, parameter[name[self].smaxLabel]]
call[name[self].smaxLayout.addWidget, parameter[name[self].smaxValue]]
call[name[self].smaxLayout.addWidget, parameter[name[self].smaxSlider]]
constant[ Vertical Layout for VMIN Slider]
name[self].vminLayout assign[=] call[name[QVBoxLayout], parameter[]]
call[name[self].vminLayout.setObjectName, parameter[constant[vminLayout]]]
name[self].vminLabel assign[=] call[name[QLabel], parameter[constant[VMin]]]
name[self].vminValue assign[=] call[name[QLineEdit], parameter[call[name[str], parameter[name[vmin]]], name[self]]]
call[name[self].vminValue.setValidator, parameter[call[name[QIntValidator], parameter[name[vmin], name[vmax], name[self]]]]]
call[name[self].vminValue.setFixedWidth, parameter[constant[40]]]
call[name[self].vminValue.setFixedHeight, parameter[constant[27]]]
call[name[self].vminValue.setAlignment, parameter[name[Qt].AlignCenter]]
name[self].vminSlider assign[=] call[name[QSlider], parameter[name[Qt].Vertical]]
call[name[self].vminSlider.setMinimum, parameter[name[vmin]]]
call[name[self].vminSlider.setMaximum, parameter[name[vmax]]]
call[name[self].vminSlider.setValue, parameter[name[vmin]]]
call[name[self].vminLayout.addWidget, parameter[name[self].vminLabel]]
call[name[self].vminLayout.addWidget, parameter[name[self].vminValue]]
call[name[self].vminLayout.addWidget, parameter[name[self].vminSlider]]
constant[ Vertical Layout for VMAX Slider]
name[self].vmaxLayout assign[=] call[name[QVBoxLayout], parameter[]]
call[name[self].vmaxLayout.setObjectName, parameter[constant[vmaxLayout]]]
name[self].vmaxLabel assign[=] call[name[QLabel], parameter[constant[VMax]]]
name[self].vmaxValue assign[=] call[name[QLineEdit], parameter[call[name[str], parameter[name[vmax]]], name[self]]]
call[name[self].vmaxValue.setValidator, parameter[call[name[QIntValidator], parameter[name[vmin], name[vmax], name[self]]]]]
call[name[self].vmaxValue.setFixedWidth, parameter[constant[40]]]
call[name[self].vmaxValue.setFixedHeight, parameter[constant[27]]]
call[name[self].vmaxValue.setAlignment, parameter[name[Qt].AlignCenter]]
name[self].vmaxSlider assign[=] call[name[QSlider], parameter[name[Qt].Vertical]]
call[name[self].vmaxSlider.setMinimum, parameter[name[vmin]]]
call[name[self].vmaxSlider.setMaximum, parameter[name[vmax]]]
call[name[self].vmaxSlider.setValue, parameter[name[vmax]]]
call[name[self].vmaxLayout.addWidget, parameter[name[self].vmaxLabel]]
call[name[self].vmaxLayout.addWidget, parameter[name[self].vmaxValue]]
call[name[self].vmaxLayout.addWidget, parameter[name[self].vmaxSlider]]
constant[Adding all the vertical layouts to the main horizontal layout]
call[name[self].gLayout.addLayout, parameter[name[self].radioLayout, constant[1], constant[0], constant[1], constant[6], name[Qt].AlignCenter]]
call[name[self].gLayout.addLayout, parameter[name[self].hminLayout, constant[2], constant[0], name[Qt].AlignCenter]]
call[name[self].gLayout.addLayout, parameter[name[self].hmaxLayout, constant[2], constant[1], name[Qt].AlignCenter]]
call[name[self].gLayout.addLayout, parameter[name[self].sminLayout, constant[2], constant[2], name[Qt].AlignCenter]]
call[name[self].gLayout.addLayout, parameter[name[self].smaxLayout, constant[2], constant[3], name[Qt].AlignCenter]]
call[name[self].gLayout.addLayout, parameter[name[self].vminLayout, constant[2], constant[4], name[Qt].AlignCenter]]
call[name[self].gLayout.addLayout, parameter[name[self].vmaxLayout, constant[2], constant[5], name[Qt].AlignCenter]]
call[name[self].setLayout, parameter[name[self].gLayout]]
constant[Signals for sliders value changes]
call[name[self].hminSlider.valueChanged.connect, parameter[name[self].changeHmin]]
call[name[self].hmaxSlider.valueChanged.connect, parameter[name[self].changeHmax]]
call[name[self].sminSlider.valueChanged.connect, parameter[name[self].changeSmin]]
call[name[self].smaxSlider.valueChanged.connect, parameter[name[self].changeSmax]]
call[name[self].vminSlider.valueChanged.connect, parameter[name[self].changeVmin]]
call[name[self].vmaxSlider.valueChanged.connect, parameter[name[self].changeVmax]]
call[name[self].hminValue.textChanged.connect, parameter[name[self].changeHmin2]]
call[name[self].hmaxValue.textChanged.connect, parameter[name[self].changeHmax2]]
call[name[self].sminValue.textChanged.connect, parameter[name[self].changeSmin2]]
call[name[self].smaxValue.textChanged.connect, parameter[name[self].changeSmax2]]
call[name[self].vminValue.textChanged.connect, parameter[name[self].changeVmin2]]
call[name[self].vmaxValue.textChanged.connect, parameter[name[self].changeVmax2]] | keyword[def] identifier[initUI] ( identifier[self] ):
literal[string]
identifier[self] . identifier[origButton] = identifier[QRadioButton] ( literal[string] )
identifier[self] . identifier[rgbButton] = identifier[QRadioButton] ( literal[string] )
identifier[self] . identifier[hsvButton] = identifier[QRadioButton] ( literal[string] )
identifier[self] . identifier[yuvButton] = identifier[QRadioButton] ( literal[string] )
literal[string]
identifier[self] . identifier[hsvCheck] = identifier[QCheckBox] ( literal[string] )
identifier[self] . identifier[hsvCheck] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[hsvCheck] . identifier[stateChanged] . identifier[connect] ( identifier[self] . identifier[showHSVWidget] )
literal[string]
identifier[self] . identifier[origButton] . identifier[toggled] . identifier[connect] ( keyword[lambda] : identifier[self] . identifier[origButtonState] ())
identifier[self] . identifier[rgbButton] . identifier[toggled] . identifier[connect] ( keyword[lambda] : identifier[self] . identifier[rgbButtonState] ())
identifier[self] . identifier[hsvButton] . identifier[toggled] . identifier[connect] ( keyword[lambda] : identifier[self] . identifier[hsvButtonState] ())
identifier[self] . identifier[yuvButton] . identifier[toggled] . identifier[connect] ( keyword[lambda] : identifier[self] . identifier[yuvButtonState] ())
identifier[self] . identifier[origButton] . identifier[setChecked] ( keyword[True] )
literal[string]
identifier[self] . identifier[gLayout] = identifier[QGridLayout] ( identifier[self] )
identifier[self] . identifier[gLayout] . identifier[setObjectName] ( literal[string] )
literal[string]
identifier[self] . identifier[radio1Layout] = identifier[QHBoxLayout] ()
identifier[self] . identifier[radio2Layout] = identifier[QVBoxLayout] ()
identifier[self] . identifier[radio3Layout] = identifier[QVBoxLayout] ()
identifier[self] . identifier[radioLayout] = identifier[QVBoxLayout] ()
identifier[self] . identifier[radioLayout] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[radio1Layout] . identifier[addWidget] ( identifier[self] . identifier[origButton] )
identifier[self] . identifier[radio1Layout] . identifier[addWidget] ( identifier[self] . identifier[rgbButton] )
identifier[self] . identifier[radio1Layout] . identifier[addWidget] ( identifier[self] . identifier[hsvButton] )
identifier[self] . identifier[radio1Layout] . identifier[addWidget] ( identifier[self] . identifier[yuvButton] )
identifier[self] . identifier[radio1Layout] . identifier[addWidget] ( identifier[self] . identifier[hsvCheck] )
identifier[self] . identifier[radioLayout] . identifier[addLayout] ( identifier[self] . identifier[radio1Layout] )
identifier[self] . identifier[vSpacer] = identifier[QSpacerItem] ( literal[int] , literal[int] , identifier[QSizePolicy] . identifier[Ignored] , identifier[QSizePolicy] . identifier[Ignored] );
identifier[self] . identifier[radioLayout] . identifier[addItem] ( identifier[self] . identifier[vSpacer] )
identifier[hmin] , identifier[smin] , identifier[vmin] = identifier[HSVMIN]
identifier[hmax] , identifier[smax] , identifier[vmax] = identifier[HSVMAX]
literal[string]
identifier[self] . identifier[hminLayout] = identifier[QVBoxLayout] ()
identifier[self] . identifier[hminLayout] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[hminLabel] = identifier[QLabel] ( literal[string] )
identifier[self] . identifier[hminValue] = identifier[QLineEdit] ( identifier[str] ( identifier[hmin] ), identifier[self] )
identifier[self] . identifier[hminValue] . identifier[setValidator] ( identifier[QIntValidator] ( identifier[hmin] , identifier[hmax] , identifier[self] ));
identifier[self] . identifier[hminValue] . identifier[setFixedWidth] ( literal[int] )
identifier[self] . identifier[hminValue] . identifier[setFixedHeight] ( literal[int] )
identifier[self] . identifier[hminValue] . identifier[setAlignment] ( identifier[Qt] . identifier[AlignCenter] );
identifier[self] . identifier[hminSlider] = identifier[QSlider] ( identifier[Qt] . identifier[Vertical] )
identifier[self] . identifier[hminSlider] . identifier[setMinimum] ( identifier[hmin] )
identifier[self] . identifier[hminSlider] . identifier[setMaximum] ( identifier[hmax] )
identifier[self] . identifier[hminSlider] . identifier[setValue] ( identifier[hmin] )
identifier[self] . identifier[hminLayout] . identifier[addWidget] ( identifier[self] . identifier[hminLabel] , identifier[Qt] . identifier[AlignCenter] )
identifier[self] . identifier[hminLayout] . identifier[addWidget] ( identifier[self] . identifier[hminValue] , identifier[Qt] . identifier[AlignCenter] )
identifier[self] . identifier[hminLayout] . identifier[addWidget] ( identifier[self] . identifier[hminSlider] )
literal[string]
identifier[self] . identifier[hmaxLayout] = identifier[QVBoxLayout] ()
identifier[self] . identifier[hmaxLayout] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[hmaxLabel] = identifier[QLabel] ( literal[string] )
identifier[self] . identifier[hmaxValue] = identifier[QLineEdit] ( identifier[str] ( identifier[hmax] ), identifier[self] )
identifier[self] . identifier[hmaxValue] . identifier[setValidator] ( identifier[QIntValidator] ( identifier[hmin] , identifier[hmax] , identifier[self] ));
identifier[self] . identifier[hmaxValue] . identifier[setFixedWidth] ( literal[int] )
identifier[self] . identifier[hmaxValue] . identifier[setFixedHeight] ( literal[int] )
identifier[self] . identifier[hmaxValue] . identifier[setAlignment] ( identifier[Qt] . identifier[AlignCenter] );
identifier[self] . identifier[hmaxSlider] = identifier[QSlider] ( identifier[Qt] . identifier[Vertical] )
identifier[self] . identifier[hmaxSlider] . identifier[setMinimum] ( identifier[hmin] )
identifier[self] . identifier[hmaxSlider] . identifier[setMaximum] ( identifier[hmax] )
identifier[self] . identifier[hmaxSlider] . identifier[setValue] ( identifier[hmax] )
identifier[self] . identifier[hmaxLayout] . identifier[addWidget] ( identifier[self] . identifier[hmaxLabel] )
identifier[self] . identifier[hmaxLayout] . identifier[addWidget] ( identifier[self] . identifier[hmaxValue] )
identifier[self] . identifier[hmaxLayout] . identifier[addWidget] ( identifier[self] . identifier[hmaxSlider] )
literal[string]
identifier[self] . identifier[sminLayout] = identifier[QVBoxLayout] ()
identifier[self] . identifier[sminLayout] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[sminLabel] = identifier[QLabel] ( literal[string] )
identifier[self] . identifier[sminValue] = identifier[QLineEdit] ( identifier[str] ( identifier[smin] ), identifier[self] )
identifier[self] . identifier[sminValue] . identifier[setValidator] ( identifier[QIntValidator] ( identifier[smin] , identifier[smax] , identifier[self] ));
identifier[self] . identifier[sminValue] . identifier[setFixedWidth] ( literal[int] )
identifier[self] . identifier[sminValue] . identifier[setFixedHeight] ( literal[int] )
identifier[self] . identifier[sminValue] . identifier[setAlignment] ( identifier[Qt] . identifier[AlignCenter] );
identifier[self] . identifier[sminSlider] = identifier[QSlider] ( identifier[Qt] . identifier[Vertical] )
identifier[self] . identifier[sminSlider] . identifier[setMinimum] ( identifier[smin] )
identifier[self] . identifier[sminSlider] . identifier[setMaximum] ( identifier[smax] )
identifier[self] . identifier[sminSlider] . identifier[setValue] ( identifier[smin] )
identifier[self] . identifier[sminLayout] . identifier[addWidget] ( identifier[self] . identifier[sminLabel] )
identifier[self] . identifier[sminLayout] . identifier[addWidget] ( identifier[self] . identifier[sminValue] )
identifier[self] . identifier[sminLayout] . identifier[addWidget] ( identifier[self] . identifier[sminSlider] )
literal[string]
identifier[self] . identifier[smaxLayout] = identifier[QVBoxLayout] ()
identifier[self] . identifier[smaxLayout] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[smaxLabel] = identifier[QLabel] ( literal[string] )
identifier[self] . identifier[smaxValue] = identifier[QLineEdit] ( identifier[str] ( identifier[smax] ), identifier[self] )
identifier[self] . identifier[smaxValue] . identifier[setValidator] ( identifier[QIntValidator] ( identifier[smin] , identifier[smax] , identifier[self] ));
identifier[self] . identifier[smaxValue] . identifier[setFixedWidth] ( literal[int] )
identifier[self] . identifier[smaxValue] . identifier[setFixedHeight] ( literal[int] )
identifier[self] . identifier[smaxValue] . identifier[setAlignment] ( identifier[Qt] . identifier[AlignCenter] );
identifier[self] . identifier[smaxSlider] = identifier[QSlider] ( identifier[Qt] . identifier[Vertical] )
identifier[self] . identifier[smaxSlider] . identifier[setMinimum] ( identifier[smin] )
identifier[self] . identifier[smaxSlider] . identifier[setMaximum] ( identifier[smax] )
identifier[self] . identifier[smaxSlider] . identifier[setValue] ( identifier[smax] )
identifier[self] . identifier[smaxLayout] . identifier[addWidget] ( identifier[self] . identifier[smaxLabel] )
identifier[self] . identifier[smaxLayout] . identifier[addWidget] ( identifier[self] . identifier[smaxValue] )
identifier[self] . identifier[smaxLayout] . identifier[addWidget] ( identifier[self] . identifier[smaxSlider] )
literal[string]
identifier[self] . identifier[vminLayout] = identifier[QVBoxLayout] ()
identifier[self] . identifier[vminLayout] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[vminLabel] = identifier[QLabel] ( literal[string] )
identifier[self] . identifier[vminValue] = identifier[QLineEdit] ( identifier[str] ( identifier[vmin] ), identifier[self] )
identifier[self] . identifier[vminValue] . identifier[setValidator] ( identifier[QIntValidator] ( identifier[vmin] , identifier[vmax] , identifier[self] ));
identifier[self] . identifier[vminValue] . identifier[setFixedWidth] ( literal[int] )
identifier[self] . identifier[vminValue] . identifier[setFixedHeight] ( literal[int] )
identifier[self] . identifier[vminValue] . identifier[setAlignment] ( identifier[Qt] . identifier[AlignCenter] );
identifier[self] . identifier[vminSlider] = identifier[QSlider] ( identifier[Qt] . identifier[Vertical] )
identifier[self] . identifier[vminSlider] . identifier[setMinimum] ( identifier[vmin] )
identifier[self] . identifier[vminSlider] . identifier[setMaximum] ( identifier[vmax] )
identifier[self] . identifier[vminSlider] . identifier[setValue] ( identifier[vmin] )
identifier[self] . identifier[vminLayout] . identifier[addWidget] ( identifier[self] . identifier[vminLabel] )
identifier[self] . identifier[vminLayout] . identifier[addWidget] ( identifier[self] . identifier[vminValue] )
identifier[self] . identifier[vminLayout] . identifier[addWidget] ( identifier[self] . identifier[vminSlider] )
literal[string]
identifier[self] . identifier[vmaxLayout] = identifier[QVBoxLayout] ()
identifier[self] . identifier[vmaxLayout] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[vmaxLabel] = identifier[QLabel] ( literal[string] )
identifier[self] . identifier[vmaxValue] = identifier[QLineEdit] ( identifier[str] ( identifier[vmax] ), identifier[self] )
identifier[self] . identifier[vmaxValue] . identifier[setValidator] ( identifier[QIntValidator] ( identifier[vmin] , identifier[vmax] , identifier[self] ));
identifier[self] . identifier[vmaxValue] . identifier[setFixedWidth] ( literal[int] )
identifier[self] . identifier[vmaxValue] . identifier[setFixedHeight] ( literal[int] )
identifier[self] . identifier[vmaxValue] . identifier[setAlignment] ( identifier[Qt] . identifier[AlignCenter] );
identifier[self] . identifier[vmaxSlider] = identifier[QSlider] ( identifier[Qt] . identifier[Vertical] )
identifier[self] . identifier[vmaxSlider] . identifier[setMinimum] ( identifier[vmin] )
identifier[self] . identifier[vmaxSlider] . identifier[setMaximum] ( identifier[vmax] )
identifier[self] . identifier[vmaxSlider] . identifier[setValue] ( identifier[vmax] )
identifier[self] . identifier[vmaxLayout] . identifier[addWidget] ( identifier[self] . identifier[vmaxLabel] )
identifier[self] . identifier[vmaxLayout] . identifier[addWidget] ( identifier[self] . identifier[vmaxValue] )
identifier[self] . identifier[vmaxLayout] . identifier[addWidget] ( identifier[self] . identifier[vmaxSlider] )
literal[string]
identifier[self] . identifier[gLayout] . identifier[addLayout] ( identifier[self] . identifier[radioLayout] , literal[int] , literal[int] , literal[int] , literal[int] , identifier[Qt] . identifier[AlignCenter] )
identifier[self] . identifier[gLayout] . identifier[addLayout] ( identifier[self] . identifier[hminLayout] , literal[int] , literal[int] , identifier[Qt] . identifier[AlignCenter] )
identifier[self] . identifier[gLayout] . identifier[addLayout] ( identifier[self] . identifier[hmaxLayout] , literal[int] , literal[int] , identifier[Qt] . identifier[AlignCenter] )
identifier[self] . identifier[gLayout] . identifier[addLayout] ( identifier[self] . identifier[sminLayout] , literal[int] , literal[int] , identifier[Qt] . identifier[AlignCenter] )
identifier[self] . identifier[gLayout] . identifier[addLayout] ( identifier[self] . identifier[smaxLayout] , literal[int] , literal[int] , identifier[Qt] . identifier[AlignCenter] )
identifier[self] . identifier[gLayout] . identifier[addLayout] ( identifier[self] . identifier[vminLayout] , literal[int] , literal[int] , identifier[Qt] . identifier[AlignCenter] )
identifier[self] . identifier[gLayout] . identifier[addLayout] ( identifier[self] . identifier[vmaxLayout] , literal[int] , literal[int] , identifier[Qt] . identifier[AlignCenter] )
identifier[self] . identifier[setLayout] ( identifier[self] . identifier[gLayout] )
literal[string]
identifier[self] . identifier[hminSlider] . identifier[valueChanged] . identifier[connect] ( identifier[self] . identifier[changeHmin] )
identifier[self] . identifier[hmaxSlider] . identifier[valueChanged] . identifier[connect] ( identifier[self] . identifier[changeHmax] )
identifier[self] . identifier[sminSlider] . identifier[valueChanged] . identifier[connect] ( identifier[self] . identifier[changeSmin] )
identifier[self] . identifier[smaxSlider] . identifier[valueChanged] . identifier[connect] ( identifier[self] . identifier[changeSmax] )
identifier[self] . identifier[vminSlider] . identifier[valueChanged] . identifier[connect] ( identifier[self] . identifier[changeVmin] )
identifier[self] . identifier[vmaxSlider] . identifier[valueChanged] . identifier[connect] ( identifier[self] . identifier[changeVmax] )
identifier[self] . identifier[hminValue] . identifier[textChanged] . identifier[connect] ( identifier[self] . identifier[changeHmin2] )
identifier[self] . identifier[hmaxValue] . identifier[textChanged] . identifier[connect] ( identifier[self] . identifier[changeHmax2] )
identifier[self] . identifier[sminValue] . identifier[textChanged] . identifier[connect] ( identifier[self] . identifier[changeSmin2] )
identifier[self] . identifier[smaxValue] . identifier[textChanged] . identifier[connect] ( identifier[self] . identifier[changeSmax2] )
identifier[self] . identifier[vminValue] . identifier[textChanged] . identifier[connect] ( identifier[self] . identifier[changeVmin2] )
identifier[self] . identifier[vmaxValue] . identifier[textChanged] . identifier[connect] ( identifier[self] . identifier[changeVmax2] ) | def initUI(self):
#self.setMinimumSize(WIDTH,HEIGTH)
#self.setMaximumSize(WIDTH,HEIGTH)
'Radio buttons for Original/RGB/HSV/YUV images'
self.origButton = QRadioButton('Original')
self.rgbButton = QRadioButton('RGB')
self.hsvButton = QRadioButton('HSV')
self.yuvButton = QRadioButton('YUV')
'HSV status scheck'
self.hsvCheck = QCheckBox('HSV Color Disc')
self.hsvCheck.setObjectName('hsvCheck')
self.hsvCheck.stateChanged.connect(self.showHSVWidget)
'Signals for toggled radio buttons'
self.origButton.toggled.connect(lambda : self.origButtonState())
self.rgbButton.toggled.connect(lambda : self.rgbButtonState())
self.hsvButton.toggled.connect(lambda : self.hsvButtonState())
self.yuvButton.toggled.connect(lambda : self.yuvButtonState())
self.origButton.setChecked(True)
'Main layout of the widget will contain several vertical layouts'
self.gLayout = QGridLayout(self)
self.gLayout.setObjectName('gLayout')
' Vertical Layout for radio buttons '
self.radio1Layout = QHBoxLayout()
self.radio2Layout = QVBoxLayout()
self.radio3Layout = QVBoxLayout()
self.radioLayout = QVBoxLayout()
self.radioLayout.setObjectName('radioLayout')
self.radio1Layout.addWidget(self.origButton)
self.radio1Layout.addWidget(self.rgbButton)
self.radio1Layout.addWidget(self.hsvButton)
self.radio1Layout.addWidget(self.yuvButton)
self.radio1Layout.addWidget(self.hsvCheck)
#self.radio1Layout.addLayout(self.radio2Layout)
#self.radio1Layout.addLayout(self.radio3Layout)
self.radioLayout.addLayout(self.radio1Layout)
self.vSpacer = QSpacerItem(10, 10, QSizePolicy.Ignored, QSizePolicy.Ignored)
self.radioLayout.addItem(self.vSpacer)
(hmin, smin, vmin) = HSVMIN
(hmax, smax, vmax) = HSVMAX
' Vertical Layout for HMIN Slider'
self.hminLayout = QVBoxLayout()
self.hminLayout.setObjectName('hminLayout')
self.hminLabel = QLabel('HMin')
self.hminValue = QLineEdit(str(hmin), self)
self.hminValue.setValidator(QIntValidator(hmin, hmax, self))
self.hminValue.setFixedWidth(40)
self.hminValue.setFixedHeight(27)
self.hminValue.setAlignment(Qt.AlignCenter)
self.hminSlider = QSlider(Qt.Vertical)
self.hminSlider.setMinimum(hmin)
self.hminSlider.setMaximum(hmax)
self.hminSlider.setValue(hmin)
self.hminLayout.addWidget(self.hminLabel, Qt.AlignCenter)
self.hminLayout.addWidget(self.hminValue, Qt.AlignCenter)
self.hminLayout.addWidget(self.hminSlider)
' Vertical Layout for HMAX Slider'
self.hmaxLayout = QVBoxLayout()
self.hmaxLayout.setObjectName('hmaxLayout')
self.hmaxLabel = QLabel('HMax')
self.hmaxValue = QLineEdit(str(hmax), self)
self.hmaxValue.setValidator(QIntValidator(hmin, hmax, self))
self.hmaxValue.setFixedWidth(40)
self.hmaxValue.setFixedHeight(27)
self.hmaxValue.setAlignment(Qt.AlignCenter)
self.hmaxSlider = QSlider(Qt.Vertical)
self.hmaxSlider.setMinimum(hmin)
self.hmaxSlider.setMaximum(hmax)
self.hmaxSlider.setValue(hmax)
self.hmaxLayout.addWidget(self.hmaxLabel)
self.hmaxLayout.addWidget(self.hmaxValue)
self.hmaxLayout.addWidget(self.hmaxSlider)
' Vertical Layout for SMIN Slider'
self.sminLayout = QVBoxLayout()
self.sminLayout.setObjectName('sminLayout')
self.sminLabel = QLabel('SMin')
self.sminValue = QLineEdit(str(smin), self)
self.sminValue.setValidator(QIntValidator(smin, smax, self))
self.sminValue.setFixedWidth(40)
self.sminValue.setFixedHeight(27)
self.sminValue.setAlignment(Qt.AlignCenter)
self.sminSlider = QSlider(Qt.Vertical)
self.sminSlider.setMinimum(smin)
self.sminSlider.setMaximum(smax)
self.sminSlider.setValue(smin)
self.sminLayout.addWidget(self.sminLabel)
self.sminLayout.addWidget(self.sminValue)
self.sminLayout.addWidget(self.sminSlider)
' Vertical Layout for SMAX Slider'
self.smaxLayout = QVBoxLayout()
self.smaxLayout.setObjectName('smaxLayout')
self.smaxLabel = QLabel('SMax')
self.smaxValue = QLineEdit(str(smax), self)
self.smaxValue.setValidator(QIntValidator(smin, smax, self))
self.smaxValue.setFixedWidth(40)
self.smaxValue.setFixedHeight(27)
self.smaxValue.setAlignment(Qt.AlignCenter)
self.smaxSlider = QSlider(Qt.Vertical)
self.smaxSlider.setMinimum(smin)
self.smaxSlider.setMaximum(smax)
self.smaxSlider.setValue(smax)
self.smaxLayout.addWidget(self.smaxLabel)
self.smaxLayout.addWidget(self.smaxValue)
self.smaxLayout.addWidget(self.smaxSlider)
' Vertical Layout for VMIN Slider'
self.vminLayout = QVBoxLayout()
self.vminLayout.setObjectName('vminLayout')
self.vminLabel = QLabel('VMin')
self.vminValue = QLineEdit(str(vmin), self)
self.vminValue.setValidator(QIntValidator(vmin, vmax, self))
self.vminValue.setFixedWidth(40)
self.vminValue.setFixedHeight(27)
self.vminValue.setAlignment(Qt.AlignCenter)
self.vminSlider = QSlider(Qt.Vertical)
self.vminSlider.setMinimum(vmin)
self.vminSlider.setMaximum(vmax)
self.vminSlider.setValue(vmin)
self.vminLayout.addWidget(self.vminLabel)
self.vminLayout.addWidget(self.vminValue)
self.vminLayout.addWidget(self.vminSlider)
' Vertical Layout for VMAX Slider'
self.vmaxLayout = QVBoxLayout()
self.vmaxLayout.setObjectName('vmaxLayout')
self.vmaxLabel = QLabel('VMax')
self.vmaxValue = QLineEdit(str(vmax), self)
self.vmaxValue.setValidator(QIntValidator(vmin, vmax, self))
self.vmaxValue.setFixedWidth(40)
self.vmaxValue.setFixedHeight(27)
self.vmaxValue.setAlignment(Qt.AlignCenter)
self.vmaxSlider = QSlider(Qt.Vertical)
self.vmaxSlider.setMinimum(vmin)
self.vmaxSlider.setMaximum(vmax)
self.vmaxSlider.setValue(vmax)
self.vmaxLayout.addWidget(self.vmaxLabel)
self.vmaxLayout.addWidget(self.vmaxValue)
self.vmaxLayout.addWidget(self.vmaxSlider)
'Adding all the vertical layouts to the main horizontal layout'
self.gLayout.addLayout(self.radioLayout, 1, 0, 1, 6, Qt.AlignCenter)
self.gLayout.addLayout(self.hminLayout, 2, 0, Qt.AlignCenter)
self.gLayout.addLayout(self.hmaxLayout, 2, 1, Qt.AlignCenter)
self.gLayout.addLayout(self.sminLayout, 2, 2, Qt.AlignCenter)
self.gLayout.addLayout(self.smaxLayout, 2, 3, Qt.AlignCenter)
self.gLayout.addLayout(self.vminLayout, 2, 4, Qt.AlignCenter)
self.gLayout.addLayout(self.vmaxLayout, 2, 5, Qt.AlignCenter)
self.setLayout(self.gLayout)
'Signals for sliders value changes'
self.hminSlider.valueChanged.connect(self.changeHmin)
self.hmaxSlider.valueChanged.connect(self.changeHmax)
self.sminSlider.valueChanged.connect(self.changeSmin)
self.smaxSlider.valueChanged.connect(self.changeSmax)
self.vminSlider.valueChanged.connect(self.changeVmin)
self.vmaxSlider.valueChanged.connect(self.changeVmax)
self.hminValue.textChanged.connect(self.changeHmin2)
self.hmaxValue.textChanged.connect(self.changeHmax2)
self.sminValue.textChanged.connect(self.changeSmin2)
self.smaxValue.textChanged.connect(self.changeSmax2)
self.vminValue.textChanged.connect(self.changeVmin2)
self.vmaxValue.textChanged.connect(self.changeVmax2) |
def _ewp_files_set(self, ewp_dic, project_dic):
""" Fills files in the ewp dictionary """
# empty any files in the template which are not grouped
try:
ewp_dic['project']['file'] = []
except KeyError:
pass
# empty groups
ewp_dic['project']['group'] = []
i = 0
for group_name, files in project_dic['groups'].items():
ewp_dic['project']['group'].append({'name': group_name, 'file': []})
for file in files:
ewp_dic['project']['group'][i]['file'].append({'name': file})
ewp_dic['project']['group'][i]['file'] = sorted(ewp_dic['project']['group'][i]['file'], key=lambda x: os.path.basename(x['name'].lower()))
i += 1 | def function[_ewp_files_set, parameter[self, ewp_dic, project_dic]]:
constant[ Fills files in the ewp dictionary ]
<ast.Try object at 0x7da1b0efb190>
call[call[name[ewp_dic]][constant[project]]][constant[group]] assign[=] list[[]]
variable[i] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b0efabc0>, <ast.Name object at 0x7da1b0efa9e0>]]] in starred[call[call[name[project_dic]][constant[groups]].items, parameter[]]] begin[:]
call[call[call[name[ewp_dic]][constant[project]]][constant[group]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0d0d6c0>, <ast.Constant object at 0x7da1b0d0e9b0>], [<ast.Name object at 0x7da1b0d0c5e0>, <ast.List object at 0x7da1b0d0c1f0>]]]]
for taget[name[file]] in starred[name[files]] begin[:]
call[call[call[call[call[name[ewp_dic]][constant[project]]][constant[group]]][name[i]]][constant[file]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0d0fca0>], [<ast.Name object at 0x7da1b0d0ff10>]]]]
call[call[call[call[name[ewp_dic]][constant[project]]][constant[group]]][name[i]]][constant[file]] assign[=] call[name[sorted], parameter[call[call[call[call[name[ewp_dic]][constant[project]]][constant[group]]][name[i]]][constant[file]]]]
<ast.AugAssign object at 0x7da1b0efb430> | keyword[def] identifier[_ewp_files_set] ( identifier[self] , identifier[ewp_dic] , identifier[project_dic] ):
literal[string]
keyword[try] :
identifier[ewp_dic] [ literal[string] ][ literal[string] ]=[]
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[ewp_dic] [ literal[string] ][ literal[string] ]=[]
identifier[i] = literal[int]
keyword[for] identifier[group_name] , identifier[files] keyword[in] identifier[project_dic] [ literal[string] ]. identifier[items] ():
identifier[ewp_dic] [ literal[string] ][ literal[string] ]. identifier[append] ({ literal[string] : identifier[group_name] , literal[string] :[]})
keyword[for] identifier[file] keyword[in] identifier[files] :
identifier[ewp_dic] [ literal[string] ][ literal[string] ][ identifier[i] ][ literal[string] ]. identifier[append] ({ literal[string] : identifier[file] })
identifier[ewp_dic] [ literal[string] ][ literal[string] ][ identifier[i] ][ literal[string] ]= identifier[sorted] ( identifier[ewp_dic] [ literal[string] ][ literal[string] ][ identifier[i] ][ literal[string] ], identifier[key] = keyword[lambda] identifier[x] : identifier[os] . identifier[path] . identifier[basename] ( identifier[x] [ literal[string] ]. identifier[lower] ()))
identifier[i] += literal[int] | def _ewp_files_set(self, ewp_dic, project_dic):
""" Fills files in the ewp dictionary """
# empty any files in the template which are not grouped
try:
ewp_dic['project']['file'] = [] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
# empty groups
ewp_dic['project']['group'] = []
i = 0
for (group_name, files) in project_dic['groups'].items():
ewp_dic['project']['group'].append({'name': group_name, 'file': []})
for file in files:
ewp_dic['project']['group'][i]['file'].append({'name': file}) # depends on [control=['for'], data=['file']]
ewp_dic['project']['group'][i]['file'] = sorted(ewp_dic['project']['group'][i]['file'], key=lambda x: os.path.basename(x['name'].lower()))
i += 1 # depends on [control=['for'], data=[]] |
def model_from_list(l, header):
"""Return a model with a collection from a list of entry"""
col = groups.sortableListe(PseudoAccesCategorie(n) for n in l)
return MultiSelectModel(col, header) | def function[model_from_list, parameter[l, header]]:
constant[Return a model with a collection from a list of entry]
variable[col] assign[=] call[name[groups].sortableListe, parameter[<ast.GeneratorExp object at 0x7da1b11e1480>]]
return[call[name[MultiSelectModel], parameter[name[col], name[header]]]] | keyword[def] identifier[model_from_list] ( identifier[l] , identifier[header] ):
literal[string]
identifier[col] = identifier[groups] . identifier[sortableListe] ( identifier[PseudoAccesCategorie] ( identifier[n] ) keyword[for] identifier[n] keyword[in] identifier[l] )
keyword[return] identifier[MultiSelectModel] ( identifier[col] , identifier[header] ) | def model_from_list(l, header):
"""Return a model with a collection from a list of entry"""
col = groups.sortableListe((PseudoAccesCategorie(n) for n in l))
return MultiSelectModel(col, header) |
def to_json(self, default_mapping=None):
"""
Dump json of the text view.
**Parameters**
default_mapping : default_mapping, dictionary(optional), a dictionary of mapping of different types to json types.
by default DeepDiff converts certain data types. For example Decimals into floats so they can be exported into json.
If you have a certain object type that the json serializer can not serialize it, please pass the appropriate type
conversion through this dictionary.
**Example**
Serialize custom objects
>>> class A:
... pass
...
>>> class B:
... pass
...
>>> t1 = A()
>>> t2 = B()
>>> ddiff = DeepDiff(t1, t2)
>>> ddiff.to_json()
TypeError: We do not know how to convert <__main__.A object at 0x10648> of type <class '__main__.A'> for json serialization. Please pass the default_mapping parameter with proper mapping of the object to a basic python type.
>>> default_mapping = {A: lambda x: 'obj A', B: lambda x: 'obj B'}
>>> ddiff.to_json(default_mapping=default_mapping)
'{"type_changes": {"root": {"old_type": "A", "new_type": "B", "old_value": "obj A", "new_value": "obj B"}}}'
"""
return json.dumps(self.to_dict(), default=json_convertor_default(default_mapping=default_mapping)) | def function[to_json, parameter[self, default_mapping]]:
constant[
Dump json of the text view.
**Parameters**
default_mapping : default_mapping, dictionary(optional), a dictionary of mapping of different types to json types.
by default DeepDiff converts certain data types. For example Decimals into floats so they can be exported into json.
If you have a certain object type that the json serializer can not serialize it, please pass the appropriate type
conversion through this dictionary.
**Example**
Serialize custom objects
>>> class A:
... pass
...
>>> class B:
... pass
...
>>> t1 = A()
>>> t2 = B()
>>> ddiff = DeepDiff(t1, t2)
>>> ddiff.to_json()
TypeError: We do not know how to convert <__main__.A object at 0x10648> of type <class '__main__.A'> for json serialization. Please pass the default_mapping parameter with proper mapping of the object to a basic python type.
>>> default_mapping = {A: lambda x: 'obj A', B: lambda x: 'obj B'}
>>> ddiff.to_json(default_mapping=default_mapping)
'{"type_changes": {"root": {"old_type": "A", "new_type": "B", "old_value": "obj A", "new_value": "obj B"}}}'
]
return[call[name[json].dumps, parameter[call[name[self].to_dict, parameter[]]]]] | keyword[def] identifier[to_json] ( identifier[self] , identifier[default_mapping] = keyword[None] ):
literal[string]
keyword[return] identifier[json] . identifier[dumps] ( identifier[self] . identifier[to_dict] (), identifier[default] = identifier[json_convertor_default] ( identifier[default_mapping] = identifier[default_mapping] )) | def to_json(self, default_mapping=None):
"""
Dump json of the text view.
**Parameters**
default_mapping : default_mapping, dictionary(optional), a dictionary of mapping of different types to json types.
by default DeepDiff converts certain data types. For example Decimals into floats so they can be exported into json.
If you have a certain object type that the json serializer can not serialize it, please pass the appropriate type
conversion through this dictionary.
**Example**
Serialize custom objects
>>> class A:
... pass
...
>>> class B:
... pass
...
>>> t1 = A()
>>> t2 = B()
>>> ddiff = DeepDiff(t1, t2)
>>> ddiff.to_json()
TypeError: We do not know how to convert <__main__.A object at 0x10648> of type <class '__main__.A'> for json serialization. Please pass the default_mapping parameter with proper mapping of the object to a basic python type.
>>> default_mapping = {A: lambda x: 'obj A', B: lambda x: 'obj B'}
>>> ddiff.to_json(default_mapping=default_mapping)
'{"type_changes": {"root": {"old_type": "A", "new_type": "B", "old_value": "obj A", "new_value": "obj B"}}}'
"""
return json.dumps(self.to_dict(), default=json_convertor_default(default_mapping=default_mapping)) |
def post(self):
"""
This is an example
---
tags:
- restful
parameters:
- in: body
name: body
schema:
$ref: '#/definitions/Task'
responses:
201:
description: The task has been created
schema:
$ref: '#/definitions/Task'
"""
args = parser.parse_args()
todo_id = int(max(TODOS.keys()).lstrip('todo')) + 1
todo_id = 'todo%i' % todo_id
TODOS[todo_id] = {'task': args['task']}
return TODOS[todo_id], 201 | def function[post, parameter[self]]:
constant[
This is an example
---
tags:
- restful
parameters:
- in: body
name: body
schema:
$ref: '#/definitions/Task'
responses:
201:
description: The task has been created
schema:
$ref: '#/definitions/Task'
]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
variable[todo_id] assign[=] binary_operation[call[name[int], parameter[call[call[name[max], parameter[call[name[TODOS].keys, parameter[]]]].lstrip, parameter[constant[todo]]]]] + constant[1]]
variable[todo_id] assign[=] binary_operation[constant[todo%i] <ast.Mod object at 0x7da2590d6920> name[todo_id]]
call[name[TODOS]][name[todo_id]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1bf8370>], [<ast.Subscript object at 0x7da1b1bf9180>]]
return[tuple[[<ast.Subscript object at 0x7da1b1bf8c40>, <ast.Constant object at 0x7da1b1bbbd30>]]] | keyword[def] identifier[post] ( identifier[self] ):
literal[string]
identifier[args] = identifier[parser] . identifier[parse_args] ()
identifier[todo_id] = identifier[int] ( identifier[max] ( identifier[TODOS] . identifier[keys] ()). identifier[lstrip] ( literal[string] ))+ literal[int]
identifier[todo_id] = literal[string] % identifier[todo_id]
identifier[TODOS] [ identifier[todo_id] ]={ literal[string] : identifier[args] [ literal[string] ]}
keyword[return] identifier[TODOS] [ identifier[todo_id] ], literal[int] | def post(self):
"""
This is an example
---
tags:
- restful
parameters:
- in: body
name: body
schema:
$ref: '#/definitions/Task'
responses:
201:
description: The task has been created
schema:
$ref: '#/definitions/Task'
"""
args = parser.parse_args()
todo_id = int(max(TODOS.keys()).lstrip('todo')) + 1
todo_id = 'todo%i' % todo_id
TODOS[todo_id] = {'task': args['task']}
return (TODOS[todo_id], 201) |
def get_diskinfo(opts, show_all=False, debug=False, local_only=False):
    ''' Returns a list holding the current disk info,
        stats divided by the output unit.

        Parses the byte-string output of the module-level ``diskcmd``
        (a df-style disk listing) into one DiskInfo record per line.

        :param opts:        options object; reads opts.outunit (divisor for
                            the raw byte counts) and opts.debug (dump each
                            record when true).
        :param show_all:    keep entries whose device/mount point is listed
                            in the module-level devfilter/mntfilter sets;
                            such entries are flagged as RAM disks.
        :param debug:       NOTE(review): appears unused; the debug dump
                            below is driven by opts.debug instead — confirm.
        :param local_only:  skip network filesystems (':' in device name).
        :return:            sorted list of DiskInfo objects,
                            or None on IOError.
    '''
    outunit = opts.outunit
    disks = []
    try:
        label_map = get_label_map(opts)  # mount point -> volume label
        lines = run(diskcmd).splitlines()[1:] # dump header
        for line in lines:
            tokens = line.split()
            # Mount point is everything from the 9th column on, rejoined
            # because it may contain spaces — assumes diskcmd's column
            # layout; verify against the actual command.
            mntp = b' '.join(tokens[8:])
            dev = basename(tokens[0])
            disk = DiskInfo()
            if (dev in devfilter) or (mntp in mntfilter):
                # Normally-hidden entry: keep it only in show_all mode.
                if show_all:
                    if dev == b'map': # fix alignment :-/
                        # 'map' pseudo-devices (seen on macOS) split the
                        # device name over two tokens; merge them so the
                        # remaining columns line up again.
                        dev = tokens[0] = b'%b %b' % (dev, tokens[1])
                        del tokens[1]
                    disk.isram = True
                else:
                    continue
            # convert to bytes as integer, then output units
            disk.dev = dev = dev.decode('ascii')
            disk.ocap = float(tokens[1]) * 1024  # original capacity in bytes
            disk.cap = disk.ocap / outunit
            disk.free = float(tokens[3]) * 1024 / outunit
            disk.pcnt = int(tokens[4][:-1])  # strip trailing '%'
            disk.used = float(tokens[2]) * 1024 / outunit
            disk.mntp = mntp.decode('utf8')
            disk.label = label_map.get(disk.mntp)
            disk.ismntd = bool(disk.mntp)
            disk.isnet = ':' in dev # cheesy but may work? (macos)
            if local_only and disk.isnet:
                continue
            if disk.ismntd:
                # Root is assumed writable; probe everything else.
                if disk.mntp == '/':
                    disk.rw = True
                else:
                    disk.rw = os.access(disk.mntp, os.W_OK)
            # ~ disk.isopt = None # TODO: not sure how to get these
            # ~ disk.isrem = None
            disks.append(disk)
    except IOError as err:
        print(err)
        return None
    if opts.debug:
        print()
        for disk in disks:
            print(disk.dev, disk)
        print()
    disks.sort()
return disks | def function[get_diskinfo, parameter[opts, show_all, debug, local_only]]:
constant[ Returns a list holding the current disk info,
stats divided by the ouptut unit.
]
variable[outunit] assign[=] name[opts].outunit
variable[disks] assign[=] list[[]]
<ast.Try object at 0x7da20c993f70>
if name[opts].debug begin[:]
call[name[print], parameter[]]
for taget[name[disk]] in starred[name[disks]] begin[:]
call[name[print], parameter[name[disk].dev, name[disk]]]
call[name[print], parameter[]]
call[name[disks].sort, parameter[]]
return[name[disks]] | keyword[def] identifier[get_diskinfo] ( identifier[opts] , identifier[show_all] = keyword[False] , identifier[debug] = keyword[False] , identifier[local_only] = keyword[False] ):
literal[string]
identifier[outunit] = identifier[opts] . identifier[outunit]
identifier[disks] =[]
keyword[try] :
identifier[label_map] = identifier[get_label_map] ( identifier[opts] )
identifier[lines] = identifier[run] ( identifier[diskcmd] ). identifier[splitlines] ()[ literal[int] :]
keyword[for] identifier[line] keyword[in] identifier[lines] :
identifier[tokens] = identifier[line] . identifier[split] ()
identifier[mntp] = literal[string] . identifier[join] ( identifier[tokens] [ literal[int] :])
identifier[dev] = identifier[basename] ( identifier[tokens] [ literal[int] ])
identifier[disk] = identifier[DiskInfo] ()
keyword[if] ( identifier[dev] keyword[in] identifier[devfilter] ) keyword[or] ( identifier[mntp] keyword[in] identifier[mntfilter] ):
keyword[if] identifier[show_all] :
keyword[if] identifier[dev] == literal[string] :
identifier[dev] = identifier[tokens] [ literal[int] ]= literal[string] %( identifier[dev] , identifier[tokens] [ literal[int] ])
keyword[del] identifier[tokens] [ literal[int] ]
identifier[disk] . identifier[isram] = keyword[True]
keyword[else] :
keyword[continue]
identifier[disk] . identifier[dev] = identifier[dev] = identifier[dev] . identifier[decode] ( literal[string] )
identifier[disk] . identifier[ocap] = identifier[float] ( identifier[tokens] [ literal[int] ])* literal[int]
identifier[disk] . identifier[cap] = identifier[disk] . identifier[ocap] / identifier[outunit]
identifier[disk] . identifier[free] = identifier[float] ( identifier[tokens] [ literal[int] ])* literal[int] / identifier[outunit]
identifier[disk] . identifier[pcnt] = identifier[int] ( identifier[tokens] [ literal[int] ][:- literal[int] ])
identifier[disk] . identifier[used] = identifier[float] ( identifier[tokens] [ literal[int] ])* literal[int] / identifier[outunit]
identifier[disk] . identifier[mntp] = identifier[mntp] . identifier[decode] ( literal[string] )
identifier[disk] . identifier[label] = identifier[label_map] . identifier[get] ( identifier[disk] . identifier[mntp] )
identifier[disk] . identifier[ismntd] = identifier[bool] ( identifier[disk] . identifier[mntp] )
identifier[disk] . identifier[isnet] = literal[string] keyword[in] identifier[dev]
keyword[if] identifier[local_only] keyword[and] identifier[disk] . identifier[isnet] :
keyword[continue]
keyword[if] identifier[disk] . identifier[ismntd] :
keyword[if] identifier[disk] . identifier[mntp] == literal[string] :
identifier[disk] . identifier[rw] = keyword[True]
keyword[else] :
identifier[disk] . identifier[rw] = identifier[os] . identifier[access] ( identifier[disk] . identifier[mntp] , identifier[os] . identifier[W_OK] )
identifier[disks] . identifier[append] ( identifier[disk] )
keyword[except] identifier[IOError] keyword[as] identifier[err] :
identifier[print] ( identifier[err] )
keyword[return] keyword[None]
keyword[if] identifier[opts] . identifier[debug] :
identifier[print] ()
keyword[for] identifier[disk] keyword[in] identifier[disks] :
identifier[print] ( identifier[disk] . identifier[dev] , identifier[disk] )
identifier[print] ()
identifier[disks] . identifier[sort] ()
keyword[return] identifier[disks] | def get_diskinfo(opts, show_all=False, debug=False, local_only=False):
""" Returns a list holding the current disk info,
stats divided by the ouptut unit.
"""
outunit = opts.outunit
disks = []
try:
label_map = get_label_map(opts)
lines = run(diskcmd).splitlines()[1:] # dump header
for line in lines:
tokens = line.split()
mntp = b' '.join(tokens[8:])
dev = basename(tokens[0])
disk = DiskInfo()
if dev in devfilter or mntp in mntfilter:
if show_all:
if dev == b'map': # fix alignment :-/
dev = tokens[0] = b'%b %b' % (dev, tokens[1])
del tokens[1] # depends on [control=['if'], data=['dev']]
disk.isram = True # depends on [control=['if'], data=[]]
else:
continue # depends on [control=['if'], data=[]]
# convert to bytes as integer, then output units
disk.dev = dev = dev.decode('ascii')
disk.ocap = float(tokens[1]) * 1024
disk.cap = disk.ocap / outunit
disk.free = float(tokens[3]) * 1024 / outunit
disk.pcnt = int(tokens[4][:-1])
disk.used = float(tokens[2]) * 1024 / outunit
disk.mntp = mntp.decode('utf8')
disk.label = label_map.get(disk.mntp)
disk.ismntd = bool(disk.mntp)
disk.isnet = ':' in dev # cheesy but may work? (macos)
if local_only and disk.isnet:
continue # depends on [control=['if'], data=[]]
if disk.ismntd:
if disk.mntp == '/':
disk.rw = True # depends on [control=['if'], data=[]]
else:
disk.rw = os.access(disk.mntp, os.W_OK) # depends on [control=['if'], data=[]]
# ~ disk.isopt = None # TODO: not sure how to get these
# ~ disk.isrem = None
disks.append(disk) # depends on [control=['for'], data=['line']] # depends on [control=['try'], data=[]]
except IOError as err:
print(err)
return None # depends on [control=['except'], data=['err']]
if opts.debug:
print()
for disk in disks:
print(disk.dev, disk)
print() # depends on [control=['for'], data=['disk']] # depends on [control=['if'], data=[]]
disks.sort()
return disks |
def parse_subtags(subtags, expect=EXTLANG):
    """
    Parse everything that comes after the language tag: scripts, regions,
    variants, and assorted extensions.

    :param subtags: the remaining subtag strings, in the order they
        appeared in the language tag.
    :param expect: the earliest subtag type (one of the integer constants
        that index SUBTAG_TYPES) still allowed at this position; defaults
        to EXTLANG, i.e. anything after the primary language subtag.
    :return: a list of (typename, subtag) pairs for the parsed subtags.
        Invalid or out-of-order subtags are reported via
        subtag_error/order_error.
    """
    # We parse the parts of a language code recursively: each step of
    # language code parsing handles one component of the code, recurses
    # to handle the rest of the code, and adds what it found onto the
    # list of things that were in the rest of the code.
    #
    # This could just as well have been iterative, but the loops would have
    # been convoluted.
    #
    # So here's the base case.
    if not subtags:
        return []
    # There's a subtag that comes next. We need to find out what it is.
    #
    # The primary thing that distinguishes different types of subtags is
    # length, but the subtags also come in a specified order. The 'expect'
    # parameter keeps track of where we are in that order. expect=REGION,
    # for example, means we're expecting a region code, or anything later
    # (because everything but the language is optional).
    subtag = subtags[0]
    tag_length = len(subtag)
    # In the usual case, our goal is to recognize what kind of tag this is,
    # and set it in 'tagtype' -- as an integer, so we can compare where it
    # should go in order. You can see the enumerated list of tagtypes above,
    # where the SUBTAG_TYPES global is defined.
    tagtype = None
    if tag_length == 0 or tag_length > 8:
        # Unless you're inside a private use tag or something -- in which case,
        # you're not in this function at the moment -- every component needs to
        # be between 1 and 8 characters.
        subtag_error(subtag, '1-8 characters')
    elif tag_length == 1:
        # A one-character subtag introduces an extension, which can itself have
        # sub-subtags, so we dispatch to a different function at this point.
        #
        # We don't need to check anything about the order, because extensions
        # necessarily come last.
        return parse_extension(subtags)
    elif tag_length == 2:
        if subtag.isalpha():
            # Two-letter alphabetic subtags are regions. These are the only
            # two-character subtags after the language.
            tagtype = REGION
    elif tag_length == 3:
        if subtag.isalpha():
            # Three-letter alphabetic subtags are 'extended languages'.
            # It's allowed for there to be up to three of them in a row, so we
            # need another function to enforce that. Before we dispatch to that
            # function, though, we need to check whether we're in the right
            # place in order.
            if expect <= EXTLANG:
                return parse_extlang(subtags)
            else:
                order_error(subtag, EXTLANG, expect)
        elif subtag.isdigit():
            # Three-digit subtags are broad regions, such as Latin America
            # (419).
            tagtype = REGION
    elif tag_length == 4:
        if subtag.isalpha():
            # Four-letter alphabetic subtags are scripts.
            tagtype = SCRIPT
        elif subtag[0].isdigit():
            # Four-character subtags that start with a digit are variants.
            tagtype = VARIANT
    else:
        # Tags of length 5-8 are variants.
        tagtype = VARIANT
    # That's the end of the big elif block for figuring out what kind of
    # subtag we have based on its length. Now we should do something with that
    # kind of subtag.
    if tagtype is None:
        # We haven't recognized a type of tag. This subtag just doesn't fit the
        # standard.
        subtag_error(subtag)
    elif tagtype < expect:
        # We got a tag type that was supposed to appear earlier in the order.
        order_error(subtag, tagtype, expect)
    else:
        # We've recognized a subtag of a particular type. If it's a region or
        # script, we expect the next subtag to be a strictly later type, because
        # there can be at most one region and one script. Otherwise, we expect
        # the next subtag to be the type we got or later.
        if tagtype in (SCRIPT, REGION):
            expect = tagtype + 1
        else:
            expect = tagtype
        # Get the name of this subtag type instead of its integer value.
        typename = SUBTAG_TYPES[tagtype]
        # Some subtags are conventionally written with capitalization. Apply
        # those conventions.
        if tagtype == SCRIPT:
            subtag = subtag.title()
        elif tagtype == REGION:
            subtag = subtag.upper()
        # Recurse on the remaining subtags.
return [(typename, subtag)] + parse_subtags(subtags[1:], expect) | def function[parse_subtags, parameter[subtags, expect]]:
constant[
Parse everything that comes after the language tag: scripts, regions,
variants, and assorted extensions.
]
if <ast.UnaryOp object at 0x7da1b0625ea0> begin[:]
return[list[[]]]
variable[subtag] assign[=] call[name[subtags]][constant[0]]
variable[tag_length] assign[=] call[name[len], parameter[name[subtag]]]
variable[tagtype] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b0625b10> begin[:]
call[name[subtag_error], parameter[name[subtag], constant[1-8 characters]]]
if compare[name[tagtype] is constant[None]] begin[:]
call[name[subtag_error], parameter[name[subtag]]] | keyword[def] identifier[parse_subtags] ( identifier[subtags] , identifier[expect] = identifier[EXTLANG] ):
literal[string]
keyword[if] keyword[not] identifier[subtags] :
keyword[return] []
identifier[subtag] = identifier[subtags] [ literal[int] ]
identifier[tag_length] = identifier[len] ( identifier[subtag] )
identifier[tagtype] = keyword[None]
keyword[if] identifier[tag_length] == literal[int] keyword[or] identifier[tag_length] > literal[int] :
identifier[subtag_error] ( identifier[subtag] , literal[string] )
keyword[elif] identifier[tag_length] == literal[int] :
keyword[return] identifier[parse_extension] ( identifier[subtags] )
keyword[elif] identifier[tag_length] == literal[int] :
keyword[if] identifier[subtag] . identifier[isalpha] ():
identifier[tagtype] = identifier[REGION]
keyword[elif] identifier[tag_length] == literal[int] :
keyword[if] identifier[subtag] . identifier[isalpha] ():
keyword[if] identifier[expect] <= identifier[EXTLANG] :
keyword[return] identifier[parse_extlang] ( identifier[subtags] )
keyword[else] :
identifier[order_error] ( identifier[subtag] , identifier[EXTLANG] , identifier[expect] )
keyword[elif] identifier[subtag] . identifier[isdigit] ():
identifier[tagtype] = identifier[REGION]
keyword[elif] identifier[tag_length] == literal[int] :
keyword[if] identifier[subtag] . identifier[isalpha] ():
identifier[tagtype] = identifier[SCRIPT]
keyword[elif] identifier[subtag] [ literal[int] ]. identifier[isdigit] ():
identifier[tagtype] = identifier[VARIANT]
keyword[else] :
identifier[tagtype] = identifier[VARIANT]
keyword[if] identifier[tagtype] keyword[is] keyword[None] :
identifier[subtag_error] ( identifier[subtag] )
keyword[elif] identifier[tagtype] < identifier[expect] :
identifier[order_error] ( identifier[subtag] , identifier[tagtype] , identifier[expect] )
keyword[else] :
keyword[if] identifier[tagtype] keyword[in] ( identifier[SCRIPT] , identifier[REGION] ):
identifier[expect] = identifier[tagtype] + literal[int]
keyword[else] :
identifier[expect] = identifier[tagtype]
identifier[typename] = identifier[SUBTAG_TYPES] [ identifier[tagtype] ]
keyword[if] identifier[tagtype] == identifier[SCRIPT] :
identifier[subtag] = identifier[subtag] . identifier[title] ()
keyword[elif] identifier[tagtype] == identifier[REGION] :
identifier[subtag] = identifier[subtag] . identifier[upper] ()
keyword[return] [( identifier[typename] , identifier[subtag] )]+ identifier[parse_subtags] ( identifier[subtags] [ literal[int] :], identifier[expect] ) | def parse_subtags(subtags, expect=EXTLANG):
"""
Parse everything that comes after the language tag: scripts, regions,
variants, and assorted extensions.
"""
# We parse the parts of a language code recursively: each step of
# language code parsing handles one component of the code, recurses
# to handle the rest of the code, and adds what it found onto the
# list of things that were in the rest of the code.
#
# This could just as well have been iterative, but the loops would have
# been convoluted.
#
# So here's the base case.
if not subtags:
return [] # depends on [control=['if'], data=[]]
# There's a subtag that comes next. We need to find out what it is.
#
# The primary thing that distinguishes different types of subtags is
# length, but the subtags also come in a specified order. The 'expect'
# parameter keeps track of where we are in that order. expect=REGION,
# for example, means we're expecting a region code, or anything later
# (because everything but the language is optional).
subtag = subtags[0]
tag_length = len(subtag)
# In the usual case, our goal is to recognize what kind of tag this is,
# and set it in 'tagtype' -- as an integer, so we can compare where it
# should go in order. You can see the enumerated list of tagtypes above,
# where the SUBTAG_TYPES global is defined.
tagtype = None
if tag_length == 0 or tag_length > 8:
# Unless you're inside a private use tag or something -- in which case,
# you're not in this function at the moment -- every component needs to
# be between 1 and 8 characters.
subtag_error(subtag, '1-8 characters') # depends on [control=['if'], data=[]]
elif tag_length == 1:
# A one-character subtag introduces an extension, which can itself have
# sub-subtags, so we dispatch to a different function at this point.
#
# We don't need to check anything about the order, because extensions
# necessarily come last.
return parse_extension(subtags) # depends on [control=['if'], data=[]]
elif tag_length == 2:
if subtag.isalpha():
# Two-letter alphabetic subtags are regions. These are the only
# two-character subtags after the language.
tagtype = REGION # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif tag_length == 3:
if subtag.isalpha():
# Three-letter alphabetic subtags are 'extended languages'.
# It's allowed for there to be up to three of them in a row, so we
# need another function to enforce that. Before we dispatch to that
# function, though, we need to check whether we're in the right
# place in order.
if expect <= EXTLANG:
return parse_extlang(subtags) # depends on [control=['if'], data=[]]
else:
order_error(subtag, EXTLANG, expect) # depends on [control=['if'], data=[]]
elif subtag.isdigit():
# Three-digit subtags are broad regions, such as Latin America
# (419).
tagtype = REGION # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif tag_length == 4:
if subtag.isalpha():
# Four-letter alphabetic subtags are scripts.
tagtype = SCRIPT # depends on [control=['if'], data=[]]
elif subtag[0].isdigit():
# Four-character subtags that start with a digit are variants.
tagtype = VARIANT # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Tags of length 5-8 are variants.
tagtype = VARIANT
# That's the end of the big elif block for figuring out what kind of
# subtag we have based on its length. Now we should do something with that
# kind of subtag.
if tagtype is None:
# We haven't recognized a type of tag. This subtag just doesn't fit the
# standard.
subtag_error(subtag) # depends on [control=['if'], data=[]]
elif tagtype < expect:
# We got a tag type that was supposed to appear earlier in the order.
order_error(subtag, tagtype, expect) # depends on [control=['if'], data=['tagtype', 'expect']]
else:
# We've recognized a subtag of a particular type. If it's a region or
# script, we expect the next subtag to be a strictly later type, because
# there can be at most one region and one script. Otherwise, we expect
# the next subtag to be the type we got or later.
if tagtype in (SCRIPT, REGION):
expect = tagtype + 1 # depends on [control=['if'], data=['tagtype']]
else:
expect = tagtype
# Get the name of this subtag type instead of its integer value.
typename = SUBTAG_TYPES[tagtype]
# Some subtags are conventionally written with capitalization. Apply
# those conventions.
if tagtype == SCRIPT:
subtag = subtag.title() # depends on [control=['if'], data=[]]
elif tagtype == REGION:
subtag = subtag.upper() # depends on [control=['if'], data=[]]
# Recurse on the remaining subtags.
return [(typename, subtag)] + parse_subtags(subtags[1:], expect) |
def to_json(objects, filename, warnings=True):
    """
    Export the indicators of one or several users to JSON.

    Parameters
    ----------
    objects : list
        List of objects to be exported.  Each object must carry a 'name'
        key, which becomes its key in the exported JSON mapping; objects
        sharing a name overwrite each other.
    filename : string
        File to export to.
    warnings : bool
        Print a confirmation message on success (default True).

    Examples
    --------
    This function can be use to export the results of
    :meth`bandicoot.utils.all`.

    >>> U_1 = bc.User()
    >>> U_2 = bc.User()
    >>> bc.to_json([bc.utils.all(U_1), bc.utils.all(U_2)], 'results_1_2.json')

    If you only have one object, you can simply pass it as argument:

    >>> bc.to_json(bc.utils.all(U_1), 'results_1.json')
    """
    # Accept a single object by wrapping it in a list.
    if not isinstance(objects, list):
        objects = [objects]
    # Keyed by each object's 'name'; OrderedDict preserves input order.
    obj_dict = OrderedDict([(obj['name'], obj) for obj in objects])
    with open(filename, 'w') as f:
        f.write(dumps(obj_dict, indent=4, separators=(',', ': ')))
    if warnings:
        print("Successfully exported {} object(s) to {}".format(len(objects),
filename)) | def function[to_json, parameter[objects, filename, warnings]]:
constant[
Export the indicators of one or several users to JSON.
Parameters
----------
objects : list
List of objects to be exported.
filename : string
File to export to.
Examples
--------
This function can be use to export the results of
:meth`bandicoot.utils.all`.
>>> U_1 = bc.User()
>>> U_2 = bc.User()
>>> bc.to_json([bc.utils.all(U_1), bc.utils.all(U_2)], 'results_1_2.json')
If you only have one object, you can simply pass it as argument:
>>> bc.to_json(bc.utils.all(U_1), 'results_1.json')
]
if <ast.UnaryOp object at 0x7da1b0dc3ca0> begin[:]
variable[objects] assign[=] list[[<ast.Name object at 0x7da1b0dc2c20>]]
variable[obj_dict] assign[=] call[name[OrderedDict], parameter[<ast.ListComp object at 0x7da1b0dc3700>]]
with call[name[open], parameter[name[filename], constant[w]]] begin[:]
call[name[f].write, parameter[call[name[dumps], parameter[name[obj_dict]]]]]
if name[warnings] begin[:]
call[name[print], parameter[call[constant[Successfully exported {} object(s) to {}].format, parameter[call[name[len], parameter[name[objects]]], name[filename]]]]] | keyword[def] identifier[to_json] ( identifier[objects] , identifier[filename] , identifier[warnings] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[objects] , identifier[list] ):
identifier[objects] =[ identifier[objects] ]
identifier[obj_dict] = identifier[OrderedDict] ([( identifier[obj] [ literal[string] ], identifier[obj] ) keyword[for] identifier[obj] keyword[in] identifier[objects] ])
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[dumps] ( identifier[obj_dict] , identifier[indent] = literal[int] , identifier[separators] =( literal[string] , literal[string] )))
keyword[if] identifier[warnings] :
identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[objects] ),
identifier[filename] )) | def to_json(objects, filename, warnings=True):
"""
Export the indicators of one or several users to JSON.
Parameters
----------
objects : list
List of objects to be exported.
filename : string
File to export to.
Examples
--------
This function can be use to export the results of
:meth`bandicoot.utils.all`.
>>> U_1 = bc.User()
>>> U_2 = bc.User()
>>> bc.to_json([bc.utils.all(U_1), bc.utils.all(U_2)], 'results_1_2.json')
If you only have one object, you can simply pass it as argument:
>>> bc.to_json(bc.utils.all(U_1), 'results_1.json')
"""
if not isinstance(objects, list):
objects = [objects] # depends on [control=['if'], data=[]]
obj_dict = OrderedDict([(obj['name'], obj) for obj in objects])
with open(filename, 'w') as f:
f.write(dumps(obj_dict, indent=4, separators=(',', ': '))) # depends on [control=['with'], data=['f']]
if warnings:
print('Successfully exported {} object(s) to {}'.format(len(objects), filename)) # depends on [control=['if'], data=[]] |
def _fix_name(self, prefix, name):
    """
    Apply some fixes to element names and attribute names.

    Try to get conform to:
    > Like element names, attribute names are case-sensitive and must start with a letter or underscore.
    > The rest of the name can contain letters, digits, hyphens, underscores, and periods.

    See: https://msdn.microsoft.com/en-us/library/ms256152(v=vs.110).aspx

    This function tries to fix some broken namespace mappings.
    In some cases, the namespace prefix is inside the name and not in the prefix field.
    Then, the tag name will usually look like 'android:foobar'.
    If and only if the namespace prefix is inside the namespace mapping and the actual prefix field is empty,
    we will strip the prefix from the attribute name and return the fixed prefix URI instead.
    Otherwise replacement rules will be applied.

    The replacement rules work in that way, that all unwanted characters are replaced by underscores.
    In other words, all characters except the ones listed above are replaced.

    Any fix applied here also sets self.packerwarning, since malformed
    names are a common sign of packed/obfuscated AXML files.

    :param name: Name of the attribute or tag
    :param prefix: The existing prefix uri as found in the AXML chunk
    :return: a fixed version of prefix and name
    :rtype: tuple
    """
    if not name[0].isalpha() and name[0] != "_":
        # XML names must begin with a letter or underscore; prepend one.
        log.warning("Invalid start for name '{}'. "
                    "XML name must start with a letter.".format(name))
        self.packerwarning = True
        name = "_{}".format(name)

    if name.startswith("android:") and prefix == '' and 'android' in self.axml.nsmap:
        # Seems be a common thing...
        log.info("Name '{}' starts with 'android:' prefix but 'android' is a known prefix. Replacing prefix.".format(name))
        prefix = self._print_namespace(self.axml.nsmap['android'])
        name = name[len("android:"):]
        # It looks like this is some kind of packer... Not sure though.
        self.packerwarning = True
    elif ":" in name and prefix == '':
        # Name carries some other embedded prefix; trust it only if it is
        # present in the declared namespace mapping.
        self.packerwarning = True
        embedded_prefix, new_name = name.split(":", 1)
        if embedded_prefix in self.axml.nsmap:
            # NOTE(review): the '{}' placeholder below is never filled in
            # (no .format() argument) — the literal braces are logged.
            log.info("Prefix '{}' is in namespace mapping, assume that it is a prefix.")
            prefix = self._print_namespace(self.axml.nsmap[embedded_prefix])
            name = new_name
        else:
            # Print out an extra warning
            log.warning("Confused: name contains a unknown namespace prefix: '{}'. "
                        "This is either a broken AXML file or some attempt to break stuff.".format(name))

    if not re.match(r"^[a-zA-Z0-9._-]*$", name):
        # Last resort: replace every disallowed character with '_'.
        log.warning("Name '{}' contains invalid characters!".format(name))
        self.packerwarning = True
        name = re.sub(r"[^a-zA-Z0-9._-]", "_", name)
return prefix, name | def function[_fix_name, parameter[self, prefix, name]]:
constant[
Apply some fixes to element named and attribute names.
Try to get conform to:
> Like element names, attribute names are case-sensitive and must start with a letter or underscore.
> The rest of the name can contain letters, digits, hyphens, underscores, and periods.
See: https://msdn.microsoft.com/en-us/library/ms256152(v=vs.110).aspx
This function tries to fix some broken namespace mappings.
In some cases, the namespace prefix is inside the name and not in the prefix field.
Then, the tag name will usually look like 'android:foobar'.
If and only if the namespace prefix is inside the namespace mapping and the actual prefix field is empty,
we will strip the prefix from the attribute name and return the fixed prefix URI instead.
Otherwise replacement rules will be applied.
The replacement rules work in that way, that all unwanted characters are replaced by underscores.
In other words, all characters except the ones listed above are replaced.
:param name: Name of the attribute or tag
:param prefix: The existing prefix uri as found in the AXML chunk
:return: a fixed version of prefix and name
:rtype: tuple
]
if <ast.BoolOp object at 0x7da20c6c6920> begin[:]
call[name[log].warning, parameter[call[constant[Invalid start for name '{}'. XML name must start with a letter.].format, parameter[name[name]]]]]
name[self].packerwarning assign[=] constant[True]
variable[name] assign[=] call[constant[_{}].format, parameter[name[name]]]
if <ast.BoolOp object at 0x7da20c6c5060> begin[:]
call[name[log].info, parameter[call[constant[Name '{}' starts with 'android:' prefix but 'android' is a known prefix. Replacing prefix.].format, parameter[name[name]]]]]
variable[prefix] assign[=] call[name[self]._print_namespace, parameter[call[name[self].axml.nsmap][constant[android]]]]
variable[name] assign[=] call[name[name]][<ast.Slice object at 0x7da20c6c4df0>]
name[self].packerwarning assign[=] constant[True]
if <ast.UnaryOp object at 0x7da18f00f820> begin[:]
call[name[log].warning, parameter[call[constant[Name '{}' contains invalid characters!].format, parameter[name[name]]]]]
name[self].packerwarning assign[=] constant[True]
variable[name] assign[=] call[name[re].sub, parameter[constant[[^a-zA-Z0-9._-]], constant[_], name[name]]]
return[tuple[[<ast.Name object at 0x7da18f00df00>, <ast.Name object at 0x7da18f00d690>]]] | keyword[def] identifier[_fix_name] ( identifier[self] , identifier[prefix] , identifier[name] ):
literal[string]
keyword[if] keyword[not] identifier[name] [ literal[int] ]. identifier[isalpha] () keyword[and] identifier[name] [ literal[int] ]!= literal[string] :
identifier[log] . identifier[warning] ( literal[string]
literal[string] . identifier[format] ( identifier[name] ))
identifier[self] . identifier[packerwarning] = keyword[True]
identifier[name] = literal[string] . identifier[format] ( identifier[name] )
keyword[if] identifier[name] . identifier[startswith] ( literal[string] ) keyword[and] identifier[prefix] == literal[string] keyword[and] literal[string] keyword[in] identifier[self] . identifier[axml] . identifier[nsmap] :
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] ))
identifier[prefix] = identifier[self] . identifier[_print_namespace] ( identifier[self] . identifier[axml] . identifier[nsmap] [ literal[string] ])
identifier[name] = identifier[name] [ identifier[len] ( literal[string] ):]
identifier[self] . identifier[packerwarning] = keyword[True]
keyword[elif] literal[string] keyword[in] identifier[name] keyword[and] identifier[prefix] == literal[string] :
identifier[self] . identifier[packerwarning] = keyword[True]
identifier[embedded_prefix] , identifier[new_name] = identifier[name] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[embedded_prefix] keyword[in] identifier[self] . identifier[axml] . identifier[nsmap] :
identifier[log] . identifier[info] ( literal[string] )
identifier[prefix] = identifier[self] . identifier[_print_namespace] ( identifier[self] . identifier[axml] . identifier[nsmap] [ identifier[embedded_prefix] ])
identifier[name] = identifier[new_name]
keyword[else] :
identifier[log] . identifier[warning] ( literal[string]
literal[string] . identifier[format] ( identifier[name] ))
keyword[if] keyword[not] identifier[re] . identifier[match] ( literal[string] , identifier[name] ):
identifier[log] . identifier[warning] ( literal[string] . identifier[format] ( identifier[name] ))
identifier[self] . identifier[packerwarning] = keyword[True]
identifier[name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[name] )
keyword[return] identifier[prefix] , identifier[name] | def _fix_name(self, prefix, name):
"""
Apply some fixes to element named and attribute names.
Try to get conform to:
> Like element names, attribute names are case-sensitive and must start with a letter or underscore.
> The rest of the name can contain letters, digits, hyphens, underscores, and periods.
See: https://msdn.microsoft.com/en-us/library/ms256152(v=vs.110).aspx
This function tries to fix some broken namespace mappings.
In some cases, the namespace prefix is inside the name and not in the prefix field.
Then, the tag name will usually look like 'android:foobar'.
If and only if the namespace prefix is inside the namespace mapping and the actual prefix field is empty,
we will strip the prefix from the attribute name and return the fixed prefix URI instead.
Otherwise replacement rules will be applied.
The replacement rules work in that way, that all unwanted characters are replaced by underscores.
In other words, all characters except the ones listed above are replaced.
:param name: Name of the attribute or tag
:param prefix: The existing prefix uri as found in the AXML chunk
:return: a fixed version of prefix and name
:rtype: tuple
"""
if not name[0].isalpha() and name[0] != '_':
log.warning("Invalid start for name '{}'. XML name must start with a letter.".format(name))
self.packerwarning = True
name = '_{}'.format(name) # depends on [control=['if'], data=[]]
if name.startswith('android:') and prefix == '' and ('android' in self.axml.nsmap):
# Seems be a common thing...
log.info("Name '{}' starts with 'android:' prefix but 'android' is a known prefix. Replacing prefix.".format(name))
prefix = self._print_namespace(self.axml.nsmap['android'])
name = name[len('android:'):]
# It looks like this is some kind of packer... Not sure though.
self.packerwarning = True # depends on [control=['if'], data=[]]
elif ':' in name and prefix == '':
self.packerwarning = True
(embedded_prefix, new_name) = name.split(':', 1)
if embedded_prefix in self.axml.nsmap:
log.info("Prefix '{}' is in namespace mapping, assume that it is a prefix.")
prefix = self._print_namespace(self.axml.nsmap[embedded_prefix])
name = new_name # depends on [control=['if'], data=['embedded_prefix']]
else:
# Print out an extra warning
log.warning("Confused: name contains a unknown namespace prefix: '{}'. This is either a broken AXML file or some attempt to break stuff.".format(name)) # depends on [control=['if'], data=[]]
if not re.match('^[a-zA-Z0-9._-]*$', name):
log.warning("Name '{}' contains invalid characters!".format(name))
self.packerwarning = True
name = re.sub('[^a-zA-Z0-9._-]', '_', name) # depends on [control=['if'], data=[]]
return (prefix, name) |
def toggle_rich_text(self, checked):
    """Toggle between sphinxified (rich) docstrings and plain ones.

    Persists the choice via the 'rich_mode' option; enabling rich mode
    also turns off plain-docstring mode and activates the rich renderer.
    """
    if not checked:
        self.set_option('rich_mode', checked)
        return
    # Rich mode and plain-docstring mode are mutually exclusive.
    self.docstring = False
    self.switch_to_rich_text()
    self.set_option('rich_mode', checked)
constant[Toggle between sphinxified docstrings or plain ones]
if name[checked] begin[:]
name[self].docstring assign[=] <ast.UnaryOp object at 0x7da20e956260>
call[name[self].switch_to_rich_text, parameter[]]
call[name[self].set_option, parameter[constant[rich_mode], name[checked]]] | keyword[def] identifier[toggle_rich_text] ( identifier[self] , identifier[checked] ):
literal[string]
keyword[if] identifier[checked] :
identifier[self] . identifier[docstring] = keyword[not] identifier[checked]
identifier[self] . identifier[switch_to_rich_text] ()
identifier[self] . identifier[set_option] ( literal[string] , identifier[checked] ) | def toggle_rich_text(self, checked):
"""Toggle between sphinxified docstrings or plain ones"""
if checked:
self.docstring = not checked
self.switch_to_rich_text() # depends on [control=['if'], data=[]]
self.set_option('rich_mode', checked) |
def paras(self: object, fileids: str):
    """Yield paragraphs from .tess files, delimited by blank lines.

    NB: Most .tess files do not have this feature; only the Homeric poems
    from what I have noticed so far. Perhaps a feature worth looking into.
    """
    for body in self.texts(fileids):
        # A paragraph break is two consecutive newline characters.
        yield from body.split('\n\n')
constant[
Returns paragraphs in a .tess file, as defined by two
characters.
NB: Most .tess files do not have this feature; only the Homeric poems
from what I have noticed so far. Perhaps a feature worth looking into.
]
for taget[name[text]] in starred[call[name[self].texts, parameter[name[fileids]]]] begin[:]
for taget[name[para]] in starred[call[name[text].split, parameter[constant[
]]]] begin[:]
<ast.Yield object at 0x7da207f9a170> | keyword[def] identifier[paras] ( identifier[self] : identifier[object] , identifier[fileids] : identifier[str] ):
literal[string]
keyword[for] identifier[text] keyword[in] identifier[self] . identifier[texts] ( identifier[fileids] ):
keyword[for] identifier[para] keyword[in] identifier[text] . identifier[split] ( literal[string] ):
keyword[yield] identifier[para] | def paras(self: object, fileids: str):
"""
Returns paragraphs in a .tess file, as defined by two
characters.
NB: Most .tess files do not have this feature; only the Homeric poems
from what I have noticed so far. Perhaps a feature worth looking into.
"""
for text in self.texts(fileids):
for para in text.split('\n\n'):
yield para # depends on [control=['for'], data=['para']] # depends on [control=['for'], data=['text']] |
def set_staff_url(parser, token):
    """
    Assign an URL to be the "admin link" of this page.
    Example::
        {% set_staff_url %}{% url 'admin:fluent_pages_page_change' page.id %}{% end_set_staff_url %}
    """
    # Collect everything up to the matching end tag, then discard the
    # {% end_set_staff_url %} token itself.
    inner_nodes = parser.parse(('end_set_staff_url',))
    parser.delete_first_token()
    return AdminUrlNode(inner_nodes)
constant[
Assign an URL to be the "admin link" of this page.
Example::
{% set_staff_url %}{% url 'admin:fluent_pages_page_change' page.id %}{% end_set_staff_url %}
]
variable[nodelist] assign[=] call[name[parser].parse, parameter[tuple[[<ast.Constant object at 0x7da1b0ab86a0>]]]]
call[name[parser].delete_first_token, parameter[]]
return[call[name[AdminUrlNode], parameter[name[nodelist]]]] | keyword[def] identifier[set_staff_url] ( identifier[parser] , identifier[token] ):
literal[string]
identifier[nodelist] = identifier[parser] . identifier[parse] (( literal[string] ,))
identifier[parser] . identifier[delete_first_token] ()
keyword[return] identifier[AdminUrlNode] ( identifier[nodelist] ) | def set_staff_url(parser, token):
"""
Assign an URL to be the "admin link" of this page.
Example::
{% set_staff_url %}{% url 'admin:fluent_pages_page_change' page.id %}{% end_set_staff_url %}
"""
nodelist = parser.parse(('end_set_staff_url',))
parser.delete_first_token()
return AdminUrlNode(nodelist) |
def lease(self, lease_time, num_tasks, group_by_tag=False, tag=None, client=None):
    """ Acquires a lease on the topmost N unowned tasks in the specified queue.
    NOTE: despite the ``:rtype:`` below, this is implemented as a generator
    that lazily yields ``Task`` objects built from the API response.
    :type lease_time: int
    :param lease_time: How long to lease this task, in seconds.
    :type num_tasks: int
    :param num_tasks: The number of tasks to lease.
    :type group_by_tag: bool
    :param group_by_tag: Optional. When True, returns tasks of the same tag. Specify which tag by using the
    tag parameter. If tag is not specified, returns tasks of the same tag as the oldest task in the queue.
    :type tag: string
    :param tag: Optional. Only specify tag if groupByTag is true. If groupByTag is true and tag is not specified,
    the tag is assumed to be that of the oldest task by ETA. I.e., the first available tag.
    :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
    to the ``client`` stored on the task's taskqueue.
    :rtype: :class:`_TaskIterator`.
    :returns: An iterator of tasks.
    """
    client = self._require_client(client)
    # Only send the tag-related parameters when grouping was requested;
    # the API rejects "tag" without "groupByTag".
    if group_by_tag:
        query_params = {"leaseSecs": lease_time, "numTasks": num_tasks, "groupByTag": group_by_tag, "tag": tag}
    else:
        query_params = {"leaseSecs": lease_time, "numTasks": num_tasks}
    response = client.connection.api_request(method='POST', path=self.path + "/tasks/lease",
                                             query_params=query_params)
    # "items" may be absent when no task is available to lease.
    for item in response.get('items', []):
        # NOTE: "id" shadows the builtin; kept as-is for byte-compatibility.
        id = item.get('id')
        task = Task(id, taskqueue=self)
        task._set_properties(item)
        yield task
constant[ Acquires a lease on the topmost N unowned tasks in the specified queue.
:type lease_time: int
:param lease_time: How long to lease this task, in seconds.
:type num_tasks: int
:param num_tasks: The number of tasks to lease.
:type group_by_tag: bool
:param group_by_tag: Optional. When True, returns tasks of the same tag. Specify which tag by using the
tag parameter. If tag is not specified, returns tasks of the same tag as the oldest task in the queue.
:type tag: string
:param tag: Optional. Only specify tag if groupByTag is true. If groupByTag is true and tag is not specified,
the tag is assumed to be that of the oldest task by ETA. I.e., the first available tag.
:type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the task's taskqueue.
:rtype: :class:`_TaskIterator`.
:returns: An iterator of tasks.
]
variable[client] assign[=] call[name[self]._require_client, parameter[name[client]]]
if name[group_by_tag] begin[:]
variable[query_params] assign[=] dictionary[[<ast.Constant object at 0x7da1b14d9ea0>, <ast.Constant object at 0x7da1b14db8e0>, <ast.Constant object at 0x7da1b14db0a0>, <ast.Constant object at 0x7da1b14d9a50>], [<ast.Name object at 0x7da1b14d88e0>, <ast.Name object at 0x7da1b14db040>, <ast.Name object at 0x7da1b14d9f60>, <ast.Name object at 0x7da1b14d8c70>]]
variable[response] assign[=] call[name[client].connection.api_request, parameter[]]
for taget[name[item]] in starred[call[name[response].get, parameter[constant[items], list[[]]]]] begin[:]
variable[id] assign[=] call[name[item].get, parameter[constant[id]]]
variable[task] assign[=] call[name[Task], parameter[name[id]]]
call[name[task]._set_properties, parameter[name[item]]]
<ast.Yield object at 0x7da1b14d9060> | keyword[def] identifier[lease] ( identifier[self] , identifier[lease_time] , identifier[num_tasks] , identifier[group_by_tag] = keyword[False] , identifier[tag] = keyword[None] , identifier[client] = keyword[None] ):
literal[string]
identifier[client] = identifier[self] . identifier[_require_client] ( identifier[client] )
keyword[if] identifier[group_by_tag] :
identifier[query_params] ={ literal[string] : identifier[lease_time] , literal[string] : identifier[num_tasks] , literal[string] : identifier[group_by_tag] , literal[string] : identifier[tag] }
keyword[else] :
identifier[query_params] ={ literal[string] : identifier[lease_time] , literal[string] : identifier[num_tasks] }
identifier[response] = identifier[client] . identifier[connection] . identifier[api_request] ( identifier[method] = literal[string] , identifier[path] = identifier[self] . identifier[path] + literal[string] ,
identifier[query_params] = identifier[query_params] )
keyword[for] identifier[item] keyword[in] identifier[response] . identifier[get] ( literal[string] ,[]):
identifier[id] = identifier[item] . identifier[get] ( literal[string] )
identifier[task] = identifier[Task] ( identifier[id] , identifier[taskqueue] = identifier[self] )
identifier[task] . identifier[_set_properties] ( identifier[item] )
keyword[yield] identifier[task] | def lease(self, lease_time, num_tasks, group_by_tag=False, tag=None, client=None):
""" Acquires a lease on the topmost N unowned tasks in the specified queue.
:type lease_time: int
:param lease_time: How long to lease this task, in seconds.
:type num_tasks: int
:param num_tasks: The number of tasks to lease.
:type group_by_tag: bool
:param group_by_tag: Optional. When True, returns tasks of the same tag. Specify which tag by using the
tag parameter. If tag is not specified, returns tasks of the same tag as the oldest task in the queue.
:type tag: string
:param tag: Optional. Only specify tag if groupByTag is true. If groupByTag is true and tag is not specified,
the tag is assumed to be that of the oldest task by ETA. I.e., the first available tag.
:type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the task's taskqueue.
:rtype: :class:`_TaskIterator`.
:returns: An iterator of tasks.
"""
client = self._require_client(client)
if group_by_tag:
query_params = {'leaseSecs': lease_time, 'numTasks': num_tasks, 'groupByTag': group_by_tag, 'tag': tag} # depends on [control=['if'], data=[]]
else:
query_params = {'leaseSecs': lease_time, 'numTasks': num_tasks}
response = client.connection.api_request(method='POST', path=self.path + '/tasks/lease', query_params=query_params)
for item in response.get('items', []):
id = item.get('id')
task = Task(id, taskqueue=self)
task._set_properties(item)
yield task # depends on [control=['for'], data=['item']] |
def export(self, node):
    """Export tree starting at `node`."""
    if self.attriter:
        attr_transform = self.attriter
    else:
        # Fall back to an identity transform when no attriter was given.
        attr_transform = lambda attr_values: attr_values
    return self.__export(node, self.dictcls, attr_transform, self.childiter)
constant[Export tree starting at `node`.]
variable[attriter] assign[=] <ast.BoolOp object at 0x7da20c993460>
return[call[name[self].__export, parameter[name[node], name[self].dictcls, name[attriter], name[self].childiter]]] | keyword[def] identifier[export] ( identifier[self] , identifier[node] ):
literal[string]
identifier[attriter] = identifier[self] . identifier[attriter] keyword[or] ( keyword[lambda] identifier[attr_values] : identifier[attr_values] )
keyword[return] identifier[self] . identifier[__export] ( identifier[node] , identifier[self] . identifier[dictcls] , identifier[attriter] , identifier[self] . identifier[childiter] ) | def export(self, node):
"""Export tree starting at `node`."""
attriter = self.attriter or (lambda attr_values: attr_values)
return self.__export(node, self.dictcls, attriter, self.childiter) |
def nearest_overlap(self, overlap, bins):
    """Return nearest overlap/crop factor based on number of bins."""
    overlapping_bins = bins * overlap
    if overlapping_bins % 2 == 0:
        # Already an even number of overlapping bins; nothing to adjust.
        return overlap
    # Round the overlapping-bin count up to the nearest even integer and
    # derive the corresponding overlap factor from it.
    overlapping_bins = 2 * math.ceil(overlapping_bins / 2)
    adjusted = overlapping_bins / bins
    logger.warning('number of overlapping FFT bins should be even, '
                   'changing overlap/crop factor to {:.5f}'.format(adjusted))
    return adjusted
constant[Return nearest overlap/crop factor based on number of bins]
variable[bins_overlap] assign[=] binary_operation[name[overlap] * name[bins]]
if compare[binary_operation[name[bins_overlap] <ast.Mod object at 0x7da2590d6920> constant[2]] not_equal[!=] constant[0]] begin[:]
variable[bins_overlap] assign[=] binary_operation[call[name[math].ceil, parameter[binary_operation[name[bins_overlap] / constant[2]]]] * constant[2]]
variable[overlap] assign[=] binary_operation[name[bins_overlap] / name[bins]]
call[name[logger].warning, parameter[call[constant[number of overlapping FFT bins should be even, changing overlap/crop factor to {:.5f}].format, parameter[name[overlap]]]]]
return[name[overlap]] | keyword[def] identifier[nearest_overlap] ( identifier[self] , identifier[overlap] , identifier[bins] ):
literal[string]
identifier[bins_overlap] = identifier[overlap] * identifier[bins]
keyword[if] identifier[bins_overlap] % literal[int] != literal[int] :
identifier[bins_overlap] = identifier[math] . identifier[ceil] ( identifier[bins_overlap] / literal[int] )* literal[int]
identifier[overlap] = identifier[bins_overlap] / identifier[bins]
identifier[logger] . identifier[warning] ( literal[string]
literal[string] . identifier[format] ( identifier[overlap] ))
keyword[return] identifier[overlap] | def nearest_overlap(self, overlap, bins):
"""Return nearest overlap/crop factor based on number of bins"""
bins_overlap = overlap * bins
if bins_overlap % 2 != 0:
bins_overlap = math.ceil(bins_overlap / 2) * 2
overlap = bins_overlap / bins
logger.warning('number of overlapping FFT bins should be even, changing overlap/crop factor to {:.5f}'.format(overlap)) # depends on [control=['if'], data=[]]
return overlap |
def _enum_from_direction(direction):
"""Convert a string representation of a direction to an enum.
Args:
direction (str): A direction to order by. Must be one of
:attr:`~.firestore.Query.ASCENDING` or
:attr:`~.firestore.Query.DESCENDING`.
Returns:
int: The enum corresponding to ``direction``.
Raises:
ValueError: If ``direction`` is not a valid direction.
"""
if isinstance(direction, int):
return direction
if direction == Query.ASCENDING:
return enums.StructuredQuery.Direction.ASCENDING
elif direction == Query.DESCENDING:
return enums.StructuredQuery.Direction.DESCENDING
else:
msg = _BAD_DIR_STRING.format(direction, Query.ASCENDING, Query.DESCENDING)
raise ValueError(msg) | def function[_enum_from_direction, parameter[direction]]:
constant[Convert a string representation of a direction to an enum.
Args:
direction (str): A direction to order by. Must be one of
:attr:`~.firestore.Query.ASCENDING` or
:attr:`~.firestore.Query.DESCENDING`.
Returns:
int: The enum corresponding to ``direction``.
Raises:
ValueError: If ``direction`` is not a valid direction.
]
if call[name[isinstance], parameter[name[direction], name[int]]] begin[:]
return[name[direction]]
if compare[name[direction] equal[==] name[Query].ASCENDING] begin[:]
return[name[enums].StructuredQuery.Direction.ASCENDING] | keyword[def] identifier[_enum_from_direction] ( identifier[direction] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[direction] , identifier[int] ):
keyword[return] identifier[direction]
keyword[if] identifier[direction] == identifier[Query] . identifier[ASCENDING] :
keyword[return] identifier[enums] . identifier[StructuredQuery] . identifier[Direction] . identifier[ASCENDING]
keyword[elif] identifier[direction] == identifier[Query] . identifier[DESCENDING] :
keyword[return] identifier[enums] . identifier[StructuredQuery] . identifier[Direction] . identifier[DESCENDING]
keyword[else] :
identifier[msg] = identifier[_BAD_DIR_STRING] . identifier[format] ( identifier[direction] , identifier[Query] . identifier[ASCENDING] , identifier[Query] . identifier[DESCENDING] )
keyword[raise] identifier[ValueError] ( identifier[msg] ) | def _enum_from_direction(direction):
"""Convert a string representation of a direction to an enum.
Args:
direction (str): A direction to order by. Must be one of
:attr:`~.firestore.Query.ASCENDING` or
:attr:`~.firestore.Query.DESCENDING`.
Returns:
int: The enum corresponding to ``direction``.
Raises:
ValueError: If ``direction`` is not a valid direction.
"""
if isinstance(direction, int):
return direction # depends on [control=['if'], data=[]]
if direction == Query.ASCENDING:
return enums.StructuredQuery.Direction.ASCENDING # depends on [control=['if'], data=[]]
elif direction == Query.DESCENDING:
return enums.StructuredQuery.Direction.DESCENDING # depends on [control=['if'], data=[]]
else:
msg = _BAD_DIR_STRING.format(direction, Query.ASCENDING, Query.DESCENDING)
raise ValueError(msg) |
def to_selector(self):
    """Return a single-key selector dict for this identity.

    For some reason, the selector for <identity> is
        <identity id="1234" />
    rather than
        <identity by="id"></identity>
    so the first name from ``SELECTORS`` that exists as an attribute on
    this instance is used as the dict key.

    :returns: ``{selector_name: value}`` for the first matching selector.
    :raises ValueError: if none of the names in ``SELECTORS`` is set on
        this instance. (Previously this fell off the loop with ``selector``
        and ``val`` unbound and crashed with an unhelpful ``NameError``.)
    """
    for candidate in self.SELECTORS:
        if hasattr(self, candidate):
            # First matching attribute wins.
            return {candidate: getattr(self, candidate)}
    raise ValueError('none of the selector attributes %r are set' % (self.SELECTORS,))
constant[ For some reason, the selector for <identity> is
<identity id="1234" />
rather than
<identity by="id"></identity>
]
for taget[name[i]] in starred[name[self].SELECTORS] begin[:]
if call[name[hasattr], parameter[name[self], name[i]]] begin[:]
variable[val] assign[=] call[name[getattr], parameter[name[self], name[i]]]
variable[selector] assign[=] name[i]
break
return[dictionary[[<ast.Name object at 0x7da18bc71ea0>], [<ast.Name object at 0x7da18bc73f10>]]] | keyword[def] identifier[to_selector] ( identifier[self] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[self] . identifier[SELECTORS] :
keyword[if] identifier[hasattr] ( identifier[self] , identifier[i] ):
identifier[val] = identifier[getattr] ( identifier[self] , identifier[i] )
identifier[selector] = identifier[i]
keyword[break]
keyword[return] { identifier[selector] : identifier[val] } | def to_selector(self):
""" For some reason, the selector for <identity> is
<identity id="1234" />
rather than
<identity by="id"></identity>
"""
for i in self.SELECTORS:
if hasattr(self, i):
val = getattr(self, i)
selector = i
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return {selector: val} |
def xlsx_part(self):
    """
    Return the related |EmbeddedXlsxPart| object having its rId at
    `c:chartSpace/c:externalData/@rId` or |None| if there is no
    `<c:externalData>` element.
    """
    rId = self._chartSpace.xlsx_part_rId
    return None if rId is None else self._chart_part.related_parts[rId]
constant[
Return the related |EmbeddedXlsxPart| object having its rId at
`c:chartSpace/c:externalData/@rId` or |None| if there is no
`<c:externalData>` element.
]
variable[xlsx_part_rId] assign[=] name[self]._chartSpace.xlsx_part_rId
if compare[name[xlsx_part_rId] is constant[None]] begin[:]
return[constant[None]]
return[call[name[self]._chart_part.related_parts][name[xlsx_part_rId]]] | keyword[def] identifier[xlsx_part] ( identifier[self] ):
literal[string]
identifier[xlsx_part_rId] = identifier[self] . identifier[_chartSpace] . identifier[xlsx_part_rId]
keyword[if] identifier[xlsx_part_rId] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[_chart_part] . identifier[related_parts] [ identifier[xlsx_part_rId] ] | def xlsx_part(self):
"""
Return the related |EmbeddedXlsxPart| object having its rId at
`c:chartSpace/c:externalData/@rId` or |None| if there is no
`<c:externalData>` element.
"""
xlsx_part_rId = self._chartSpace.xlsx_part_rId
if xlsx_part_rId is None:
return None # depends on [control=['if'], data=[]]
return self._chart_part.related_parts[xlsx_part_rId] |
def _x10_command(self, house_code, unit_number, state):
"""Real implementation"""
# log = log or default_logger
log = default_logger
if state.startswith('xdim') or state.startswith('dim') or state.startswith('bright'):
raise NotImplementedError('xdim/dim/bright %r' % ((house_code, unit_num, state), ))
if unit_number is not None:
house_and_unit = '%s%d' % (house_code, unit_number)
else:
raise NotImplementedError('mochad all ON/OFF %r' % ((house_code, unit_number, state), ))
house_and_unit = house_code
house_and_unit = to_bytes(house_and_unit)
# TODO normalize/validate state
state = to_bytes(state)
mochad_cmd = self.default_type + b' ' + house_and_unit + b' ' + state + b'\n' # byte concat works with older Python 3.4
log.debug('mochad send: %r', mochad_cmd)
mochad_host, mochad_port = self.device_address
result = netcat(mochad_host, mochad_port, mochad_cmd)
log.debug('mochad received: %r', result) | def function[_x10_command, parameter[self, house_code, unit_number, state]]:
constant[Real implementation]
variable[log] assign[=] name[default_logger]
if <ast.BoolOp object at 0x7da18f7203d0> begin[:]
<ast.Raise object at 0x7da18f721210>
if compare[name[unit_number] is_not constant[None]] begin[:]
variable[house_and_unit] assign[=] binary_operation[constant[%s%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f7234f0>, <ast.Name object at 0x7da18f721870>]]]
variable[house_and_unit] assign[=] call[name[to_bytes], parameter[name[house_and_unit]]]
variable[state] assign[=] call[name[to_bytes], parameter[name[state]]]
variable[mochad_cmd] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[self].default_type + constant[b' ']] + name[house_and_unit]] + constant[b' ']] + name[state]] + constant[b'\n']]
call[name[log].debug, parameter[constant[mochad send: %r], name[mochad_cmd]]]
<ast.Tuple object at 0x7da18f813ca0> assign[=] name[self].device_address
variable[result] assign[=] call[name[netcat], parameter[name[mochad_host], name[mochad_port], name[mochad_cmd]]]
call[name[log].debug, parameter[constant[mochad received: %r], name[result]]] | keyword[def] identifier[_x10_command] ( identifier[self] , identifier[house_code] , identifier[unit_number] , identifier[state] ):
literal[string]
identifier[log] = identifier[default_logger]
keyword[if] identifier[state] . identifier[startswith] ( literal[string] ) keyword[or] identifier[state] . identifier[startswith] ( literal[string] ) keyword[or] identifier[state] . identifier[startswith] ( literal[string] ):
keyword[raise] identifier[NotImplementedError] ( literal[string] %(( identifier[house_code] , identifier[unit_num] , identifier[state] ),))
keyword[if] identifier[unit_number] keyword[is] keyword[not] keyword[None] :
identifier[house_and_unit] = literal[string] %( identifier[house_code] , identifier[unit_number] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] %(( identifier[house_code] , identifier[unit_number] , identifier[state] ),))
identifier[house_and_unit] = identifier[house_code]
identifier[house_and_unit] = identifier[to_bytes] ( identifier[house_and_unit] )
identifier[state] = identifier[to_bytes] ( identifier[state] )
identifier[mochad_cmd] = identifier[self] . identifier[default_type] + literal[string] + identifier[house_and_unit] + literal[string] + identifier[state] + literal[string]
identifier[log] . identifier[debug] ( literal[string] , identifier[mochad_cmd] )
identifier[mochad_host] , identifier[mochad_port] = identifier[self] . identifier[device_address]
identifier[result] = identifier[netcat] ( identifier[mochad_host] , identifier[mochad_port] , identifier[mochad_cmd] )
identifier[log] . identifier[debug] ( literal[string] , identifier[result] ) | def _x10_command(self, house_code, unit_number, state):
"""Real implementation"""
# log = log or default_logger
log = default_logger
if state.startswith('xdim') or state.startswith('dim') or state.startswith('bright'):
raise NotImplementedError('xdim/dim/bright %r' % ((house_code, unit_num, state),)) # depends on [control=['if'], data=[]]
if unit_number is not None:
house_and_unit = '%s%d' % (house_code, unit_number) # depends on [control=['if'], data=['unit_number']]
else:
raise NotImplementedError('mochad all ON/OFF %r' % ((house_code, unit_number, state),))
house_and_unit = house_code
house_and_unit = to_bytes(house_and_unit)
# TODO normalize/validate state
state = to_bytes(state)
mochad_cmd = self.default_type + b' ' + house_and_unit + b' ' + state + b'\n' # byte concat works with older Python 3.4
log.debug('mochad send: %r', mochad_cmd)
(mochad_host, mochad_port) = self.device_address
result = netcat(mochad_host, mochad_port, mochad_cmd)
log.debug('mochad received: %r', result) |
def write_context_error_report(self, file, context_type):
    """Write a context error report relative to the target or query into the specified filename
    :param file: The name of a file to write the report to
    :param context_type: The type of profile, target or query based
    :type file: string
    :type context_type: string
    """
    if context_type == 'target':
        r = self.get_target_context_error_report()
    elif context_type == 'query':
        r = self.get_query_context_error_report()
    else:
        sys.stderr.write("ERROR invalid type must be target or query\n")
        sys.exit()
    # BUG FIX: use a context manager so the handle is flushed and closed
    # even if a row fails to serialize (the original leaked the open file).
    with open(file, 'w') as of:
        of.write("\t".join(r['header']) + "\n")
        for row in r['data']:
            of.write("\t".join(str(x) for x in row) + "\n")
    return
constant[Write a context error report relative to the target or query into the specified filename
:param file: The name of a file to write the report to
:param context_type: They type of profile, target or query based
:type file: string
:type context_type: string
]
if compare[name[context_type] equal[==] constant[target]] begin[:]
variable[r] assign[=] call[name[self].get_target_context_error_report, parameter[]]
variable[of] assign[=] call[name[open], parameter[name[file], constant[w]]]
call[name[of].write, parameter[binary_operation[call[constant[ ].join, parameter[call[name[r]][constant[header]]]] + constant[
]]]]
for taget[name[row]] in starred[call[name[r]][constant[data]]] begin[:]
call[name[of].write, parameter[binary_operation[call[constant[ ].join, parameter[<ast.ListComp object at 0x7da1b092cd00>]] + constant[
]]]]
return[None] | keyword[def] identifier[write_context_error_report] ( identifier[self] , identifier[file] , identifier[context_type] ):
literal[string]
keyword[if] identifier[context_type] == literal[string] :
identifier[r] = identifier[self] . identifier[get_target_context_error_report] ()
keyword[elif] identifier[context_type] == literal[string] :
identifier[r] = identifier[self] . identifier[get_query_context_error_report] ()
keyword[else] :
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] )
identifier[sys] . identifier[exit] ()
identifier[of] = identifier[open] ( identifier[file] , literal[string] )
identifier[of] . identifier[write] ( literal[string] . identifier[join] ( identifier[r] [ literal[string] ])+ literal[string] )
keyword[for] identifier[row] keyword[in] identifier[r] [ literal[string] ]:
identifier[of] . identifier[write] ( literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[row] ])+ literal[string] )
keyword[return] | def write_context_error_report(self, file, context_type):
"""Write a context error report relative to the target or query into the specified filename
:param file: The name of a file to write the report to
:param context_type: They type of profile, target or query based
:type file: string
:type context_type: string
"""
if context_type == 'target':
r = self.get_target_context_error_report() # depends on [control=['if'], data=[]]
elif context_type == 'query':
r = self.get_query_context_error_report() # depends on [control=['if'], data=[]]
else:
sys.stderr.write('ERROR invalid type must be target or query\n')
sys.exit()
of = open(file, 'w')
of.write('\t'.join(r['header']) + '\n')
for row in r['data']:
of.write('\t'.join([str(x) for x in row]) + '\n') # depends on [control=['for'], data=['row']]
return |
def _leapfrog(value_and_gradients_fn,
current_state,
current_grads_target_log_prob,
current_momentum,
step_size):
"""Runs one step of leapfrog integration."""
mid_momentum = [
m + 0.5 * step * g for m, step, g in
zip(current_momentum, step_size, current_grads_target_log_prob)]
next_state = [
s + step * m for s, step, m in
zip(current_state, step_size, mid_momentum)]
next_target_log_prob, next_grads_target_log_prob = value_and_gradients_fn(
*next_state)
next_momentum = [
m + 0.5 * step * g for m, step, g in
zip(mid_momentum, step_size, next_grads_target_log_prob)]
return [
next_state,
next_target_log_prob,
next_grads_target_log_prob,
next_momentum,
] | def function[_leapfrog, parameter[value_and_gradients_fn, current_state, current_grads_target_log_prob, current_momentum, step_size]]:
constant[Runs one step of leapfrog integration.]
variable[mid_momentum] assign[=] <ast.ListComp object at 0x7da1b022b640>
variable[next_state] assign[=] <ast.ListComp object at 0x7da1b022a890>
<ast.Tuple object at 0x7da1b022a590> assign[=] call[name[value_and_gradients_fn], parameter[<ast.Starred object at 0x7da1b022a470>]]
variable[next_momentum] assign[=] <ast.ListComp object at 0x7da1b022a380>
return[list[[<ast.Name object at 0x7da1b0228940>, <ast.Name object at 0x7da1b02287c0>, <ast.Name object at 0x7da1b02287f0>, <ast.Name object at 0x7da1b0228970>]]] | keyword[def] identifier[_leapfrog] ( identifier[value_and_gradients_fn] ,
identifier[current_state] ,
identifier[current_grads_target_log_prob] ,
identifier[current_momentum] ,
identifier[step_size] ):
literal[string]
identifier[mid_momentum] =[
identifier[m] + literal[int] * identifier[step] * identifier[g] keyword[for] identifier[m] , identifier[step] , identifier[g] keyword[in]
identifier[zip] ( identifier[current_momentum] , identifier[step_size] , identifier[current_grads_target_log_prob] )]
identifier[next_state] =[
identifier[s] + identifier[step] * identifier[m] keyword[for] identifier[s] , identifier[step] , identifier[m] keyword[in]
identifier[zip] ( identifier[current_state] , identifier[step_size] , identifier[mid_momentum] )]
identifier[next_target_log_prob] , identifier[next_grads_target_log_prob] = identifier[value_and_gradients_fn] (
* identifier[next_state] )
identifier[next_momentum] =[
identifier[m] + literal[int] * identifier[step] * identifier[g] keyword[for] identifier[m] , identifier[step] , identifier[g] keyword[in]
identifier[zip] ( identifier[mid_momentum] , identifier[step_size] , identifier[next_grads_target_log_prob] )]
keyword[return] [
identifier[next_state] ,
identifier[next_target_log_prob] ,
identifier[next_grads_target_log_prob] ,
identifier[next_momentum] ,
] | def _leapfrog(value_and_gradients_fn, current_state, current_grads_target_log_prob, current_momentum, step_size):
"""Runs one step of leapfrog integration."""
mid_momentum = [m + 0.5 * step * g for (m, step, g) in zip(current_momentum, step_size, current_grads_target_log_prob)]
next_state = [s + step * m for (s, step, m) in zip(current_state, step_size, mid_momentum)]
(next_target_log_prob, next_grads_target_log_prob) = value_and_gradients_fn(*next_state)
next_momentum = [m + 0.5 * step * g for (m, step, g) in zip(mid_momentum, step_size, next_grads_target_log_prob)]
return [next_state, next_target_log_prob, next_grads_target_log_prob, next_momentum] |
def write(self, data):
    """Sends some data to the client.

    There is no separate 'client disconnected' handling for sends: after
    the first socket error every later write is silently dropped (the
    server sends little data anyway), and the read path will then notice
    the broken connection and shut down.
    """
    if self._ignore_write_operations:
        return
    assert self.is_connected()
    payload = data.encode('ascii')
    try:
        self._connection.send(payload)
    except socket.error:
        # First send failure: close and ignore all subsequent writes.
        self.close()
        self._ignore_write_operations = True
constant[Sends some data to the client.]
if name[self]._ignore_write_operations begin[:]
return[None]
assert[call[name[self].is_connected, parameter[]]]
<ast.Try object at 0x7da1b0860f40> | keyword[def] identifier[write] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] identifier[self] . identifier[_ignore_write_operations] :
keyword[return]
keyword[assert] identifier[self] . identifier[is_connected] ()
keyword[try] :
identifier[self] . identifier[_connection] . identifier[send] ( identifier[data] . identifier[encode] ( literal[string] ))
keyword[except] identifier[socket] . identifier[error] :
identifier[self] . identifier[close] ()
identifier[self] . identifier[_ignore_write_operations] = keyword[True] | def write(self, data):
"""Sends some data to the client."""
# I don't want to add a separate 'Client disconnected' logic for sending.
# Therefore I just ignore any writes after the first error - the server
# won't send that much data anyway. Afterwards the read will detect the
# broken connection and we quit.
if self._ignore_write_operations:
return # depends on [control=['if'], data=[]]
assert self.is_connected()
try:
self._connection.send(data.encode('ascii')) # depends on [control=['try'], data=[]]
except socket.error:
self.close()
self._ignore_write_operations = True # depends on [control=['except'], data=[]] |
def run_subprocess(command, return_code=False, **kwargs):
    """Run command using subprocess.Popen
    Run command and wait for command to complete. If the return code was zero
    then return, otherwise raise CalledProcessError.
    By default, this will also add stdout= and stderr=subproces.PIPE
    to the call to Popen to suppress printing to the terminal.
    Parameters
    ----------
    command : list of str
        Command to run as subprocess (see subprocess.Popen documentation).
    return_code : bool
        If True, the returncode will be returned, and no error checking
        will be performed (so this function should always return without
        error).
    **kwargs : dict
        Additional kwargs to pass to ``subprocess.Popen``.
    Returns
    -------
    stdout : str
        Stdout returned by the process.
    stderr : str
        Stderr returned by the process.
    code : int
        The command exit code. Only returned if ``return_code`` is True.
    """
    # code adapted with permission from mne-python
    use_kwargs = dict(stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    use_kwargs.update(kwargs)
    p = subprocess.Popen(command, **use_kwargs)
    output = p.communicate()
    # communicate() may return bytes, str, or None depending on the kwargs
    # passed to Popen(). Convert all to unicode str:
    output = ['' if s is None else s for s in output]
    output = [s.decode('utf-8') if isinstance(s, bytes) else s for s in output]
    output = tuple(output)
    if not return_code and p.returncode:
        print(output[0])
        print(output[1])
        # inspect.getargspec() was deprecated and removed in Python 3.11;
        # inspect.signature() performs the same "does CalledProcessError
        # accept an 'output' argument?" probe on every supported version.
        err_fun = subprocess.CalledProcessError.__init__
        if 'output' in inspect.signature(err_fun).parameters:
            raise subprocess.CalledProcessError(p.returncode, command, output)
        else:
            raise subprocess.CalledProcessError(p.returncode, command)
    if return_code:
        output = output + (p.returncode,)
    return output
constant[Run command using subprocess.Popen
Run command and wait for command to complete. If the return code was zero
then return, otherwise raise CalledProcessError.
By default, this will also add stdout= and stderr=subproces.PIPE
to the call to Popen to suppress printing to the terminal.
Parameters
----------
command : list of str
Command to run as subprocess (see subprocess.Popen documentation).
return_code : bool
If True, the returncode will be returned, and no error checking
will be performed (so this function should always return without
error).
**kwargs : dict
Additional kwargs to pass to ``subprocess.Popen``.
Returns
-------
stdout : str
Stdout returned by the process.
stderr : str
Stderr returned by the process.
code : int
The command exit code. Only returned if ``return_code`` is True.
]
variable[use_kwargs] assign[=] call[name[dict], parameter[]]
call[name[use_kwargs].update, parameter[name[kwargs]]]
variable[p] assign[=] call[name[subprocess].Popen, parameter[name[command]]]
variable[output] assign[=] call[name[p].communicate, parameter[]]
variable[output] assign[=] <ast.ListComp object at 0x7da1b0e7e290>
variable[output] assign[=] <ast.ListComp object at 0x7da1b0ea23b0>
variable[output] assign[=] call[name[tuple], parameter[name[output]]]
if <ast.BoolOp object at 0x7da1b0ea05b0> begin[:]
call[name[print], parameter[call[name[output]][constant[0]]]]
call[name[print], parameter[call[name[output]][constant[1]]]]
variable[err_fun] assign[=] name[subprocess].CalledProcessError.__init__
if compare[constant[output] in call[name[inspect].getargspec, parameter[name[err_fun]]].args] begin[:]
<ast.Raise object at 0x7da1b0ea0b50>
if name[return_code] begin[:]
variable[output] assign[=] binary_operation[name[output] + tuple[[<ast.Attribute object at 0x7da1b0ea06d0>]]]
return[name[output]] | keyword[def] identifier[run_subprocess] ( identifier[command] , identifier[return_code] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[use_kwargs] = identifier[dict] ( identifier[stderr] = identifier[subprocess] . identifier[PIPE] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] )
identifier[use_kwargs] . identifier[update] ( identifier[kwargs] )
identifier[p] = identifier[subprocess] . identifier[Popen] ( identifier[command] ,** identifier[use_kwargs] )
identifier[output] = identifier[p] . identifier[communicate] ()
identifier[output] =[ literal[string] keyword[if] identifier[s] keyword[is] keyword[None] keyword[else] identifier[s] keyword[for] identifier[s] keyword[in] identifier[output] ]
identifier[output] =[ identifier[s] . identifier[decode] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[s] , identifier[bytes] ) keyword[else] identifier[s] keyword[for] identifier[s] keyword[in] identifier[output] ]
identifier[output] = identifier[tuple] ( identifier[output] )
keyword[if] keyword[not] identifier[return_code] keyword[and] identifier[p] . identifier[returncode] :
identifier[print] ( identifier[output] [ literal[int] ])
identifier[print] ( identifier[output] [ literal[int] ])
identifier[err_fun] = identifier[subprocess] . identifier[CalledProcessError] . identifier[__init__]
keyword[if] literal[string] keyword[in] identifier[inspect] . identifier[getargspec] ( identifier[err_fun] ). identifier[args] :
keyword[raise] identifier[subprocess] . identifier[CalledProcessError] ( identifier[p] . identifier[returncode] , identifier[command] , identifier[output] )
keyword[else] :
keyword[raise] identifier[subprocess] . identifier[CalledProcessError] ( identifier[p] . identifier[returncode] , identifier[command] )
keyword[if] identifier[return_code] :
identifier[output] = identifier[output] +( identifier[p] . identifier[returncode] ,)
keyword[return] identifier[output] | def run_subprocess(command, return_code=False, **kwargs):
"""Run command using subprocess.Popen
Run command and wait for command to complete. If the return code was zero
then return, otherwise raise CalledProcessError.
By default, this will also add stdout= and stderr=subproces.PIPE
to the call to Popen to suppress printing to the terminal.
Parameters
----------
command : list of str
Command to run as subprocess (see subprocess.Popen documentation).
return_code : bool
If True, the returncode will be returned, and no error checking
will be performed (so this function should always return without
error).
**kwargs : dict
Additional kwargs to pass to ``subprocess.Popen``.
Returns
-------
stdout : str
Stdout returned by the process.
stderr : str
Stderr returned by the process.
code : int
The command exit code. Only returned if ``return_code`` is True.
"""
# code adapted with permission from mne-python
use_kwargs = dict(stderr=subprocess.PIPE, stdout=subprocess.PIPE)
use_kwargs.update(kwargs)
p = subprocess.Popen(command, **use_kwargs)
output = p.communicate()
# communicate() may return bytes, str, or None depending on the kwargs
# passed to Popen(). Convert all to unicode str:
output = ['' if s is None else s for s in output]
output = [s.decode('utf-8') if isinstance(s, bytes) else s for s in output]
output = tuple(output)
if not return_code and p.returncode:
print(output[0])
print(output[1])
err_fun = subprocess.CalledProcessError.__init__
if 'output' in inspect.getargspec(err_fun).args:
raise subprocess.CalledProcessError(p.returncode, command, output) # depends on [control=['if'], data=[]]
else:
raise subprocess.CalledProcessError(p.returncode, command) # depends on [control=['if'], data=[]]
if return_code:
output = output + (p.returncode,) # depends on [control=['if'], data=[]]
return output |
def draw_line(self, img, pixmapper, pt1, pt2, colour, linewidth):
    '''draw a line on the image'''
    start = pixmapper(pt1)
    end = pixmapper(pt2)
    width, height = image_shape(img)
    # Clip the segment to the image rectangle before drawing.
    ret, start, end = cv2.clipLine((0, 0, width, height), start, end)
    if ret is False:
        # Segment lies entirely outside the image; nothing to draw.
        return
    cv2.line(img, start, end, colour, linewidth)
    # Mark the endpoint with a small circle.
    cv2.circle(img, end, linewidth * 2, colour)
constant[draw a line on the image]
variable[pix1] assign[=] call[name[pixmapper], parameter[name[pt1]]]
variable[pix2] assign[=] call[name[pixmapper], parameter[name[pt2]]]
<ast.Tuple object at 0x7da1b160af80> assign[=] call[name[image_shape], parameter[name[img]]]
<ast.Tuple object at 0x7da1b160a8c0> assign[=] call[name[cv2].clipLine, parameter[tuple[[<ast.Constant object at 0x7da1b1609900>, <ast.Constant object at 0x7da1b1609b10>, <ast.Name object at 0x7da1b1609690>, <ast.Name object at 0x7da1b1609000>]], name[pix1], name[pix2]]]
if compare[name[ret] is constant[False]] begin[:]
return[None]
call[name[cv2].line, parameter[name[img], name[pix1], name[pix2], name[colour], name[linewidth]]]
call[name[cv2].circle, parameter[name[img], name[pix2], binary_operation[name[linewidth] * constant[2]], name[colour]]] | keyword[def] identifier[draw_line] ( identifier[self] , identifier[img] , identifier[pixmapper] , identifier[pt1] , identifier[pt2] , identifier[colour] , identifier[linewidth] ):
literal[string]
identifier[pix1] = identifier[pixmapper] ( identifier[pt1] )
identifier[pix2] = identifier[pixmapper] ( identifier[pt2] )
( identifier[width] , identifier[height] )= identifier[image_shape] ( identifier[img] )
( identifier[ret] , identifier[pix1] , identifier[pix2] )= identifier[cv2] . identifier[clipLine] (( literal[int] , literal[int] , identifier[width] , identifier[height] ), identifier[pix1] , identifier[pix2] )
keyword[if] identifier[ret] keyword[is] keyword[False] :
keyword[return]
identifier[cv2] . identifier[line] ( identifier[img] , identifier[pix1] , identifier[pix2] , identifier[colour] , identifier[linewidth] )
identifier[cv2] . identifier[circle] ( identifier[img] , identifier[pix2] , identifier[linewidth] * literal[int] , identifier[colour] ) | def draw_line(self, img, pixmapper, pt1, pt2, colour, linewidth):
"""draw a line on the image"""
pix1 = pixmapper(pt1)
pix2 = pixmapper(pt2)
(width, height) = image_shape(img)
(ret, pix1, pix2) = cv2.clipLine((0, 0, width, height), pix1, pix2)
if ret is False:
return # depends on [control=['if'], data=[]]
cv2.line(img, pix1, pix2, colour, linewidth)
cv2.circle(img, pix2, linewidth * 2, colour) |
def match_ref(self, ref):
    """ Check if the ref matches one the concept's aliases.
    If so, mark the matched ref so that we use it as the column label.
    """
    if ref not in self.refs:
        return False
    # Remember which alias matched; it becomes the column label.
    self._matched_ref = ref
    return True
return False | def function[match_ref, parameter[self, ref]]:
constant[ Check if the ref matches one the concept's aliases.
If so, mark the matched ref so that we use it as the column label.
]
if compare[name[ref] in name[self].refs] begin[:]
name[self]._matched_ref assign[=] name[ref]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[match_ref] ( identifier[self] , identifier[ref] ):
literal[string]
keyword[if] identifier[ref] keyword[in] identifier[self] . identifier[refs] :
identifier[self] . identifier[_matched_ref] = identifier[ref]
keyword[return] keyword[True]
keyword[return] keyword[False] | def match_ref(self, ref):
""" Check if the ref matches one the concept's aliases.
If so, mark the matched ref so that we use it as the column label.
"""
if ref in self.refs:
self._matched_ref = ref
return True # depends on [control=['if'], data=['ref']]
return False |
def add_flags(self, *flags):
    """Adds one or more flags to the query.

    Flags may be given as separate positional arguments or as a single
    list/tuple of flag names.  Each name is prefixed with ``--``:

        current-patch-set -> --current-patch-set

    Returns self so calls can be chained.
    """
    # The old `isinstance(flags, (list, tuple))` guard was dead code:
    # `*flags` always produces a tuple.  The intent was to accept a single
    # list/tuple argument as the collection of flags, so unwrap that case.
    if len(flags) == 1 and isinstance(flags[0], (list, tuple)):
        flags = flags[0]
    self.extend("--%s" % f for f in flags)
    return self
constant[Adds one or more flags to the query.
For example:
current-patch-set -> --current-patch-set
]
if <ast.UnaryOp object at 0x7da20c6c4520> begin[:]
variable[flags] assign[=] list[[<ast.Call object at 0x7da20c6c5270>]]
call[name[self].extend, parameter[<ast.ListComp object at 0x7da20c6c4dc0>]]
return[name[self]] | keyword[def] identifier[add_flags] ( identifier[self] ,* identifier[flags] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[flags] ,( identifier[list] , identifier[tuple] )):
identifier[flags] =[ identifier[str] ( identifier[flags] )]
identifier[self] . identifier[extend] ([ literal[string] % identifier[f] keyword[for] identifier[f] keyword[in] identifier[flags] ])
keyword[return] identifier[self] | def add_flags(self, *flags):
"""Adds one or more flags to the query.
For example:
current-patch-set -> --current-patch-set
"""
if not isinstance(flags, (list, tuple)):
flags = [str(flags)] # depends on [control=['if'], data=[]]
self.extend(['--%s' % f for f in flags])
return self |
def get_fieldsets(self, request, obj=None):
    ''' Override polymorphic default to put the subclass-specific fields first '''
    # If subclass declares fieldsets, this is respected.
    # BUGFIX: the attribute probed here was previously misspelled as
    # 'declared_fieldset' (singular), so the hasattr() check never matched
    # and declared fieldsets were silently ignored.
    if (hasattr(self, 'declared_fieldsets') and self.declared_fieldsets) \
            or not self.base_fieldsets:
        return super(PolymorphicChildModelAdmin, self).get_fieldsets(request, obj)
    other_fields = self.get_subclass_fields(request, obj)
    if other_fields:
        # Subclass-specific fields go first, under their own heading.
        return (
            (self.extra_fieldset_title, {'fields': other_fields}),
        ) + self.base_fieldsets
    else:
        return self.base_fieldsets
constant[ Override polymorphic default to put the subclass-specific fields first ]
if <ast.BoolOp object at 0x7da18dc98df0> begin[:]
return[call[call[name[super], parameter[name[PolymorphicChildModelAdmin], name[self]]].get_fieldsets, parameter[name[request], name[obj]]]]
variable[other_fields] assign[=] call[name[self].get_subclass_fields, parameter[name[request], name[obj]]]
if name[other_fields] begin[:]
return[binary_operation[tuple[[<ast.Tuple object at 0x7da18dc9bdc0>]] + name[self].base_fieldsets]] | keyword[def] identifier[get_fieldsets] ( identifier[self] , identifier[request] , identifier[obj] = keyword[None] ):
literal[string]
keyword[if] ( identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[declared_fieldsets] ) keyword[or] keyword[not] identifier[self] . identifier[base_fieldsets] :
keyword[return] identifier[super] ( identifier[PolymorphicChildModelAdmin] , identifier[self] ). identifier[get_fieldsets] ( identifier[request] , identifier[obj] )
identifier[other_fields] = identifier[self] . identifier[get_subclass_fields] ( identifier[request] , identifier[obj] )
keyword[if] identifier[other_fields] :
keyword[return] (
( identifier[self] . identifier[extra_fieldset_title] ,{ literal[string] : identifier[other_fields] }),
)+ identifier[self] . identifier[base_fieldsets]
keyword[else] :
keyword[return] identifier[self] . identifier[base_fieldsets] | def get_fieldsets(self, request, obj=None):
""" Override polymorphic default to put the subclass-specific fields first """
# If subclass declares fieldsets, this is respected
if hasattr(self, 'declared_fieldset') and self.declared_fieldsets or not self.base_fieldsets:
return super(PolymorphicChildModelAdmin, self).get_fieldsets(request, obj) # depends on [control=['if'], data=[]]
other_fields = self.get_subclass_fields(request, obj)
if other_fields:
return ((self.extra_fieldset_title, {'fields': other_fields}),) + self.base_fieldsets # depends on [control=['if'], data=[]]
else:
return self.base_fieldsets |
def resolve(self, token):
    """Attempts to resolve the :class:`SymbolToken` against the current table.
    If the ``text`` is not None, the token is returned, otherwise, a token
    in the table is attempted to be retrieved. If not token is found, then
    this method will raise.
    """
    if token.text is not None:
        # Already carries text; nothing to look up.
        return token
    resolved = self.symbol_table.get(token.sid)
    if resolved is not None:
        return resolved
    raise IonException('Out of range SID: %d' % token.sid)
constant[Attempts to resolve the :class:`SymbolToken` against the current table.
If the ``text`` is not None, the token is returned, otherwise, a token
in the table is attempted to be retrieved. If not token is found, then
this method will raise.
]
if compare[name[token].text is_not constant[None]] begin[:]
return[name[token]]
variable[resolved_token] assign[=] call[name[self].symbol_table.get, parameter[name[token].sid, constant[None]]]
if compare[name[resolved_token] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b15ad570>
return[name[resolved_token]] | keyword[def] identifier[resolve] ( identifier[self] , identifier[token] ):
literal[string]
keyword[if] identifier[token] . identifier[text] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[token]
identifier[resolved_token] = identifier[self] . identifier[symbol_table] . identifier[get] ( identifier[token] . identifier[sid] , keyword[None] )
keyword[if] identifier[resolved_token] keyword[is] keyword[None] :
keyword[raise] identifier[IonException] ( literal[string] % identifier[token] . identifier[sid] )
keyword[return] identifier[resolved_token] | def resolve(self, token):
"""Attempts to resolve the :class:`SymbolToken` against the current table.
If the ``text`` is not None, the token is returned, otherwise, a token
in the table is attempted to be retrieved. If not token is found, then
this method will raise.
"""
if token.text is not None:
return token # depends on [control=['if'], data=[]]
resolved_token = self.symbol_table.get(token.sid, None)
if resolved_token is None:
raise IonException('Out of range SID: %d' % token.sid) # depends on [control=['if'], data=[]]
return resolved_token |
def get_open_fds():
    """
    Return the number of open file descriptors for current process
    .. warning: will only work on UNIX-like OS-es.
    """
    pid = os.getpid()
    raw = subprocess.check_output(["lsof", '-w', '-Ff', "-p", str(pid)])
    count = 0
    for entry in raw.decode("utf-8").split('\n'):
        # `lsof -Ff` emits one 'f<fd>' record per descriptor; count only
        # the numeric ones (skipping 'cwd', 'txt', etc.).
        if entry and entry[0] == 'f' and entry[1:].isdigit():
            count += 1
    return count
constant[
Return the number of open file descriptors for current process
.. warning: will only work on UNIX-like OS-es.
]
variable[pid] assign[=] call[name[os].getpid, parameter[]]
variable[procs] assign[=] call[name[subprocess].check_output, parameter[list[[<ast.Constant object at 0x7da1b13471f0>, <ast.Constant object at 0x7da1b1345180>, <ast.Constant object at 0x7da1b1344400>, <ast.Constant object at 0x7da1b1344610>, <ast.Call object at 0x7da1b1344be0>]]]]
variable[procs] assign[=] call[name[procs].decode, parameter[constant[utf-8]]]
return[call[name[len], parameter[<ast.ListComp object at 0x7da1b1345090>]]] | keyword[def] identifier[get_open_fds] ():
literal[string]
identifier[pid] = identifier[os] . identifier[getpid] ()
identifier[procs] = identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] , literal[string] , literal[string] , identifier[str] ( identifier[pid] )])
identifier[procs] = identifier[procs] . identifier[decode] ( literal[string] )
keyword[return] identifier[len] ([ identifier[s] keyword[for] identifier[s] keyword[in] identifier[procs] . identifier[split] ( literal[string] )
keyword[if] identifier[s] keyword[and] identifier[s] [ literal[int] ]== literal[string] keyword[and] identifier[s] [ literal[int] :]. identifier[isdigit] ()]) | def get_open_fds():
"""
Return the number of open file descriptors for current process
.. warning: will only work on UNIX-like OS-es.
"""
pid = os.getpid()
procs = subprocess.check_output(['lsof', '-w', '-Ff', '-p', str(pid)])
procs = procs.decode('utf-8')
return len([s for s in procs.split('\n') if s and s[0] == 'f' and s[1:].isdigit()]) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.