code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def check_datafile_present_and_download(path, backup_url=None):
    """Check whether a data file is present, otherwise download it.

    Parameters
    ----------
    path : str or Path
        Location where the file is expected (and saved to after download).
    backup_url : str, optional
        URL to download the file from when it is not present locally.

    Returns
    -------
    bool
        True if the file is present (or was just downloaded), False if it
        is missing and no ``backup_url`` was given.
    """
    path = Path(path)
    if path.is_file():
        return True
    if backup_url is None:
        return False
    logg.info('try downloading from url\n' + backup_url + '\n' +
              '... this may take a while but only happens once')
    if not path.parent.is_dir():
        logg.info('creating directory', str(path.parent) + '/', 'for saving data')
    # exist_ok avoids a race if the directory appears between the check above
    # (which exists only for the log message) and this call.
    path.parent.mkdir(parents=True, exist_ok=True)
    from urllib.request import urlretrieve
    try:
        urlretrieve(backup_url, str(path), reporthook=download_progress)
    except Exception:
        # do not leave a partially downloaded file behind on failure
        if path.is_file():
            path.unlink()
        raise
    logg.info('')
    return True
|
def function[check_datafile_present_and_download, parameter[path, backup_url]]:
constant[Check whether the file is present, otherwise download.
]
variable[path] assign[=] call[name[Path], parameter[name[path]]]
if call[name[path].is_file, parameter[]] begin[:]
return[constant[True]]
if compare[name[backup_url] is constant[None]] begin[:]
return[constant[False]]
call[name[logg].info, parameter[binary_operation[binary_operation[binary_operation[constant[try downloading from url
] + name[backup_url]] + constant[
]] + constant[... this may take a while but only happens once]]]]
if <ast.UnaryOp object at 0x7da2054a58d0> begin[:]
call[name[logg].info, parameter[constant[creating directory], binary_operation[call[name[str], parameter[name[path].parent]] + constant[/]], constant[for saving data]]]
call[name[path].parent.mkdir, parameter[]]
from relative_module[urllib.request] import module[urlretrieve]
call[name[urlretrieve], parameter[name[backup_url], call[name[str], parameter[name[path]]]]]
call[name[logg].info, parameter[constant[]]]
return[constant[True]]
|
keyword[def] identifier[check_datafile_present_and_download] ( identifier[path] , identifier[backup_url] = keyword[None] ):
literal[string]
identifier[path] = identifier[Path] ( identifier[path] )
keyword[if] identifier[path] . identifier[is_file] (): keyword[return] keyword[True]
keyword[if] identifier[backup_url] keyword[is] keyword[None] : keyword[return] keyword[False]
identifier[logg] . identifier[info] ( literal[string] + identifier[backup_url] + literal[string] +
literal[string] )
keyword[if] keyword[not] identifier[path] . identifier[parent] . identifier[is_dir] ():
identifier[logg] . identifier[info] ( literal[string] , identifier[str] ( identifier[path] . identifier[parent] )+ literal[string] , literal[string] )
identifier[path] . identifier[parent] . identifier[mkdir] ( identifier[parents] = keyword[True] )
keyword[from] identifier[urllib] . identifier[request] keyword[import] identifier[urlretrieve]
identifier[urlretrieve] ( identifier[backup_url] , identifier[str] ( identifier[path] ), identifier[reporthook] = identifier[download_progress] )
identifier[logg] . identifier[info] ( literal[string] )
keyword[return] keyword[True]
|
def check_datafile_present_and_download(path, backup_url=None):
"""Check whether the file is present, otherwise download.
"""
path = Path(path)
if path.is_file():
return True # depends on [control=['if'], data=[]]
if backup_url is None:
return False # depends on [control=['if'], data=[]]
logg.info('try downloading from url\n' + backup_url + '\n' + '... this may take a while but only happens once')
if not path.parent.is_dir():
logg.info('creating directory', str(path.parent) + '/', 'for saving data')
path.parent.mkdir(parents=True) # depends on [control=['if'], data=[]]
from urllib.request import urlretrieve
urlretrieve(backup_url, str(path), reporthook=download_progress)
logg.info('')
return True
|
def calculate(*args, **kwargs):
    # NOTE: raw docstring — it contains \* sequences which are invalid escape
    # sequences in a normal string literal (SyntaxWarning on modern Python).
    r'''
    Calculates and returns a requested quantity from quantities passed in as
    keyword arguments.
    Parameters
    ----------
    \*args : string
        Names of quantities to be calculated.
    assumptions : tuple, optional
        Strings specifying which assumptions to enable. Overrides the default
        assumptions. See below for a list of default assumptions.
    add_assumptions : tuple, optional
        Strings specifying assumptions to use in addition to the default
        assumptions. May not be given in combination with the assumptions kwarg.
    remove_assumptions : tuple, optional
        Strings specifying assumptions not to use from the default assumptions.
        May not be given in combination with the assumptions kwarg. May not
        contain strings that are contained in add_assumptions, if given.
    \*\*kwargs : ndarray, optional
        Keyword arguments used to pass in arrays of data that correspond to
        quantities used for calculations, or unit specifications for quantities.
        For a complete list of kwargs that may be used, see the Quantity Parameters
        section below.
    Returns
    -------
    quantity : ndarray
        Calculated quantity.
        Return type is the same as quantity parameter types.
        If multiple quantities are requested, returns a tuple containing the
        quantities.
    Notes
    -----
    Calculating multiple quantities at once can avoid re-computing intermediate
    quantities, but requires more memory.
    **Quantity kwargs**
    <quantity parameter list goes here>
    In addition to the quantities above, kwargs of the form <quantity>_unit or
    <quantity>_units can be used with a string specifying a unit for the quantity.
    This will cause input data for that quantity to be assumed to be in that
    unit, and output data for that quantity to be given in that unit. Note this
    must be specified separately for *each* quantity. Acceptable units are the
    units available in the Pint package, with the exception that RH can be in
    units of "fraction" or "percent".
    **Assumptions**
    <default assumptions list goes here>
    **Assumption descriptions**
    <assumptions list goes here>
    Examples
    --------
    Calculating pressure from virtual temperature and density:
    >>> calculate('p', Tv=273., rho=1.27)
    99519.638400000011
    Same calculation, but also returning a list of functions used:
    >>> p, funcs = calculate('p', Tv=273., rho=1.27, debug=True)
    >>> funcs
    (<function atmos.equations.p_from_rho_Tv_ideal_gas>,)
    Same calculation with temperature instead, ignoring virtual temperature
    correction:
    >>> calculate('p', T=273., rho=1.27, add_assumptions=('Tv equals T',))
    99519.638400000011
    '''
    # at least one quantity name must be requested
    if len(args) == 0:
        raise ValueError('must specify quantities to calculate')
    # initialize a solver to do the work
    solver = FluidSolver(**kwargs)
    # get the output
    return solver.calculate(*args)
|
def function[calculate, parameter[]]:
constant[
Calculates and returns a requested quantity from quantities passed in as
keyword arguments.
Parameters
----------
\*args : string
Names of quantities to be calculated.
assumptions : tuple, optional
Strings specifying which assumptions to enable. Overrides the default
assumptions. See below for a list of default assumptions.
add_assumptions : tuple, optional
Strings specifying assumptions to use in addition to the default
assumptions. May not be given in combination with the assumptions kwarg.
remove_assumptions : tuple, optional
Strings specifying assumptions not to use from the default assumptions.
May not be given in combination with the assumptions kwarg. May not
contain strings that are contained in add_assumptions, if given.
\*\*kwargs : ndarray, optional
Keyword arguments used to pass in arrays of data that correspond to
quantities used for calculations, or unit specifications for quantities.
For a complete list of kwargs that may be used, see the Quantity Parameters
section below.
Returns
-------
quantity : ndarray
Calculated quantity.
Return type is the same as quantity parameter types.
If multiple quantities are requested, returns a tuple containing the
quantities.
Notes
-----
Calculating multiple quantities at once can avoid re-computing intermediate
quantities, but requires more memory.
**Quantity kwargs**
<quantity parameter list goes here>
In addition to the quantities above, kwargs of the form <quantity>_unit or
<quantity>_units can be used with a string specifying a unit for the quantity.
This will cause input data for that quantity to be assumed to be in that
unit, and output data for that quantity to be given in that unit. Note this
must be specified separately for *each* quantity. Acceptable units are the
units available in the Pint package, with the exception that RH can be in
units of "fraction" or "percent".
**Assumptions**
<default assumptions list goes here>
**Assumption descriptions**
<assumptions list goes here>
Examples
--------
Calculating pressure from virtual temperature and density:
>>> calculate('p', Tv=273., rho=1.27)
99519.638400000011
Same calculation, but also returning a list of functions used:
>>> p, funcs = calculate('p', Tv=273., rho=1.27, debug=True)
>>> funcs
(<function atmos.equations.p_from_rho_Tv_ideal_gas>,)
Same calculation with temperature instead, ignoring virtual temperature
correction:
>>> calculate('p', T=273., rho=1.27, add_assumptions=('Tv equals T',))
99519.638400000011
]
if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da20c993eb0>
variable[solver] assign[=] call[name[FluidSolver], parameter[]]
return[call[name[solver].calculate, parameter[<ast.Starred object at 0x7da20c992ce0>]]]
|
keyword[def] identifier[calculate] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[len] ( identifier[args] )== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[solver] = identifier[FluidSolver] (** identifier[kwargs] )
keyword[return] identifier[solver] . identifier[calculate] (* identifier[args] )
|
def calculate(*args, **kwargs):
"""
Calculates and returns a requested quantity from quantities passed in as
keyword arguments.
Parameters
----------
\\*args : string
Names of quantities to be calculated.
assumptions : tuple, optional
Strings specifying which assumptions to enable. Overrides the default
assumptions. See below for a list of default assumptions.
add_assumptions : tuple, optional
Strings specifying assumptions to use in addition to the default
assumptions. May not be given in combination with the assumptions kwarg.
remove_assumptions : tuple, optional
Strings specifying assumptions not to use from the default assumptions.
May not be given in combination with the assumptions kwarg. May not
contain strings that are contained in add_assumptions, if given.
\\*\\*kwargs : ndarray, optional
Keyword arguments used to pass in arrays of data that correspond to
quantities used for calculations, or unit specifications for quantities.
For a complete list of kwargs that may be used, see the Quantity Parameters
section below.
Returns
-------
quantity : ndarray
Calculated quantity.
Return type is the same as quantity parameter types.
If multiple quantities are requested, returns a tuple containing the
quantities.
Notes
-----
Calculating multiple quantities at once can avoid re-computing intermediate
quantities, but requires more memory.
**Quantity kwargs**
<quantity parameter list goes here>
In addition to the quantities above, kwargs of the form <quantity>_unit or
<quantity>_units can be used with a string specifying a unit for the quantity.
This will cause input data for that quantity to be assumed to be in that
unit, and output data for that quantity to be given in that unit. Note this
must be specified separately for *each* quantity. Acceptable units are the
units available in the Pint package, with the exception that RH can be in
units of "fraction" or "percent".
**Assumptions**
<default assumptions list goes here>
**Assumption descriptions**
<assumptions list goes here>
Examples
--------
Calculating pressure from virtual temperature and density:
>>> calculate('p', Tv=273., rho=1.27)
99519.638400000011
Same calculation, but also returning a list of functions used:
>>> p, funcs = calculate('p', Tv=273., rho=1.27, debug=True)
>>> funcs
(<function atmos.equations.p_from_rho_Tv_ideal_gas>,)
Same calculation with temperature instead, ignoring virtual temperature
correction:
>>> calculate('p', T=273., rho=1.27, add_assumptions=('Tv equals T',))
99519.638400000011
"""
if len(args) == 0:
raise ValueError('must specify quantities to calculate') # depends on [control=['if'], data=[]]
# initialize a solver to do the work
solver = FluidSolver(**kwargs)
# get the output
return solver.calculate(*args)
|
def full_name(self):
    """Get the name of the day of the week.
    Returns
    -------
    StringValue
        The name of the day of the week
    """
    # imported lazily so merely loading this module does not pull in ops
    import ibis.expr.operations as ops
    day_of_week_arg = self.op().arg
    return ops.DayOfWeekName(day_of_week_arg).to_expr()
|
def function[full_name, parameter[self]]:
constant[Get the name of the day of the week.
Returns
-------
StringValue
The name of the day of the week
]
import module[ibis.expr.operations] as alias[ops]
return[call[call[name[ops].DayOfWeekName, parameter[call[name[self].op, parameter[]].arg]].to_expr, parameter[]]]
|
keyword[def] identifier[full_name] ( identifier[self] ):
literal[string]
keyword[import] identifier[ibis] . identifier[expr] . identifier[operations] keyword[as] identifier[ops]
keyword[return] identifier[ops] . identifier[DayOfWeekName] ( identifier[self] . identifier[op] (). identifier[arg] ). identifier[to_expr] ()
|
def full_name(self):
"""Get the name of the day of the week.
Returns
-------
StringValue
The name of the day of the week
"""
import ibis.expr.operations as ops
return ops.DayOfWeekName(self.op().arg).to_expr()
|
def status_codes_by_date_stats():
    """
    Get stats for status codes by date.
    Returns:
        list: status codes + date grouped by type: 2xx, 3xx, 4xx, 5xx, attacks.
    """
    def date_counter(queryset):
        # Collapse each request's datetime to its day (midnight, ms since
        # epoch) and count how many requests fell on each day.
        days = [
            ms_since_epoch(datetime.combine(make_naive(dt), datetime.min.time()))
            for dt in queryset.values_list('datetime', flat=True)
        ]
        return dict(Counter(days))

    # Per-day counts for each status-code family (2xx, 3xx, 4xx) ...
    codes = {}
    for low, high in ((200, 300), (300, 400), (400, 500)):
        codes[low] = date_counter(
            RequestLog.objects.filter(status_code__gte=low, status_code__lt=high))
    # ... plus 5xx and a hand-picked set of codes treated as attack attempts.
    codes[500] = date_counter(RequestLog.objects.filter(status_code__gte=500))
    codes['attacks'] = date_counter(RequestLog.objects.filter(
        status_code__in=(400, 444, 502)))

    # Pivot: one dict per day, keyed by family, zero-filled on first sight.
    stats = {}
    for code in (200, 300, 400, 500, 'attacks'):
        for day, count in codes[code].items():
            bucket = stats.setdefault(
                day, {200: 0, 300: 0, 400: 0, 500: 0, 'attacks': 0})
            bucket[code] += count
    return sorted(stats.items(), key=lambda item: item[0])
|
def function[status_codes_by_date_stats, parameter[]]:
constant[
Get stats for status codes by date.
Returns:
list: status codes + date grouped by type: 2xx, 3xx, 4xx, 5xx, attacks.
]
def function[date_counter, parameter[queryset]]:
return[call[name[dict], parameter[call[name[Counter], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b2453d00>, call[name[list], parameter[call[name[queryset].values_list, parameter[constant[datetime]]]]]]]]]]]]
variable[codes] assign[=] <ast.DictComp object at 0x7da1b2453cd0>
call[name[codes]][constant[500]] assign[=] call[name[date_counter], parameter[call[name[RequestLog].objects.filter, parameter[]]]]
call[name[codes]][constant[attacks]] assign[=] call[name[date_counter], parameter[call[name[RequestLog].objects.filter, parameter[]]]]
variable[stats] assign[=] dictionary[[], []]
for taget[name[code]] in starred[tuple[[<ast.Constant object at 0x7da1b2450b50>, <ast.Constant object at 0x7da1b2450ca0>, <ast.Constant object at 0x7da1b2450a90>, <ast.Constant object at 0x7da1b2450af0>, <ast.Constant object at 0x7da1b24507c0>]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b2450670>, <ast.Name object at 0x7da1b2450850>]]] in starred[call[call[name[codes]][name[code]].items, parameter[]]] begin[:]
if compare[call[name[stats].get, parameter[name[date], constant[None]]] is constant[None]] begin[:]
call[name[stats]][name[date]] assign[=] dictionary[[<ast.Constant object at 0x7da1b24529b0>, <ast.Constant object at 0x7da1b2452950>, <ast.Constant object at 0x7da1b2452920>, <ast.Constant object at 0x7da1b24528f0>, <ast.Constant object at 0x7da1b24529e0>], [<ast.Constant object at 0x7da1b2452830>, <ast.Constant object at 0x7da1b2452800>, <ast.Constant object at 0x7da1b2453f70>, <ast.Constant object at 0x7da1b2452710>, <ast.Constant object at 0x7da1b24534c0>]]
<ast.AugAssign object at 0x7da1b2453190>
variable[stats] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da1b2452e00>]]
return[name[stats]]
|
keyword[def] identifier[status_codes_by_date_stats] ():
literal[string]
keyword[def] identifier[date_counter] ( identifier[queryset] ):
keyword[return] identifier[dict] ( identifier[Counter] ( identifier[map] (
keyword[lambda] identifier[dt] : identifier[ms_since_epoch] ( identifier[datetime] . identifier[combine] (
identifier[make_naive] ( identifier[dt] ), identifier[datetime] . identifier[min] . identifier[time] ())),
identifier[list] ( identifier[queryset] . identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] )))))
identifier[codes] ={ identifier[low] : identifier[date_counter] (
identifier[RequestLog] . identifier[objects] . identifier[filter] ( identifier[status_code__gte] = identifier[low] , identifier[status_code__lt] = identifier[high] ))
keyword[for] identifier[low] , identifier[high] keyword[in] (( literal[int] , literal[int] ),( literal[int] , literal[int] ),( literal[int] , literal[int] ))}
identifier[codes] [ literal[int] ]= identifier[date_counter] ( identifier[RequestLog] . identifier[objects] . identifier[filter] ( identifier[status_code__gte] = literal[int] ))
identifier[codes] [ literal[string] ]= identifier[date_counter] ( identifier[RequestLog] . identifier[objects] . identifier[filter] (
identifier[status_code__in] =( literal[int] , literal[int] , literal[int] )))
identifier[stats] ={}
keyword[for] identifier[code] keyword[in] ( literal[int] , literal[int] , literal[int] , literal[int] , literal[string] ):
keyword[for] identifier[date] , identifier[count] keyword[in] identifier[codes] [ identifier[code] ]. identifier[items] ():
keyword[if] identifier[stats] . identifier[get] ( identifier[date] , keyword[None] ) keyword[is] keyword[None] :
identifier[stats] [ identifier[date] ]={ literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[string] : literal[int] }
identifier[stats] [ identifier[date] ][ identifier[code] ]+= identifier[count]
identifier[stats] = identifier[sorted] ([( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[stats] . identifier[items] ()], identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ])
keyword[return] identifier[stats]
|
def status_codes_by_date_stats():
"""
Get stats for status codes by date.
Returns:
list: status codes + date grouped by type: 2xx, 3xx, 4xx, 5xx, attacks.
"""
def date_counter(queryset):
return dict(Counter(map(lambda dt: ms_since_epoch(datetime.combine(make_naive(dt), datetime.min.time())), list(queryset.values_list('datetime', flat=True)))))
codes = {low: date_counter(RequestLog.objects.filter(status_code__gte=low, status_code__lt=high)) for (low, high) in ((200, 300), (300, 400), (400, 500))}
codes[500] = date_counter(RequestLog.objects.filter(status_code__gte=500))
codes['attacks'] = date_counter(RequestLog.objects.filter(status_code__in=(400, 444, 502)))
stats = {}
for code in (200, 300, 400, 500, 'attacks'):
for (date, count) in codes[code].items():
if stats.get(date, None) is None:
stats[date] = {200: 0, 300: 0, 400: 0, 500: 0, 'attacks': 0} # depends on [control=['if'], data=[]]
stats[date][code] += count # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['code']]
stats = sorted([(k, v) for (k, v) in stats.items()], key=lambda x: x[0])
return stats
|
def get_installed_version(self):
    """Return dependency status (string)"""
    # OK/NOK marker depends on whether the dependency check passes.
    marker = self.OK if self.check() else self.NOK
    return '%s (%s)' % (self.installed_version, marker)
|
def function[get_installed_version, parameter[self]]:
constant[Return dependency status (string)]
if call[name[self].check, parameter[]] begin[:]
return[binary_operation[constant[%s (%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b20437f0>, <ast.Attribute object at 0x7da1b20422f0>]]]]
|
keyword[def] identifier[get_installed_version] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[check] ():
keyword[return] literal[string] %( identifier[self] . identifier[installed_version] , identifier[self] . identifier[OK] )
keyword[else] :
keyword[return] literal[string] %( identifier[self] . identifier[installed_version] , identifier[self] . identifier[NOK] )
|
def get_installed_version(self):
"""Return dependency status (string)"""
if self.check():
return '%s (%s)' % (self.installed_version, self.OK) # depends on [control=['if'], data=[]]
else:
return '%s (%s)' % (self.installed_version, self.NOK)
|
def setup(self, pin, value):
    """Set the input or output mode for a specified pin. Mode should be
    either GPIO.OUT or GPIO.IN.

    Raises
    ------
    ValueError
        If ``value`` is neither ``GPIO.IN`` nor ``GPIO.OUT``.
    """
    self._validate_pin(pin)
    # Each IODIR byte covers 8 pins; use integer floor division / modulo
    # instead of int(pin/8), which round-trips through float division.
    reg, bit = pin // 8, pin % 8
    # Set bit to 1 for input or 0 for output.
    if value == GPIO.IN:
        self.iodir[reg] |= 1 << bit
    elif value == GPIO.OUT:
        self.iodir[reg] &= ~(1 << bit)
    else:
        raise ValueError('Unexpected value. Must be GPIO.IN or GPIO.OUT.')
    self.write_iodir()
|
def function[setup, parameter[self, pin, value]]:
constant[Set the input or output mode for a specified pin. Mode should be
either GPIO.OUT or GPIO.IN.
]
call[name[self]._validate_pin, parameter[name[pin]]]
if compare[name[value] equal[==] name[GPIO].IN] begin[:]
<ast.AugAssign object at 0x7da1b01c3880>
call[name[self].write_iodir, parameter[]]
|
keyword[def] identifier[setup] ( identifier[self] , identifier[pin] , identifier[value] ):
literal[string]
identifier[self] . identifier[_validate_pin] ( identifier[pin] )
keyword[if] identifier[value] == identifier[GPIO] . identifier[IN] :
identifier[self] . identifier[iodir] [ identifier[int] ( identifier[pin] / literal[int] )]|= literal[int] <<( identifier[int] ( identifier[pin] % literal[int] ))
keyword[elif] identifier[value] == identifier[GPIO] . identifier[OUT] :
identifier[self] . identifier[iodir] [ identifier[int] ( identifier[pin] / literal[int] )]&=~( literal[int] <<( identifier[int] ( identifier[pin] % literal[int] )))
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[write_iodir] ()
|
def setup(self, pin, value):
"""Set the input or output mode for a specified pin. Mode should be
either GPIO.OUT or GPIO.IN.
"""
self._validate_pin(pin)
# Set bit to 1 for input or 0 for output.
if value == GPIO.IN:
self.iodir[int(pin / 8)] |= 1 << int(pin % 8) # depends on [control=['if'], data=[]]
elif value == GPIO.OUT:
self.iodir[int(pin / 8)] &= ~(1 << int(pin % 8)) # depends on [control=['if'], data=[]]
else:
raise ValueError('Unexpected value. Must be GPIO.IN or GPIO.OUT.')
self.write_iodir()
|
def sgn(x):
    """
    Return the sign of x.
    Return a positive integer if x > 0, 0 if x == 0, and a negative integer if
    x < 0. Raise ValueError if x is a NaN.
    This function is equivalent to cmp(x, 0), but more efficient.
    """
    # Coerce to BigFloat first so NaN detection and mpfr_sgn see one type.
    converted = BigFloat._implicit_convert(x)
    if is_nan(converted):
        raise ValueError("Cannot take sign of a NaN.")
    return mpfr.mpfr_sgn(converted)
|
def function[sgn, parameter[x]]:
constant[
Return the sign of x.
Return a positive integer if x > 0, 0 if x == 0, and a negative integer if
x < 0. Raise ValueError if x is a NaN.
This function is equivalent to cmp(x, 0), but more efficient.
]
variable[x] assign[=] call[name[BigFloat]._implicit_convert, parameter[name[x]]]
if call[name[is_nan], parameter[name[x]]] begin[:]
<ast.Raise object at 0x7da207f993f0>
return[call[name[mpfr].mpfr_sgn, parameter[name[x]]]]
|
keyword[def] identifier[sgn] ( identifier[x] ):
literal[string]
identifier[x] = identifier[BigFloat] . identifier[_implicit_convert] ( identifier[x] )
keyword[if] identifier[is_nan] ( identifier[x] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[mpfr] . identifier[mpfr_sgn] ( identifier[x] )
|
def sgn(x):
"""
Return the sign of x.
Return a positive integer if x > 0, 0 if x == 0, and a negative integer if
x < 0. Raise ValueError if x is a NaN.
This function is equivalent to cmp(x, 0), but more efficient.
"""
x = BigFloat._implicit_convert(x)
if is_nan(x):
raise ValueError('Cannot take sign of a NaN.') # depends on [control=['if'], data=[]]
return mpfr.mpfr_sgn(x)
|
def shutdown(self):
    """Wait for all threads to complete"""
    # cleanup: stop accepting new work
    self.started = False
    try:
        # Orderly path: block until every worker has drained its queue.
        for worker in list(self._threads):
            worker.join()
    finally:
        # Emergency brake — even on KeyboardInterrupt, signal workers to
        # finish their current task and exit.
        self.stopped = True
|
def function[shutdown, parameter[self]]:
constant[Wait for all threads to complete]
name[self].started assign[=] constant[False]
<ast.Try object at 0x7da1b287f4f0>
|
keyword[def] identifier[shutdown] ( identifier[self] ):
literal[string]
identifier[self] . identifier[started] = keyword[False]
keyword[try] :
keyword[for] identifier[t] keyword[in] identifier[self] . identifier[_threads] :
identifier[t] . identifier[join] ()
keyword[finally] :
identifier[self] . identifier[stopped] = keyword[True]
|
def shutdown(self):
"""Wait for all threads to complete"""
# cleanup
self.started = False
try:
# nice way of doing things - let's wait until all items
# in the queue are processed
for t in self._threads:
t.join() # depends on [control=['for'], data=['t']] # depends on [control=['try'], data=[]]
finally:
# Emergency brake - if a KeyboardInterrupt is raised,
# threads will finish processing current task and exit
self.stopped = True
|
def get_fields(Model,
               parent_field="",
               model_stack=None,
               stack_limit=2,
               excludes=['permissions', 'comment', 'content_type']):
    """
    Given a Model, return a list of lists of strings with important stuff:
    ...
    ['test_user__user__customuser', 'customuser', 'User', 'RelatedObject']
    ['test_user__unique_id', 'unique_id', 'TestUser', 'CharField']
    ['test_user__confirmed', 'confirmed', 'TestUser', 'BooleanField']
    ...

    Each entry is [full_lookup_path, field_name, Model, field_class].
    Recurses through relation fields (FK / O2O / M2M / reverse relations),
    carrying the double-underscore lookup path in ``parent_field`` and the
    chain of visited models in ``model_stack``.

    NOTE(review): ``excludes`` is a mutable default argument; it is only read,
    never mutated, so this is safe as written — but keep it that way.
    NOTE(review): uses ``basestring`` and ``RelatedObject`` /
    ``get_all_related_objects`` — this is Python 2 / old-Django-API code.
    """
    out_fields = []
    if model_stack is None:
        model_stack = []
    # Model may be given as an "app_label.ModelName" string; resolve it.
    # github.com/omab/python-social-auth/commit/d8637cec02422374e4102231488481170dc51057
    if isinstance(Model, basestring):
        app_label, model_name = Model.split('.')
        Model = models.get_model(app_label, model_name)
    # Direct fields + M2M + reverse relations, all walked uniformly below.
    fields = Model._meta.fields + Model._meta.many_to_many + Model._meta.get_all_related_objects()
    model_stack.append(Model)
    # do a variety of checks to ensure recursion isnt being redundant
    stop_recursion = False
    if len(model_stack) > stack_limit:
        # rudimentary CustomUser->User->CustomUser->User detection
        if model_stack[-3] == model_stack[-1]:
            stop_recursion = True
        # stack depth shouldn't exceed x
        if len(model_stack) > 5:
            stop_recursion = True
        # we've hit a point where we are repeating models
        if len(set(model_stack)) != len(model_stack):
            stop_recursion = True
    if stop_recursion:
        return [] # give empty list for "extend"
    for field in fields:
        field_name = field.name
        # Reverse relations are addressed via their related query name.
        if isinstance(field, RelatedObject):
            field_name = field.field.related_query_name()
        if parent_field:
            full_field = "__".join([parent_field, field_name])
        else:
            full_field = field_name
        # Skip any lookup path containing an excluded substring.
        if len([True for exclude in excludes if (exclude in full_field)]):
            continue
        # add to the list
        out_fields.append([full_field, field_name, Model, field.__class__])
        # Recurse into relation fields only (and only if recursion wasn't
        # already judged redundant above).
        if not stop_recursion and \
            (isinstance(field, ForeignKey) or isinstance(field, OneToOneField) or \
            isinstance(field, RelatedObject) or isinstance(field, ManyToManyField)):
            if isinstance(field, RelatedObject):
                RelModel = field.model
                #field_names.extend(get_fields(RelModel, full_field, True))
            else:
                RelModel = field.related.parent_model
            # Pass a *copy* of model_stack so sibling branches don't see
            # each other's visited models.
            out_fields.extend(get_fields(RelModel, full_field, list(model_stack)))
    return out_fields
|
def function[get_fields, parameter[Model, parent_field, model_stack, stack_limit, excludes]]:
constant[
Given a Model, return a list of lists of strings with important stuff:
...
['test_user__user__customuser', 'customuser', 'User', 'RelatedObject']
['test_user__unique_id', 'unique_id', 'TestUser', 'CharField']
['test_user__confirmed', 'confirmed', 'TestUser', 'BooleanField']
...
]
variable[out_fields] assign[=] list[[]]
if compare[name[model_stack] is constant[None]] begin[:]
variable[model_stack] assign[=] list[[]]
if call[name[isinstance], parameter[name[Model], name[basestring]]] begin[:]
<ast.Tuple object at 0x7da1b1287880> assign[=] call[name[Model].split, parameter[constant[.]]]
variable[Model] assign[=] call[name[models].get_model, parameter[name[app_label], name[model_name]]]
variable[fields] assign[=] binary_operation[binary_operation[name[Model]._meta.fields + name[Model]._meta.many_to_many] + call[name[Model]._meta.get_all_related_objects, parameter[]]]
call[name[model_stack].append, parameter[name[Model]]]
variable[stop_recursion] assign[=] constant[False]
if compare[call[name[len], parameter[name[model_stack]]] greater[>] name[stack_limit]] begin[:]
if compare[call[name[model_stack]][<ast.UnaryOp object at 0x7da1b1285390>] equal[==] call[name[model_stack]][<ast.UnaryOp object at 0x7da1b1284850>]] begin[:]
variable[stop_recursion] assign[=] constant[True]
if compare[call[name[len], parameter[name[model_stack]]] greater[>] constant[5]] begin[:]
variable[stop_recursion] assign[=] constant[True]
if compare[call[name[len], parameter[call[name[set], parameter[name[model_stack]]]]] not_equal[!=] call[name[len], parameter[name[model_stack]]]] begin[:]
variable[stop_recursion] assign[=] constant[True]
if name[stop_recursion] begin[:]
return[list[[]]]
for taget[name[field]] in starred[name[fields]] begin[:]
variable[field_name] assign[=] name[field].name
if call[name[isinstance], parameter[name[field], name[RelatedObject]]] begin[:]
variable[field_name] assign[=] call[name[field].field.related_query_name, parameter[]]
if name[parent_field] begin[:]
variable[full_field] assign[=] call[constant[__].join, parameter[list[[<ast.Name object at 0x7da1b1287550>, <ast.Name object at 0x7da1b1286f20>]]]]
if call[name[len], parameter[<ast.ListComp object at 0x7da1b1285450>]] begin[:]
continue
call[name[out_fields].append, parameter[list[[<ast.Name object at 0x7da1b1285ea0>, <ast.Name object at 0x7da1b12846d0>, <ast.Name object at 0x7da1b1287b20>, <ast.Attribute object at 0x7da1b1286a70>]]]]
if <ast.BoolOp object at 0x7da1b12856c0> begin[:]
if call[name[isinstance], parameter[name[field], name[RelatedObject]]] begin[:]
variable[RelModel] assign[=] name[field].model
call[name[out_fields].extend, parameter[call[name[get_fields], parameter[name[RelModel], name[full_field], call[name[list], parameter[name[model_stack]]]]]]]
return[name[out_fields]]
|
keyword[def] identifier[get_fields] ( identifier[Model] ,
identifier[parent_field] = literal[string] ,
identifier[model_stack] = keyword[None] ,
identifier[stack_limit] = literal[int] ,
identifier[excludes] =[ literal[string] , literal[string] , literal[string] ]):
literal[string]
identifier[out_fields] =[]
keyword[if] identifier[model_stack] keyword[is] keyword[None] :
identifier[model_stack] =[]
keyword[if] identifier[isinstance] ( identifier[Model] , identifier[basestring] ):
identifier[app_label] , identifier[model_name] = identifier[Model] . identifier[split] ( literal[string] )
identifier[Model] = identifier[models] . identifier[get_model] ( identifier[app_label] , identifier[model_name] )
identifier[fields] = identifier[Model] . identifier[_meta] . identifier[fields] + identifier[Model] . identifier[_meta] . identifier[many_to_many] + identifier[Model] . identifier[_meta] . identifier[get_all_related_objects] ()
identifier[model_stack] . identifier[append] ( identifier[Model] )
identifier[stop_recursion] = keyword[False]
keyword[if] identifier[len] ( identifier[model_stack] )> identifier[stack_limit] :
keyword[if] identifier[model_stack] [- literal[int] ]== identifier[model_stack] [- literal[int] ]:
identifier[stop_recursion] = keyword[True]
keyword[if] identifier[len] ( identifier[model_stack] )> literal[int] :
identifier[stop_recursion] = keyword[True]
keyword[if] identifier[len] ( identifier[set] ( identifier[model_stack] ))!= identifier[len] ( identifier[model_stack] ):
identifier[stop_recursion] = keyword[True]
keyword[if] identifier[stop_recursion] :
keyword[return] []
keyword[for] identifier[field] keyword[in] identifier[fields] :
identifier[field_name] = identifier[field] . identifier[name]
keyword[if] identifier[isinstance] ( identifier[field] , identifier[RelatedObject] ):
identifier[field_name] = identifier[field] . identifier[field] . identifier[related_query_name] ()
keyword[if] identifier[parent_field] :
identifier[full_field] = literal[string] . identifier[join] ([ identifier[parent_field] , identifier[field_name] ])
keyword[else] :
identifier[full_field] = identifier[field_name]
keyword[if] identifier[len] ([ keyword[True] keyword[for] identifier[exclude] keyword[in] identifier[excludes] keyword[if] ( identifier[exclude] keyword[in] identifier[full_field] )]):
keyword[continue]
identifier[out_fields] . identifier[append] ([ identifier[full_field] , identifier[field_name] , identifier[Model] , identifier[field] . identifier[__class__] ])
keyword[if] keyword[not] identifier[stop_recursion] keyword[and] ( identifier[isinstance] ( identifier[field] , identifier[ForeignKey] ) keyword[or] identifier[isinstance] ( identifier[field] , identifier[OneToOneField] ) keyword[or] identifier[isinstance] ( identifier[field] , identifier[RelatedObject] ) keyword[or] identifier[isinstance] ( identifier[field] , identifier[ManyToManyField] )):
keyword[if] identifier[isinstance] ( identifier[field] , identifier[RelatedObject] ):
identifier[RelModel] = identifier[field] . identifier[model]
keyword[else] :
identifier[RelModel] = identifier[field] . identifier[related] . identifier[parent_model]
identifier[out_fields] . identifier[extend] ( identifier[get_fields] ( identifier[RelModel] , identifier[full_field] , identifier[list] ( identifier[model_stack] )))
keyword[return] identifier[out_fields]
|
def get_fields(Model, parent_field='', model_stack=None, stack_limit=2, excludes=['permissions', 'comment', 'content_type']):
"""
Given a Model, return a list of lists of strings with important stuff:
...
['test_user__user__customuser', 'customuser', 'User', 'RelatedObject']
['test_user__unique_id', 'unique_id', 'TestUser', 'CharField']
['test_user__confirmed', 'confirmed', 'TestUser', 'BooleanField']
...
"""
out_fields = []
if model_stack is None:
model_stack = [] # depends on [control=['if'], data=['model_stack']]
# github.com/omab/python-social-auth/commit/d8637cec02422374e4102231488481170dc51057
if isinstance(Model, basestring):
(app_label, model_name) = Model.split('.')
Model = models.get_model(app_label, model_name) # depends on [control=['if'], data=[]]
fields = Model._meta.fields + Model._meta.many_to_many + Model._meta.get_all_related_objects()
model_stack.append(Model)
# do a variety of checks to ensure recursion isnt being redundant
stop_recursion = False
if len(model_stack) > stack_limit:
# rudimentary CustomUser->User->CustomUser->User detection
if model_stack[-3] == model_stack[-1]:
stop_recursion = True # depends on [control=['if'], data=[]]
# stack depth shouldn't exceed x
if len(model_stack) > 5:
stop_recursion = True # depends on [control=['if'], data=[]]
# we've hit a point where we are repeating models
if len(set(model_stack)) != len(model_stack):
stop_recursion = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if stop_recursion:
return [] # give empty list for "extend" # depends on [control=['if'], data=[]]
for field in fields:
field_name = field.name
if isinstance(field, RelatedObject):
field_name = field.field.related_query_name() # depends on [control=['if'], data=[]]
if parent_field:
full_field = '__'.join([parent_field, field_name]) # depends on [control=['if'], data=[]]
else:
full_field = field_name
if len([True for exclude in excludes if exclude in full_field]):
continue # depends on [control=['if'], data=[]]
# add to the list
out_fields.append([full_field, field_name, Model, field.__class__])
if not stop_recursion and (isinstance(field, ForeignKey) or isinstance(field, OneToOneField) or isinstance(field, RelatedObject) or isinstance(field, ManyToManyField)):
if isinstance(field, RelatedObject):
RelModel = field.model # depends on [control=['if'], data=[]]
else:
#field_names.extend(get_fields(RelModel, full_field, True))
RelModel = field.related.parent_model
out_fields.extend(get_fields(RelModel, full_field, list(model_stack))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
return out_fields
|
def user(context, institute_id, user_name, user_mail, admin):
    """Add a user to the database.

    Args:
        context: CLI context; ``context.obj['adapter']`` must hold the
            store adapter. ``context.abort()`` is called on any failure.
        institute_id (iterable): institute ids the user belongs to. Every
            id must already exist in the database.
        user_name (str): display name for the user.
        user_mail (str): e-mail address; stored lower-cased.
        admin (bool): if True, grant the user the 'admin' role.
    """
    adapter = context.obj['adapter']
    institutes = []
    for institute in institute_id:
        institute_obj = adapter.institute(institute_id=institute)
        if not institute_obj:
            # Bug fix: format string was "Institute %", which never
            # interpolated the institute id into the log message.
            LOG.warning("Institute %s does not exist", institute)
            context.abort()
        institutes.append(institute)
    roles = []
    if admin:
        LOG.info("User is admin")
        roles.append('admin')
    user_info = dict(email=user_mail.lower(), name=user_name, roles=roles,
                     institutes=institutes)
    user_obj = build_user(user_info)
    try:
        adapter.add_user(user_obj)
    except Exception as err:
        # Surface the underlying error before aborting the command.
        LOG.warning(err)
        context.abort()
|
def function[user, parameter[context, institute_id, user_name, user_mail, admin]]:
constant[Add a user to the database.]
variable[adapter] assign[=] call[name[context].obj][constant[adapter]]
variable[institutes] assign[=] list[[]]
for taget[name[institute]] in starred[name[institute_id]] begin[:]
variable[institute_obj] assign[=] call[name[adapter].institute, parameter[]]
if <ast.UnaryOp object at 0x7da2041d9ab0> begin[:]
call[name[LOG].warning, parameter[constant[Institute % does not exist], name[institute]]]
call[name[context].abort, parameter[]]
call[name[institutes].append, parameter[name[institute]]]
variable[roles] assign[=] list[[]]
if name[admin] begin[:]
call[name[LOG].info, parameter[constant[User is admin]]]
call[name[roles].append, parameter[constant[admin]]]
variable[user_info] assign[=] call[name[dict], parameter[]]
variable[user_obj] assign[=] call[name[build_user], parameter[name[user_info]]]
<ast.Try object at 0x7da2041db790>
|
keyword[def] identifier[user] ( identifier[context] , identifier[institute_id] , identifier[user_name] , identifier[user_mail] , identifier[admin] ):
literal[string]
identifier[adapter] = identifier[context] . identifier[obj] [ literal[string] ]
identifier[institutes] =[]
keyword[for] identifier[institute] keyword[in] identifier[institute_id] :
identifier[institute_obj] = identifier[adapter] . identifier[institute] ( identifier[institute_id] = identifier[institute] )
keyword[if] keyword[not] identifier[institute_obj] :
identifier[LOG] . identifier[warning] ( literal[string] , identifier[institute] )
identifier[context] . identifier[abort] ()
identifier[institutes] . identifier[append] ( identifier[institute] )
identifier[roles] =[]
keyword[if] identifier[admin] :
identifier[LOG] . identifier[info] ( literal[string] )
identifier[roles] . identifier[append] ( literal[string] )
identifier[user_info] = identifier[dict] ( identifier[email] = identifier[user_mail] . identifier[lower] (), identifier[name] = identifier[user_name] , identifier[roles] = identifier[roles] , identifier[institutes] = identifier[institutes] )
identifier[user_obj] = identifier[build_user] ( identifier[user_info] )
keyword[try] :
identifier[adapter] . identifier[add_user] ( identifier[user_obj] )
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[LOG] . identifier[warning] ( identifier[err] )
identifier[context] . identifier[abort] ()
|
def user(context, institute_id, user_name, user_mail, admin):
"""Add a user to the database."""
adapter = context.obj['adapter']
institutes = []
for institute in institute_id:
institute_obj = adapter.institute(institute_id=institute)
if not institute_obj:
LOG.warning('Institute % does not exist', institute)
context.abort() # depends on [control=['if'], data=[]]
institutes.append(institute) # depends on [control=['for'], data=['institute']]
roles = []
if admin:
LOG.info('User is admin')
roles.append('admin') # depends on [control=['if'], data=[]]
user_info = dict(email=user_mail.lower(), name=user_name, roles=roles, institutes=institutes)
user_obj = build_user(user_info)
try:
adapter.add_user(user_obj) # depends on [control=['try'], data=[]]
except Exception as err:
LOG.warning(err)
context.abort() # depends on [control=['except'], data=['err']]
|
def _get_simpx_plane(self):
    """
    Locate the plane for simpx of on wulff_cv, by comparing the center of
    the simpx triangle with the plane functions.

    Returns:
        tuple: ``(on_wulff, surface_area)`` — ``on_wulff`` is a list of
        bools (one per entry in ``self.miller_list``) marking facets that
        actually appear on the Wulff shape; ``surface_area`` is the
        accumulated triangle area per facet (same indexing).
    """
    on_wulff = [False] * len(self.miller_list)
    surface_area = [0.0] * len(self.miller_list)
    for simpx in self.wulff_cv_simp:
        # Vertices of this simplex (a triangle on the convex hull).
        pts = [self.wulff_pt_list[simpx[i]] for i in range(3)]
        # Centroid of the triangle.
        center = np.sum(pts, 0) / 3.0
        # check whether the center of the simplices is on one plane
        for plane in self.facets:
            # Distance of the centroid from the facet plane n.x = e_surf.
            abs_diff = abs(np.dot(plane.normal, center) - plane.e_surf)
            if abs_diff < 1e-5:
                on_wulff[plane.index] = True
                surface_area[plane.index] += get_tri_area(pts)
                # Record the triangle and its three edges on the facet.
                plane.points.append(pts)
                plane.outer_lines.append([simpx[0], simpx[1]])
                plane.outer_lines.append([simpx[1], simpx[2]])
                plane.outer_lines.append([simpx[0], simpx[2]])
                # already find the plane, move to the next simplices
                break
    for plane in self.facets:
        plane.outer_lines.sort()
        # Keep only boundary edges: an edge shared by two triangles of the
        # same facet is interior and therefore appears exactly twice.
        plane.outer_lines = [line for line in plane.outer_lines
                             if plane.outer_lines.count(line) != 2]
    return on_wulff, surface_area
|
def function[_get_simpx_plane, parameter[self]]:
constant[
Locate the plane for simpx of on wulff_cv, by comparing the center of
the simpx triangle with the plane functions.
]
variable[on_wulff] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18dc04d30>]] * call[name[len], parameter[name[self].miller_list]]]
variable[surface_area] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18dc06110>]] * call[name[len], parameter[name[self].miller_list]]]
for taget[name[simpx]] in starred[name[self].wulff_cv_simp] begin[:]
variable[pts] assign[=] <ast.ListComp object at 0x7da1b1c96d70>
variable[center] assign[=] binary_operation[call[name[np].sum, parameter[name[pts], constant[0]]] / constant[3.0]]
for taget[name[plane]] in starred[name[self].facets] begin[:]
variable[abs_diff] assign[=] call[name[abs], parameter[binary_operation[call[name[np].dot, parameter[name[plane].normal, name[center]]] - name[plane].e_surf]]]
if compare[name[abs_diff] less[<] constant[1e-05]] begin[:]
call[name[on_wulff]][name[plane].index] assign[=] constant[True]
<ast.AugAssign object at 0x7da1b1c96920>
call[name[plane].points.append, parameter[name[pts]]]
call[name[plane].outer_lines.append, parameter[list[[<ast.Subscript object at 0x7da1b1c963e0>, <ast.Subscript object at 0x7da1b1c965c0>]]]]
call[name[plane].outer_lines.append, parameter[list[[<ast.Subscript object at 0x7da1b1c96590>, <ast.Subscript object at 0x7da1b1c966e0>]]]]
call[name[plane].outer_lines.append, parameter[list[[<ast.Subscript object at 0x7da1b1c688e0>, <ast.Subscript object at 0x7da1b1c69960>]]]]
break
for taget[name[plane]] in starred[name[self].facets] begin[:]
call[name[plane].outer_lines.sort, parameter[]]
name[plane].outer_lines assign[=] <ast.ListComp object at 0x7da1b1c69f30>
return[tuple[[<ast.Name object at 0x7da1b1c6b8e0>, <ast.Name object at 0x7da1b1c6a590>]]]
|
keyword[def] identifier[_get_simpx_plane] ( identifier[self] ):
literal[string]
identifier[on_wulff] =[ keyword[False] ]* identifier[len] ( identifier[self] . identifier[miller_list] )
identifier[surface_area] =[ literal[int] ]* identifier[len] ( identifier[self] . identifier[miller_list] )
keyword[for] identifier[simpx] keyword[in] identifier[self] . identifier[wulff_cv_simp] :
identifier[pts] =[ identifier[self] . identifier[wulff_pt_list] [ identifier[simpx] [ identifier[i] ]] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )]
identifier[center] = identifier[np] . identifier[sum] ( identifier[pts] , literal[int] )/ literal[int]
keyword[for] identifier[plane] keyword[in] identifier[self] . identifier[facets] :
identifier[abs_diff] = identifier[abs] ( identifier[np] . identifier[dot] ( identifier[plane] . identifier[normal] , identifier[center] )- identifier[plane] . identifier[e_surf] )
keyword[if] identifier[abs_diff] < literal[int] :
identifier[on_wulff] [ identifier[plane] . identifier[index] ]= keyword[True]
identifier[surface_area] [ identifier[plane] . identifier[index] ]+= identifier[get_tri_area] ( identifier[pts] )
identifier[plane] . identifier[points] . identifier[append] ( identifier[pts] )
identifier[plane] . identifier[outer_lines] . identifier[append] ([ identifier[simpx] [ literal[int] ], identifier[simpx] [ literal[int] ]])
identifier[plane] . identifier[outer_lines] . identifier[append] ([ identifier[simpx] [ literal[int] ], identifier[simpx] [ literal[int] ]])
identifier[plane] . identifier[outer_lines] . identifier[append] ([ identifier[simpx] [ literal[int] ], identifier[simpx] [ literal[int] ]])
keyword[break]
keyword[for] identifier[plane] keyword[in] identifier[self] . identifier[facets] :
identifier[plane] . identifier[outer_lines] . identifier[sort] ()
identifier[plane] . identifier[outer_lines] =[ identifier[line] keyword[for] identifier[line] keyword[in] identifier[plane] . identifier[outer_lines]
keyword[if] identifier[plane] . identifier[outer_lines] . identifier[count] ( identifier[line] )!= literal[int] ]
keyword[return] identifier[on_wulff] , identifier[surface_area]
|
def _get_simpx_plane(self):
"""
Locate the plane for simpx of on wulff_cv, by comparing the center of
the simpx triangle with the plane functions.
"""
on_wulff = [False] * len(self.miller_list)
surface_area = [0.0] * len(self.miller_list)
for simpx in self.wulff_cv_simp:
pts = [self.wulff_pt_list[simpx[i]] for i in range(3)]
center = np.sum(pts, 0) / 3.0
# check whether the center of the simplices is on one plane
for plane in self.facets:
abs_diff = abs(np.dot(plane.normal, center) - plane.e_surf)
if abs_diff < 1e-05:
on_wulff[plane.index] = True
surface_area[plane.index] += get_tri_area(pts)
plane.points.append(pts)
plane.outer_lines.append([simpx[0], simpx[1]])
plane.outer_lines.append([simpx[1], simpx[2]])
plane.outer_lines.append([simpx[0], simpx[2]])
# already find the plane, move to the next simplices
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['plane']] # depends on [control=['for'], data=['simpx']]
for plane in self.facets:
plane.outer_lines.sort()
plane.outer_lines = [line for line in plane.outer_lines if plane.outer_lines.count(line) != 2] # depends on [control=['for'], data=['plane']]
return (on_wulff, surface_area)
|
def export_img(visio_filename, image_filename, pagenum=None, pagename=None):
    """Exports images from visio file.

    Args:
        visio_filename (str): path of the Visio document to read.
        image_filename (str): output image path. With multiple selected
            pages, a zero-padded page index is inserted before the
            extension (e.g. ``out1.png`` .. ``out12.png``).
        pagenum (int, optional): page number filter passed to filter_pages.
        pagename (str, optional): page name filter passed to filter_pages.

    Raises:
        IOError: if the output directory does not exist or export fails.
    """
    # visio requires absolute path
    image_pathname = os.path.abspath(image_filename)
    if not os.path.isdir(os.path.dirname(image_pathname)):
        raise IOError('Could not write image file: %s' % image_filename)
    with VisioFile.Open(visio_filename) as visio:
        # Materialize so len() works even if filter_pages yields lazily.
        pages = list(filter_pages(visio.pages, pagenum, pagename))
        try:
            if len(pages) == 1:
                pages[0].Export(image_pathname)
            else:
                # Bug fix: int(log(n, 10)) + 1 miscounts digits when the
                # float log rounds down (e.g. log(1000, 10) == 2.999...,
                # giving 3 instead of 4). Count digits via str() instead.
                digits = len(str(len(pages)))
                basename, ext = os.path.splitext(image_pathname)
                filename_format = "%s%%0%dd%s" % (basename, digits, ext)
                for i, page in enumerate(pages, start=1):
                    page.Export(filename_format % i)
        except Exception as err:
            # Chain the original (COM) error so the cause is not lost.
            raise IOError('Could not write image: %s' % image_pathname) from err
|
def function[export_img, parameter[visio_filename, image_filename, pagenum, pagename]]:
constant[ Exports images from visio file ]
variable[image_pathname] assign[=] call[name[os].path.abspath, parameter[name[image_filename]]]
if <ast.UnaryOp object at 0x7da18c4ce320> begin[:]
variable[msg] assign[=] binary_operation[constant[Could not write image file: %s] <ast.Mod object at 0x7da2590d6920> name[image_filename]]
<ast.Raise object at 0x7da18c4cd9f0>
with call[name[VisioFile].Open, parameter[name[visio_filename]]] begin[:]
variable[pages] assign[=] call[name[filter_pages], parameter[name[visio].pages, name[pagenum], name[pagename]]]
<ast.Try object at 0x7da18c4cf8b0>
|
keyword[def] identifier[export_img] ( identifier[visio_filename] , identifier[image_filename] , identifier[pagenum] = keyword[None] , identifier[pagename] = keyword[None] ):
literal[string]
identifier[image_pathname] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[image_filename] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[image_pathname] )):
identifier[msg] = literal[string] % identifier[image_filename]
keyword[raise] identifier[IOError] ( identifier[msg] )
keyword[with] identifier[VisioFile] . identifier[Open] ( identifier[visio_filename] ) keyword[as] identifier[visio] :
identifier[pages] = identifier[filter_pages] ( identifier[visio] . identifier[pages] , identifier[pagenum] , identifier[pagename] )
keyword[try] :
keyword[if] identifier[len] ( identifier[pages] )== literal[int] :
identifier[pages] [ literal[int] ]. identifier[Export] ( identifier[image_pathname] )
keyword[else] :
identifier[digits] = identifier[int] ( identifier[log] ( identifier[len] ( identifier[pages] ), literal[int] ))+ literal[int]
identifier[basename] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[image_pathname] )
identifier[filename_format] = literal[string] %( identifier[basename] , identifier[digits] , identifier[ext] )
keyword[for] identifier[i] , identifier[page] keyword[in] identifier[enumerate] ( identifier[pages] ):
identifier[filename] = identifier[filename_format] %( identifier[i] + literal[int] )
identifier[page] . identifier[Export] ( identifier[filename] )
keyword[except] identifier[Exception] :
keyword[raise] identifier[IOError] ( literal[string] % identifier[image_pathname] )
|
def export_img(visio_filename, image_filename, pagenum=None, pagename=None):
""" Exports images from visio file """
# visio requires absolute path
image_pathname = os.path.abspath(image_filename)
if not os.path.isdir(os.path.dirname(image_pathname)):
msg = 'Could not write image file: %s' % image_filename
raise IOError(msg) # depends on [control=['if'], data=[]]
with VisioFile.Open(visio_filename) as visio:
pages = filter_pages(visio.pages, pagenum, pagename)
try:
if len(pages) == 1:
pages[0].Export(image_pathname) # depends on [control=['if'], data=[]]
else:
digits = int(log(len(pages), 10)) + 1
(basename, ext) = os.path.splitext(image_pathname)
filename_format = '%s%%0%dd%s' % (basename, digits, ext)
for (i, page) in enumerate(pages):
filename = filename_format % (i + 1)
page.Export(filename) # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
except Exception:
raise IOError('Could not write image: %s' % image_pathname) # depends on [control=['except'], data=[]] # depends on [control=['with'], data=['visio']]
|
def _metrics_options(p):
    """ Add options specific to metrics subcommand. """
    # Shared options first, minus the ones the metrics subcommand ignores.
    _default_options(p, blacklist=['log-group', 'output-dir', 'cache', 'quiet'])
    add = p.add_argument
    add('--start', type=date_parse,
        help='Start date (requires --end, overrides --days)')
    add('--end', type=date_parse, help='End date')
    add('--days', type=int, default=14,
        help='Number of days of history to consider (default: %(default)i)')
    add('--period', type=int, default=60 * 24 * 24)
|
def function[_metrics_options, parameter[p]]:
constant[ Add options specific to metrics subcommand. ]
call[name[_default_options], parameter[name[p]]]
call[name[p].add_argument, parameter[constant[--start]]]
call[name[p].add_argument, parameter[constant[--end]]]
call[name[p].add_argument, parameter[constant[--days]]]
call[name[p].add_argument, parameter[constant[--period]]]
|
keyword[def] identifier[_metrics_options] ( identifier[p] ):
literal[string]
identifier[_default_options] ( identifier[p] , identifier[blacklist] =[ literal[string] , literal[string] , literal[string] , literal[string] ])
identifier[p] . identifier[add_argument] (
literal[string] , identifier[type] = identifier[date_parse] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_argument] (
literal[string] , identifier[type] = identifier[date_parse] , identifier[help] = literal[string] )
identifier[p] . identifier[add_argument] (
literal[string] , identifier[type] = identifier[int] , identifier[default] = literal[int] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[default] = literal[int] * literal[int] * literal[int] )
|
def _metrics_options(p):
""" Add options specific to metrics subcommand. """
_default_options(p, blacklist=['log-group', 'output-dir', 'cache', 'quiet'])
p.add_argument('--start', type=date_parse, help='Start date (requires --end, overrides --days)')
p.add_argument('--end', type=date_parse, help='End date')
p.add_argument('--days', type=int, default=14, help='Number of days of history to consider (default: %(default)i)')
p.add_argument('--period', type=int, default=60 * 24 * 24)
|
def read_data(self, **kwargs):
    '''
    Read the datafile specified in Sample.datafile and
    return the resulting object.
    Does NOT assign the data to self.data
    It's advised not to use this method, but instead to access
    the data through the FCMeasurement.data attribute.
    '''
    # parse_fcs returns (meta, data); the metadata part is discarded.
    return parse_fcs(self.datafile, **kwargs)[1]
|
def function[read_data, parameter[self]]:
constant[
Read the datafile specified in Sample.datafile and
return the resulting object.
Does NOT assign the data to self.data
It's advised not to use this method, but instead to access
the data through the FCMeasurement.data attribute.
]
<ast.Tuple object at 0x7da20e963250> assign[=] call[name[parse_fcs], parameter[name[self].datafile]]
return[name[data]]
|
keyword[def] identifier[read_data] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[meta] , identifier[data] = identifier[parse_fcs] ( identifier[self] . identifier[datafile] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def read_data(self, **kwargs):
"""
Read the datafile specified in Sample.datafile and
return the resulting object.
Does NOT assign the data to self.data
It's advised not to use this method, but instead to access
the data through the FCMeasurement.data attribute.
"""
(meta, data) = parse_fcs(self.datafile, **kwargs)
return data
|
def set_defaults(self, config_file):
    """Set defaults.

    Reads *config_file* into a Defaults object and (re)initializes all
    collaborator objects and release-option flags to their start values.
    """
    # Collaborators, built from the configuration file.
    self.defaults = Defaults(config_file)
    self.locations = Locations(self.defaults)
    self.python = Python()
    self.setuptools = Setuptools()
    self.scp = SCP()
    self.scms = SCMFactory()
    self.urlparser = URLParser()
    # Flags seeded from the config; skip* flags invert the config values.
    self.skipcommit = not self.defaults.commit
    self.skiptag = not self.defaults.tag
    self.skipregister = False # per server
    self.skipupload = False # special
    self.push = self.defaults.push
    self.develop = False # special
    self.quiet = self.defaults.quiet
    self.sign = False # per server
    self.list = False
    self.manifest = self.defaults.manifest
    self.identity = '' # per server
    self.branch = ''
    self.scmtype = ''
    # Accumulators filled in later from command-line options.
    self.infoflags = []
    self.formats = []
    self.distributions = []
    self.directory = os.curdir
    self.scm = None
|
def function[set_defaults, parameter[self, config_file]]:
constant[Set defaults.
]
name[self].defaults assign[=] call[name[Defaults], parameter[name[config_file]]]
name[self].locations assign[=] call[name[Locations], parameter[name[self].defaults]]
name[self].python assign[=] call[name[Python], parameter[]]
name[self].setuptools assign[=] call[name[Setuptools], parameter[]]
name[self].scp assign[=] call[name[SCP], parameter[]]
name[self].scms assign[=] call[name[SCMFactory], parameter[]]
name[self].urlparser assign[=] call[name[URLParser], parameter[]]
name[self].skipcommit assign[=] <ast.UnaryOp object at 0x7da1b27e9a80>
name[self].skiptag assign[=] <ast.UnaryOp object at 0x7da1b27e98a0>
name[self].skipregister assign[=] constant[False]
name[self].skipupload assign[=] constant[False]
name[self].push assign[=] name[self].defaults.push
name[self].develop assign[=] constant[False]
name[self].quiet assign[=] name[self].defaults.quiet
name[self].sign assign[=] constant[False]
name[self].list assign[=] constant[False]
name[self].manifest assign[=] name[self].defaults.manifest
name[self].identity assign[=] constant[]
name[self].branch assign[=] constant[]
name[self].scmtype assign[=] constant[]
name[self].infoflags assign[=] list[[]]
name[self].formats assign[=] list[[]]
name[self].distributions assign[=] list[[]]
name[self].directory assign[=] name[os].curdir
name[self].scm assign[=] constant[None]
|
keyword[def] identifier[set_defaults] ( identifier[self] , identifier[config_file] ):
literal[string]
identifier[self] . identifier[defaults] = identifier[Defaults] ( identifier[config_file] )
identifier[self] . identifier[locations] = identifier[Locations] ( identifier[self] . identifier[defaults] )
identifier[self] . identifier[python] = identifier[Python] ()
identifier[self] . identifier[setuptools] = identifier[Setuptools] ()
identifier[self] . identifier[scp] = identifier[SCP] ()
identifier[self] . identifier[scms] = identifier[SCMFactory] ()
identifier[self] . identifier[urlparser] = identifier[URLParser] ()
identifier[self] . identifier[skipcommit] = keyword[not] identifier[self] . identifier[defaults] . identifier[commit]
identifier[self] . identifier[skiptag] = keyword[not] identifier[self] . identifier[defaults] . identifier[tag]
identifier[self] . identifier[skipregister] = keyword[False]
identifier[self] . identifier[skipupload] = keyword[False]
identifier[self] . identifier[push] = identifier[self] . identifier[defaults] . identifier[push]
identifier[self] . identifier[develop] = keyword[False]
identifier[self] . identifier[quiet] = identifier[self] . identifier[defaults] . identifier[quiet]
identifier[self] . identifier[sign] = keyword[False]
identifier[self] . identifier[list] = keyword[False]
identifier[self] . identifier[manifest] = identifier[self] . identifier[defaults] . identifier[manifest]
identifier[self] . identifier[identity] = literal[string]
identifier[self] . identifier[branch] = literal[string]
identifier[self] . identifier[scmtype] = literal[string]
identifier[self] . identifier[infoflags] =[]
identifier[self] . identifier[formats] =[]
identifier[self] . identifier[distributions] =[]
identifier[self] . identifier[directory] = identifier[os] . identifier[curdir]
identifier[self] . identifier[scm] = keyword[None]
|
def set_defaults(self, config_file):
"""Set defaults.
"""
self.defaults = Defaults(config_file)
self.locations = Locations(self.defaults)
self.python = Python()
self.setuptools = Setuptools()
self.scp = SCP()
self.scms = SCMFactory()
self.urlparser = URLParser()
self.skipcommit = not self.defaults.commit
self.skiptag = not self.defaults.tag
self.skipregister = False # per server
self.skipupload = False # special
self.push = self.defaults.push
self.develop = False # special
self.quiet = self.defaults.quiet
self.sign = False # per server
self.list = False
self.manifest = self.defaults.manifest
self.identity = '' # per server
self.branch = ''
self.scmtype = ''
self.infoflags = []
self.formats = []
self.distributions = []
self.directory = os.curdir
self.scm = None
|
def get_host_map(root):
    '''Gets a mapping between CM hostId and Nagios host information.

    The key is the CM hostId; the value is a dict with the Nagios
    "hostname" and host "address". Cluster names and a dummy CM host are
    added as extra keys alongside the real hosts.
    '''
    hosts_map = {}
    for host in root.get_all_hosts():
        hosts_map[host.hostId] = {"hostname": NAGIOS_HOSTNAME_FORMAT % (host.hostname,),
                                  "address": host.ipAddress}
    # Also define "virtual hosts" for the CM clusters - they will be the
    # hosts to which CM services are mapped.
    # (This note used to be a bare triple-quoted string, i.e. a no-op
    # expression statement; converted to a real comment.)
    for cluster in root.get_all_clusters():
        hosts_map[cluster.name] = {"hostname": cluster.name,
                                   "address": quote(cluster.name)}
    hosts_map[CM_DUMMY_HOST] = {"hostname": CM_DUMMY_HOST,
                                "address": CM_DUMMY_HOST}
    return hosts_map
|
def function[get_host_map, parameter[root]]:
constant[ Gets a mapping between CM hostId and Nagios host information
The key is the CM hostId
The value is an object containing the Nagios hostname and host address
]
variable[hosts_map] assign[=] dictionary[[], []]
for taget[name[host]] in starred[call[name[root].get_all_hosts, parameter[]]] begin[:]
call[name[hosts_map]][name[host].hostId] assign[=] dictionary[[<ast.Constant object at 0x7da1b1d550f0>, <ast.Constant object at 0x7da1b1d56020>], [<ast.BinOp object at 0x7da1b1d553f0>, <ast.Attribute object at 0x7da1b1d54c70>]]
constant[ Also define "virtual hosts" for the CM clusters- they will be the hosts
to which CM services are mapped
]
for taget[name[cluster]] in starred[call[name[root].get_all_clusters, parameter[]]] begin[:]
call[name[hosts_map]][name[cluster].name] assign[=] dictionary[[<ast.Constant object at 0x7da1b1d57e80>, <ast.Constant object at 0x7da1b1d542e0>], [<ast.Attribute object at 0x7da1b1d54d90>, <ast.Call object at 0x7da1b1d57af0>]]
call[name[hosts_map]][name[CM_DUMMY_HOST]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1d550c0>, <ast.Constant object at 0x7da1b1d54130>], [<ast.Name object at 0x7da1b1d55b40>, <ast.Name object at 0x7da1b1d57940>]]
return[name[hosts_map]]
|
keyword[def] identifier[get_host_map] ( identifier[root] ):
literal[string]
identifier[hosts_map] ={}
keyword[for] identifier[host] keyword[in] identifier[root] . identifier[get_all_hosts] ():
identifier[hosts_map] [ identifier[host] . identifier[hostId] ]={ literal[string] : identifier[NAGIOS_HOSTNAME_FORMAT] %( identifier[host] . identifier[hostname] ,),
literal[string] : identifier[host] . identifier[ipAddress] }
literal[string]
keyword[for] identifier[cluster] keyword[in] identifier[root] . identifier[get_all_clusters] ():
identifier[hosts_map] [ identifier[cluster] . identifier[name] ]={ literal[string] : identifier[cluster] . identifier[name] ,
literal[string] : identifier[quote] ( identifier[cluster] . identifier[name] )}
identifier[hosts_map] [ identifier[CM_DUMMY_HOST] ]={ literal[string] : identifier[CM_DUMMY_HOST] ,
literal[string] : identifier[CM_DUMMY_HOST] }
keyword[return] identifier[hosts_map]
|
def get_host_map(root):
""" Gets a mapping between CM hostId and Nagios host information
The key is the CM hostId
The value is an object containing the Nagios hostname and host address
"""
hosts_map = {}
for host in root.get_all_hosts():
hosts_map[host.hostId] = {'hostname': NAGIOS_HOSTNAME_FORMAT % (host.hostname,), 'address': host.ipAddress} # depends on [control=['for'], data=['host']]
' Also define "virtual hosts" for the CM clusters- they will be the hosts\n to which CM services are mapped\n '
for cluster in root.get_all_clusters():
hosts_map[cluster.name] = {'hostname': cluster.name, 'address': quote(cluster.name)} # depends on [control=['for'], data=['cluster']]
hosts_map[CM_DUMMY_HOST] = {'hostname': CM_DUMMY_HOST, 'address': CM_DUMMY_HOST}
return hosts_map
|
def save_checkpoint(model, filename, optimizer=None, meta=None):
    """Save checkpoint to file.
    The checkpoint will have 3 fields: ``meta``, ``state_dict`` and
    ``optimizer``. By default ``meta`` will contain version and time info.
    Args:
        model (Module): Module whose params are to be saved.
        filename (str): Checkpoint filename.
        optimizer (:obj:`Optimizer`, optional): Optimizer to be saved.
        meta (dict, optional): Metadata to be saved in checkpoint.
    Raises:
        TypeError: If ``meta`` is neither ``None`` nor a dict.
    """
    if meta is None:
        meta = {}
    elif not isinstance(meta, dict):
        raise TypeError('meta must be a dict or None, but got {}'.format(
            type(meta)))
    # NOTE(review): mutates the caller-supplied meta dict in place.
    meta.update(mmcv_version=mmcv.__version__, time=time.asctime())
    # Ensure the target directory exists before writing.
    mmcv.mkdir_or_exist(osp.dirname(filename))
    # Unwrap wrapped models exposing a ``.module`` attribute — presumably
    # (Data)Parallel wrappers, so state-dict keys lack the 'module.'
    # prefix; confirm against the corresponding loader.
    if hasattr(model, 'module'):
        model = model.module
    checkpoint = {
        'meta': meta,
        'state_dict': weights_to_cpu(model.state_dict())
    }
    # Optimizer state is only included when an optimizer is supplied.
    if optimizer is not None:
        checkpoint['optimizer'] = optimizer.state_dict()
    torch.save(checkpoint, filename)
|
def function[save_checkpoint, parameter[model, filename, optimizer, meta]]:
constant[Save checkpoint to file.
The checkpoint will have 3 fields: ``meta``, ``state_dict`` and
``optimizer``. By default ``meta`` will contain version and time info.
Args:
model (Module): Module whose params are to be saved.
filename (str): Checkpoint filename.
optimizer (:obj:`Optimizer`, optional): Optimizer to be saved.
meta (dict, optional): Metadata to be saved in checkpoint.
]
if compare[name[meta] is constant[None]] begin[:]
variable[meta] assign[=] dictionary[[], []]
call[name[meta].update, parameter[]]
call[name[mmcv].mkdir_or_exist, parameter[call[name[osp].dirname, parameter[name[filename]]]]]
if call[name[hasattr], parameter[name[model], constant[module]]] begin[:]
variable[model] assign[=] name[model].module
variable[checkpoint] assign[=] dictionary[[<ast.Constant object at 0x7da1b0533fa0>, <ast.Constant object at 0x7da1b05318d0>], [<ast.Name object at 0x7da1b0533be0>, <ast.Call object at 0x7da1b0531fc0>]]
if compare[name[optimizer] is_not constant[None]] begin[:]
call[name[checkpoint]][constant[optimizer]] assign[=] call[name[optimizer].state_dict, parameter[]]
call[name[torch].save, parameter[name[checkpoint], name[filename]]]
|
keyword[def] identifier[save_checkpoint] ( identifier[model] , identifier[filename] , identifier[optimizer] = keyword[None] , identifier[meta] = keyword[None] ):
literal[string]
keyword[if] identifier[meta] keyword[is] keyword[None] :
identifier[meta] ={}
keyword[elif] keyword[not] identifier[isinstance] ( identifier[meta] , identifier[dict] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] (
identifier[type] ( identifier[meta] )))
identifier[meta] . identifier[update] ( identifier[mmcv_version] = identifier[mmcv] . identifier[__version__] , identifier[time] = identifier[time] . identifier[asctime] ())
identifier[mmcv] . identifier[mkdir_or_exist] ( identifier[osp] . identifier[dirname] ( identifier[filename] ))
keyword[if] identifier[hasattr] ( identifier[model] , literal[string] ):
identifier[model] = identifier[model] . identifier[module]
identifier[checkpoint] ={
literal[string] : identifier[meta] ,
literal[string] : identifier[weights_to_cpu] ( identifier[model] . identifier[state_dict] ())
}
keyword[if] identifier[optimizer] keyword[is] keyword[not] keyword[None] :
identifier[checkpoint] [ literal[string] ]= identifier[optimizer] . identifier[state_dict] ()
identifier[torch] . identifier[save] ( identifier[checkpoint] , identifier[filename] )
|
def save_checkpoint(model, filename, optimizer=None, meta=None):
    """Save checkpoint to file.

    The checkpoint will have 3 fields: ``meta``, ``state_dict`` and
    ``optimizer``. By default ``meta`` will contain version and time info.

    Args:
        model (Module): Module whose params are to be saved.
        filename (str): Checkpoint filename.
        optimizer (:obj:`Optimizer`, optional): Optimizer to be saved.
        meta (dict, optional): Metadata to be saved in checkpoint.
    """
    # Validate first, then normalize None to an empty dict. Note that a
    # caller-supplied dict is mutated in place by update() below, on purpose.
    if not (meta is None or isinstance(meta, dict)):
        raise TypeError('meta must be a dict or None, but got {}'.format(type(meta)))
    if meta is None:
        meta = {}
    meta.update(mmcv_version=mmcv.__version__, time=time.asctime())
    # Ensure the target directory exists before writing.
    mmcv.mkdir_or_exist(osp.dirname(filename))
    # If the model is wrapped (exposes `.module`), save the inner module's
    # weights so the stored keys carry no wrapper prefix.
    if hasattr(model, 'module'):
        model = model.module
    checkpoint = {
        'meta': meta,
        'state_dict': weights_to_cpu(model.state_dict()),
    }
    if optimizer is not None:
        checkpoint['optimizer'] = optimizer.state_dict()
    torch.save(checkpoint, filename)
|
def add_profiler(self, id, profiler):
    """ Add a profiler for RDD `id` """
    is_first = not self.profilers
    if is_first:
        # Register the exit hook only once, when the first profiler arrives:
        # dump to disk if a dump path is configured, otherwise print.
        dump_path = self.profile_dump_path
        if dump_path:
            atexit.register(self.dump_profiles, dump_path)
        else:
            atexit.register(self.show_profiles)
    self.profilers.append([id, profiler, False])
|
def function[add_profiler, parameter[self, id, profiler]]:
constant[ Add a profiler for RDD `id` ]
if <ast.UnaryOp object at 0x7da18dc9bd30> begin[:]
if name[self].profile_dump_path begin[:]
call[name[atexit].register, parameter[name[self].dump_profiles, name[self].profile_dump_path]]
call[name[self].profilers.append, parameter[list[[<ast.Name object at 0x7da18dc9be20>, <ast.Name object at 0x7da18dc985e0>, <ast.Constant object at 0x7da18dc9ba60>]]]]
|
keyword[def] identifier[add_profiler] ( identifier[self] , identifier[id] , identifier[profiler] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[profilers] :
keyword[if] identifier[self] . identifier[profile_dump_path] :
identifier[atexit] . identifier[register] ( identifier[self] . identifier[dump_profiles] , identifier[self] . identifier[profile_dump_path] )
keyword[else] :
identifier[atexit] . identifier[register] ( identifier[self] . identifier[show_profiles] )
identifier[self] . identifier[profilers] . identifier[append] ([ identifier[id] , identifier[profiler] , keyword[False] ])
|
def add_profiler(self, id, profiler):
""" Add a profiler for RDD `id` """
if not self.profilers:
if self.profile_dump_path:
atexit.register(self.dump_profiles, self.profile_dump_path) # depends on [control=['if'], data=[]]
else:
atexit.register(self.show_profiles) # depends on [control=['if'], data=[]]
self.profilers.append([id, profiler, False])
|
def refresh_waiting_tasks(self):
    """
    Refresh the state of all WAITING tasks. This will, for example, update
    Catching Timer Events whose waiting time has passed.
    """
    # Refreshing mutates task state, so refuse on read-only workflows.
    assert not self.read_only
    waiting_tasks = self.get_tasks(Task.WAITING)
    for task in waiting_tasks:
        task.task_spec._update(task)
|
def function[refresh_waiting_tasks, parameter[self]]:
constant[
Refresh the state of all WAITING tasks. This will, for example, update
Catching Timer Events whose waiting time has passed.
]
assert[<ast.UnaryOp object at 0x7da1b01ba6b0>]
for taget[name[my_task]] in starred[call[name[self].get_tasks, parameter[name[Task].WAITING]]] begin[:]
call[name[my_task].task_spec._update, parameter[name[my_task]]]
|
keyword[def] identifier[refresh_waiting_tasks] ( identifier[self] ):
literal[string]
keyword[assert] keyword[not] identifier[self] . identifier[read_only]
keyword[for] identifier[my_task] keyword[in] identifier[self] . identifier[get_tasks] ( identifier[Task] . identifier[WAITING] ):
identifier[my_task] . identifier[task_spec] . identifier[_update] ( identifier[my_task] )
|
def refresh_waiting_tasks(self):
"""
Refresh the state of all WAITING tasks. This will, for example, update
Catching Timer Events whose waiting time has passed.
"""
assert not self.read_only
for my_task in self.get_tasks(Task.WAITING):
my_task.task_spec._update(my_task) # depends on [control=['for'], data=['my_task']]
|
def maybe(cls, val: Optional[T]) -> 'Option[T]':
    """
    Shortcut method to return ``Some`` or :py:data:`NONE` based on ``val``.

    Args:
        val: Some value.

    Returns:
        ``Some(val)`` if the ``val`` is not None, otherwise :py:data:`NONE`.

    Examples:
        >>> Option.maybe(0)
        Some(0)
        >>> Option.maybe(None)
        NONE
    """
    if val is None:
        return cast('Option[T]', NONE)
    return cls.Some(val)
|
def function[maybe, parameter[cls, val]]:
constant[
Shortcut method to return ``Some`` or :py:data:`NONE` based on ``val``.
Args:
val: Some value.
Returns:
``Some(val)`` if the ``val`` is not None, otherwise :py:data:`NONE`.
Examples:
>>> Option.maybe(0)
Some(0)
>>> Option.maybe(None)
NONE
]
return[<ast.IfExp object at 0x7da18bc70580>]
|
keyword[def] identifier[maybe] ( identifier[cls] , identifier[val] : identifier[Optional] [ identifier[T] ])-> literal[string] :
literal[string]
keyword[return] identifier[cast] ( literal[string] , identifier[NONE] ) keyword[if] identifier[val] keyword[is] keyword[None] keyword[else] identifier[cls] . identifier[Some] ( identifier[val] )
|
def maybe(cls, val: Optional[T]) -> 'Option[T]':
"""
Shortcut method to return ``Some`` or :py:data:`NONE` based on ``val``.
Args:
val: Some value.
Returns:
``Some(val)`` if the ``val`` is not None, otherwise :py:data:`NONE`.
Examples:
>>> Option.maybe(0)
Some(0)
>>> Option.maybe(None)
NONE
"""
return cast('Option[T]', NONE) if val is None else cls.Some(val)
|
def addValueToField(self, i, value=None):
    """Add 'value' to the field i.

    Parameters:
    --------------------------------------------------------------------
    value: value to be added; if None, the next value is drawn from the
           field's data-class generator
    i:     index of the field the value is added to

    Returns:
    --------------------------------------------------------------------
    The value that was added (supplied or auto-generated).
    """
    assert len(self.fields) > i
    if value is None:
        # Auto-generate the next value for this field.
        value = self.fields[i].dataClass.getNext()
    self.fields[i].addValue(value)
    # Bug fix: the explicit-value branch previously returned None while the
    # auto-generate branch returned the value; now both return it.
    return value
|
def function[addValueToField, parameter[self, i, value]]:
constant[Add 'value' to the field i.
Parameters:
--------------------------------------------------------------------
value: value to be added
i: value is added to field i
]
assert[compare[call[name[len], parameter[name[self].fields]] greater[>] name[i]]]
if compare[name[value] is constant[None]] begin[:]
variable[value] assign[=] call[call[name[self].fields][name[i]].dataClass.getNext, parameter[]]
call[call[name[self].fields][name[i]].addValue, parameter[name[value]]]
return[name[value]]
|
keyword[def] identifier[addValueToField] ( identifier[self] , identifier[i] , identifier[value] = keyword[None] ):
literal[string]
keyword[assert] ( identifier[len] ( identifier[self] . identifier[fields] )> identifier[i] )
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[value] = identifier[self] . identifier[fields] [ identifier[i] ]. identifier[dataClass] . identifier[getNext] ()
identifier[self] . identifier[fields] [ identifier[i] ]. identifier[addValue] ( identifier[value] )
keyword[return] identifier[value]
keyword[else] : identifier[self] . identifier[fields] [ identifier[i] ]. identifier[addValue] ( identifier[value] )
|
def addValueToField(self, i, value=None):
"""Add 'value' to the field i.
Parameters:
--------------------------------------------------------------------
value: value to be added
i: value is added to field i
"""
assert len(self.fields) > i
if value is None:
value = self.fields[i].dataClass.getNext()
self.fields[i].addValue(value)
return value # depends on [control=['if'], data=['value']]
else:
self.fields[i].addValue(value)
|
def cancelMarketData(self, contracts=None):
    """
    Cancel streaming market data for contract
    https://www.interactivebrokers.com/en/software/api/apiguide/java/cancelmktdata.htm

    Parameters:
        contracts: a single contract, a list of contracts, or None to
            cancel streaming data for every tracked contract.
    """
    # Fix: identity test (`is None`) instead of `== None`, which can be
    # hijacked by a custom __eq__ on contract-like objects (PEP 8).
    if contracts is None:
        contracts = list(self.contracts.values())
    elif not isinstance(contracts, list):
        contracts = [contracts]
    for contract in contracts:
        # Ticker ids are keyed by the canonical contract string.
        tickerId = self.tickerId(self.contractString(contract))
        self.ibConn.cancelMktData(tickerId=tickerId)
|
def function[cancelMarketData, parameter[self, contracts]]:
constant[
Cancel streaming market data for contract
https://www.interactivebrokers.com/en/software/api/apiguide/java/cancelmktdata.htm
]
if compare[name[contracts] equal[==] constant[None]] begin[:]
variable[contracts] assign[=] call[name[list], parameter[call[name[self].contracts.values, parameter[]]]]
for taget[name[contract]] in starred[name[contracts]] begin[:]
variable[tickerId] assign[=] call[name[self].tickerId, parameter[call[name[self].contractString, parameter[name[contract]]]]]
call[name[self].ibConn.cancelMktData, parameter[]]
|
keyword[def] identifier[cancelMarketData] ( identifier[self] , identifier[contracts] = keyword[None] ):
literal[string]
keyword[if] identifier[contracts] == keyword[None] :
identifier[contracts] = identifier[list] ( identifier[self] . identifier[contracts] . identifier[values] ())
keyword[elif] keyword[not] identifier[isinstance] ( identifier[contracts] , identifier[list] ):
identifier[contracts] =[ identifier[contracts] ]
keyword[for] identifier[contract] keyword[in] identifier[contracts] :
identifier[tickerId] = identifier[self] . identifier[tickerId] ( identifier[self] . identifier[contractString] ( identifier[contract] ))
identifier[self] . identifier[ibConn] . identifier[cancelMktData] ( identifier[tickerId] = identifier[tickerId] )
|
def cancelMarketData(self, contracts=None):
"""
Cancel streaming market data for contract
https://www.interactivebrokers.com/en/software/api/apiguide/java/cancelmktdata.htm
"""
if contracts == None:
contracts = list(self.contracts.values()) # depends on [control=['if'], data=['contracts']]
elif not isinstance(contracts, list):
contracts = [contracts] # depends on [control=['if'], data=[]]
for contract in contracts:
# tickerId = self.tickerId(contract.m_symbol)
tickerId = self.tickerId(self.contractString(contract))
self.ibConn.cancelMktData(tickerId=tickerId) # depends on [control=['for'], data=['contract']]
|
def setRandomParams(self):
    """
    set random hyperparameters
    """
    # Draw one standard-normal sample per hyperparameter and install them.
    n_params = self.getNumberParams()
    self.setParams(SP.randn(n_params))
|
def function[setRandomParams, parameter[self]]:
constant[
set random hyperparameters
]
variable[params] assign[=] call[name[SP].randn, parameter[call[name[self].getNumberParams, parameter[]]]]
call[name[self].setParams, parameter[name[params]]]
|
keyword[def] identifier[setRandomParams] ( identifier[self] ):
literal[string]
identifier[params] = identifier[SP] . identifier[randn] ( identifier[self] . identifier[getNumberParams] ())
identifier[self] . identifier[setParams] ( identifier[params] )
|
def setRandomParams(self):
"""
set random hyperparameters
"""
params = SP.randn(self.getNumberParams())
self.setParams(params)
|
def db_remove(name, user=None, password=None, host=None, port=None):
    '''
    Remove a database

    name
        Database name to remove

    user
        The user to connect as

    password
        The password of the user

    host
        The host to connect to

    port
        The port to connect to

    CLI Example:

    .. code-block:: bash

        salt '*' influxdb08.db_remove <name>
        salt '*' influxdb08.db_remove <name> <user> <password> <host> <port>
    '''
    # Only attempt deletion when the database actually exists.
    if db_exists(name, user, password, host, port):
        client = _client(user=user, password=password, host=host, port=port)
        return client.delete_database(name)
    log.info("DB '%s' does not exist", name)
    return False
|
def function[db_remove, parameter[name, user, password, host, port]]:
constant[
Remove a database
name
Database name to remove
user
The user to connect as
password
The password of the user
host
The host to connect to
port
The port to connect to
CLI Example:
.. code-block:: bash
salt '*' influxdb08.db_remove <name>
salt '*' influxdb08.db_remove <name> <user> <password> <host> <port>
]
if <ast.UnaryOp object at 0x7da204621300> begin[:]
call[name[log].info, parameter[constant[DB '%s' does not exist], name[name]]]
return[constant[False]]
variable[client] assign[=] call[name[_client], parameter[]]
return[call[name[client].delete_database, parameter[name[name]]]]
|
keyword[def] identifier[db_remove] ( identifier[name] , identifier[user] = keyword[None] , identifier[password] = keyword[None] , identifier[host] = keyword[None] , identifier[port] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[db_exists] ( identifier[name] , identifier[user] , identifier[password] , identifier[host] , identifier[port] ):
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
keyword[return] keyword[False]
identifier[client] = identifier[_client] ( identifier[user] = identifier[user] , identifier[password] = identifier[password] , identifier[host] = identifier[host] , identifier[port] = identifier[port] )
keyword[return] identifier[client] . identifier[delete_database] ( identifier[name] )
|
def db_remove(name, user=None, password=None, host=None, port=None):
"""
Remove a database
name
Database name to remove
user
The user to connect as
password
The password of the user
host
The host to connect to
port
The port to connect to
CLI Example:
.. code-block:: bash
salt '*' influxdb08.db_remove <name>
salt '*' influxdb08.db_remove <name> <user> <password> <host> <port>
"""
if not db_exists(name, user, password, host, port):
log.info("DB '%s' does not exist", name)
return False # depends on [control=['if'], data=[]]
client = _client(user=user, password=password, host=host, port=port)
return client.delete_database(name)
|
def create_widget(self):
    """ Create the underlying widget.
    """
    d = self.declaration
    # Fall back to the default toggle style when the declaration has none.
    style = d.style or "@attr/buttonStyleToggle"
    self.widget = ToggleButton(self.get_context(), None, style)
|
def function[create_widget, parameter[self]]:
constant[ Create the underlying widget.
]
variable[d] assign[=] name[self].declaration
name[self].widget assign[=] call[name[ToggleButton], parameter[call[name[self].get_context, parameter[]], constant[None], <ast.BoolOp object at 0x7da1b1b1ae30>]]
|
keyword[def] identifier[create_widget] ( identifier[self] ):
literal[string]
identifier[d] = identifier[self] . identifier[declaration]
identifier[self] . identifier[widget] = identifier[ToggleButton] ( identifier[self] . identifier[get_context] (), keyword[None] ,
identifier[d] . identifier[style] keyword[or] literal[string] )
|
def create_widget(self):
""" Create the underlying widget.
"""
d = self.declaration
self.widget = ToggleButton(self.get_context(), None, d.style or '@attr/buttonStyleToggle')
|
def add(self, properties):
    """
    Add a faked HBA resource.

    Parameters:

      properties (dict):
        Resource properties.

        Special handling and requirements for certain properties:

        * 'element-id' will be auto-generated with a unique value across
          all instances of this resource type, if not specified.
        * 'element-uri' will be auto-generated based upon the element ID,
          if not specified.
        * 'class' will be auto-generated to 'hba',
          if not specified.
        * 'adapter-port-uri' identifies the backing FCP port for this HBA
          and is required to be specified.
        * 'device-number' will be auto-generated with a unique value
          within the partition in the range 0x8000 to 0xFFFF, if not
          specified.

        This method also updates the 'hba-uris' property in the parent
        faked Partition resource, by adding the URI for the faked HBA
        resource.

    Returns:
      :class:`~zhmcclient_mock.FakedHba`: The faked HBA resource.

    Raises:
      :exc:`zhmcclient_mock.InputError`: Some issue with the input
        properties.
    """
    new_hba = super(FakedHbaManager, self).add(properties)
    partition = self.parent
    # Reflect the new HBA in the parent partition's 'hba-uris' list
    # (the previous comment said "NIC"; this manager handles HBAs).
    assert 'hba-uris' in partition.properties
    partition.properties['hba-uris'].append(new_hba.uri)
    # Allocate a default device-number from the partition if not specified.
    if 'device-number' not in new_hba.properties:
        devno = partition.devno_alloc()
        new_hba.properties['device-number'] = devno
    # Allocate a default wwpn from the partition if not specified.
    if 'wwpn' not in new_hba.properties:
        wwpn = partition.wwpn_alloc()
        new_hba.properties['wwpn'] = wwpn
    return new_hba
|
def function[add, parameter[self, properties]]:
constant[
Add a faked HBA resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'hba',
if not specified.
* 'adapter-port-uri' identifies the backing FCP port for this HBA
and is required to be specified.
* 'device-number' will be auto-generated with a unique value
within the partition in the range 0x8000 to 0xFFFF, if not
specified.
This method also updates the 'hba-uris' property in the parent
faked Partition resource, by adding the URI for the faked HBA
resource.
Returns:
:class:`~zhmcclient_mock.FakedHba`: The faked HBA resource.
Raises:
:exc:`zhmcclient_mock.InputError`: Some issue with the input
properties.
]
variable[new_hba] assign[=] call[call[name[super], parameter[name[FakedHbaManager], name[self]]].add, parameter[name[properties]]]
variable[partition] assign[=] name[self].parent
assert[compare[constant[hba-uris] in name[partition].properties]]
call[call[name[partition].properties][constant[hba-uris]].append, parameter[name[new_hba].uri]]
if compare[constant[device-number] <ast.NotIn object at 0x7da2590d7190> name[new_hba].properties] begin[:]
variable[devno] assign[=] call[name[partition].devno_alloc, parameter[]]
call[name[new_hba].properties][constant[device-number]] assign[=] name[devno]
if compare[constant[wwpn] <ast.NotIn object at 0x7da2590d7190> name[new_hba].properties] begin[:]
variable[wwpn] assign[=] call[name[partition].wwpn_alloc, parameter[]]
call[name[new_hba].properties][constant[wwpn]] assign[=] name[wwpn]
return[name[new_hba]]
|
keyword[def] identifier[add] ( identifier[self] , identifier[properties] ):
literal[string]
identifier[new_hba] = identifier[super] ( identifier[FakedHbaManager] , identifier[self] ). identifier[add] ( identifier[properties] )
identifier[partition] = identifier[self] . identifier[parent]
keyword[assert] literal[string] keyword[in] identifier[partition] . identifier[properties]
identifier[partition] . identifier[properties] [ literal[string] ]. identifier[append] ( identifier[new_hba] . identifier[uri] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[new_hba] . identifier[properties] :
identifier[devno] = identifier[partition] . identifier[devno_alloc] ()
identifier[new_hba] . identifier[properties] [ literal[string] ]= identifier[devno]
keyword[if] literal[string] keyword[not] keyword[in] identifier[new_hba] . identifier[properties] :
identifier[wwpn] = identifier[partition] . identifier[wwpn_alloc] ()
identifier[new_hba] . identifier[properties] [ literal[string] ]= identifier[wwpn]
keyword[return] identifier[new_hba]
|
def add(self, properties):
"""
Add a faked HBA resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'hba',
if not specified.
* 'adapter-port-uri' identifies the backing FCP port for this HBA
and is required to be specified.
* 'device-number' will be auto-generated with a unique value
within the partition in the range 0x8000 to 0xFFFF, if not
specified.
This method also updates the 'hba-uris' property in the parent
faked Partition resource, by adding the URI for the faked HBA
resource.
Returns:
:class:`~zhmcclient_mock.FakedHba`: The faked HBA resource.
Raises:
:exc:`zhmcclient_mock.InputError`: Some issue with the input
properties.
"""
new_hba = super(FakedHbaManager, self).add(properties)
partition = self.parent
# Reflect the new NIC in the partition
assert 'hba-uris' in partition.properties
partition.properties['hba-uris'].append(new_hba.uri)
# Create a default device-number if not specified
if 'device-number' not in new_hba.properties:
devno = partition.devno_alloc()
new_hba.properties['device-number'] = devno # depends on [control=['if'], data=[]]
# Create a default wwpn if not specified
if 'wwpn' not in new_hba.properties:
wwpn = partition.wwpn_alloc()
new_hba.properties['wwpn'] = wwpn # depends on [control=['if'], data=[]]
return new_hba
|
def parse_from_args(self, l):
    """Secondary customization point, called from getFromKwargs to turn
    a validated value into a single property value"""
    # Single-value parameters use only the first submitted argument;
    # multi-value parameters parse every argument.
    if not self.multiple:
        return self.parse_from_arg(l[0])
    return [self.parse_from_arg(arg) for arg in l]
|
def function[parse_from_args, parameter[self, l]]:
constant[Secondary customization point, called from getFromKwargs to turn
a validated value into a single property value]
if name[self].multiple begin[:]
return[<ast.ListComp object at 0x7da1b20981f0>]
return[call[name[self].parse_from_arg, parameter[call[name[l]][constant[0]]]]]
|
keyword[def] identifier[parse_from_args] ( identifier[self] , identifier[l] ):
literal[string]
keyword[if] identifier[self] . identifier[multiple] :
keyword[return] [ identifier[self] . identifier[parse_from_arg] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[l] ]
keyword[return] identifier[self] . identifier[parse_from_arg] ( identifier[l] [ literal[int] ])
|
def parse_from_args(self, l):
"""Secondary customization point, called from getFromKwargs to turn
a validated value into a single property value"""
if self.multiple:
return [self.parse_from_arg(arg) for arg in l] # depends on [control=['if'], data=[]]
return self.parse_from_arg(l[0])
|
def recording_command(event):
    '''Run the actual command to record the a/v material.

    Builds the configured capture command line for *event*, launches it,
    and supervises the process: sends systemd watchdog pings while it runs
    and delivers an optional custom signal, SIGTERM and SIGKILL at
    configured offsets relative to the event end. Preview files are
    removed (best effort) once the process exits.

    Returns:
        list of ``(flavor, path)`` tuples for the recorded files.

    Raises:
        RuntimeError: if the capture process exits with a positive return
            code other than the configured acceptable exit code.
    '''
    conf = config('capture')
    # Prepare command line: substitute remaining duration, target
    # directory, recording name and preview directory into the template.
    cmd = conf['command']
    cmd = cmd.replace('{{time}}', str(event.remaining_duration(timestamp())))
    cmd = cmd.replace('{{dir}}', event.directory())
    cmd = cmd.replace('{{name}}', event.name())
    cmd = cmd.replace('{{previewdir}}', conf['preview_dir'])
    # Signal configuration: a negative configured offset disables that
    # signal (the computed trigger time becomes 0, which is falsy below);
    # otherwise the signal fires at event.end + offset.
    sigterm_time = conf['sigterm_time']
    sigkill_time = conf['sigkill_time']
    sigcustom_time = conf['sigcustom_time']
    sigcustom_time = 0 if sigcustom_time < 0 else event.end + sigcustom_time
    sigterm_time = 0 if sigterm_time < 0 else event.end + sigterm_time
    sigkill_time = 0 if sigkill_time < 0 else event.end + sigkill_time
    # Launch capture command
    logger.info(cmd)
    args = shlex.split(cmd)
    # Fallback for Python versions without subprocess.DEVNULL: open
    # os.devnull manually and close it after Popen if we created it.
    DEVNULL = getattr(subprocess, 'DEVNULL', os.open(os.devnull, os.O_RDWR))
    captureproc = subprocess.Popen(args, stdin=DEVNULL)
    hasattr(subprocess, 'DEVNULL') or os.close(DEVNULL)
    # Set systemd status
    notify.notify('STATUS=Capturing')
    # Supervise the process: poll until it exits, pinging the watchdog and
    # delivering due signals (each at most once) on every iteration.
    while captureproc.poll() is None:
        notify.notify('WATCHDOG=1')
        if sigcustom_time and timestamp() > sigcustom_time:
            logger.info("Sending custom signal to capture process")
            captureproc.send_signal(conf['sigcustom'])
            sigcustom_time = 0  # send only once
        if sigterm_time and timestamp() > sigterm_time:
            logger.info("Terminating capture process")
            captureproc.terminate()
            sigterm_time = 0  # send only once
        elif sigkill_time and timestamp() > sigkill_time:
            logger.warning("Killing capture process")
            captureproc.kill()
            sigkill_time = 0  # send only once
        time.sleep(0.1)
    # Remove preview files (best effort; failures are only logged):
    for preview in conf['preview']:
        try:
            os.remove(preview.replace('{{previewdir}}', conf['preview_dir']))
        except OSError:
            logger.warning('Could not remove preview files')
            logger.warning(traceback.format_exc())
    # Check process for errors; the configured exit code counts as success.
    exitcode = config()['capture']['exit_code']
    if captureproc.poll() > 0 and captureproc.returncode != exitcode:
        raise RuntimeError('Recording failed (%i)' % captureproc.returncode)
    # Reset systemd status
    notify.notify('STATUS=Waiting')
    # Return [(flavor,path),…] after substituting directory and name into
    # the configured output file templates.
    files = (f.replace('{{dir}}', event.directory()) for f in conf['files'])
    files = (f.replace('{{name}}', event.name()) for f in files)
    return list(zip(conf['flavors'], files))
|
def function[recording_command, parameter[event]]:
constant[Run the actual command to record the a/v material.
]
variable[conf] assign[=] call[name[config], parameter[constant[capture]]]
variable[cmd] assign[=] call[name[conf]][constant[command]]
variable[cmd] assign[=] call[name[cmd].replace, parameter[constant[{{time}}], call[name[str], parameter[call[name[event].remaining_duration, parameter[call[name[timestamp], parameter[]]]]]]]]
variable[cmd] assign[=] call[name[cmd].replace, parameter[constant[{{dir}}], call[name[event].directory, parameter[]]]]
variable[cmd] assign[=] call[name[cmd].replace, parameter[constant[{{name}}], call[name[event].name, parameter[]]]]
variable[cmd] assign[=] call[name[cmd].replace, parameter[constant[{{previewdir}}], call[name[conf]][constant[preview_dir]]]]
variable[sigterm_time] assign[=] call[name[conf]][constant[sigterm_time]]
variable[sigkill_time] assign[=] call[name[conf]][constant[sigkill_time]]
variable[sigcustom_time] assign[=] call[name[conf]][constant[sigcustom_time]]
variable[sigcustom_time] assign[=] <ast.IfExp object at 0x7da20c6aa410>
variable[sigterm_time] assign[=] <ast.IfExp object at 0x7da20c6abfd0>
variable[sigkill_time] assign[=] <ast.IfExp object at 0x7da20c6ab5e0>
call[name[logger].info, parameter[name[cmd]]]
variable[args] assign[=] call[name[shlex].split, parameter[name[cmd]]]
variable[DEVNULL] assign[=] call[name[getattr], parameter[name[subprocess], constant[DEVNULL], call[name[os].open, parameter[name[os].devnull, name[os].O_RDWR]]]]
variable[captureproc] assign[=] call[name[subprocess].Popen, parameter[name[args]]]
<ast.BoolOp object at 0x7da20c6aa920>
call[name[notify].notify, parameter[constant[STATUS=Capturing]]]
while compare[call[name[captureproc].poll, parameter[]] is constant[None]] begin[:]
call[name[notify].notify, parameter[constant[WATCHDOG=1]]]
if <ast.BoolOp object at 0x7da20c6a8370> begin[:]
call[name[logger].info, parameter[constant[Sending custom signal to capture process]]]
call[name[captureproc].send_signal, parameter[call[name[conf]][constant[sigcustom]]]]
variable[sigcustom_time] assign[=] constant[0]
if <ast.BoolOp object at 0x7da20c6aaa70> begin[:]
call[name[logger].info, parameter[constant[Terminating capture process]]]
call[name[captureproc].terminate, parameter[]]
variable[sigterm_time] assign[=] constant[0]
call[name[time].sleep, parameter[constant[0.1]]]
for taget[name[preview]] in starred[call[name[conf]][constant[preview]]] begin[:]
<ast.Try object at 0x7da20c6a99c0>
variable[exitcode] assign[=] call[call[call[name[config], parameter[]]][constant[capture]]][constant[exit_code]]
if <ast.BoolOp object at 0x7da1b02a6230> begin[:]
<ast.Raise object at 0x7da1b02a4ee0>
call[name[notify].notify, parameter[constant[STATUS=Waiting]]]
variable[files] assign[=] <ast.GeneratorExp object at 0x7da1b02a6050>
variable[files] assign[=] <ast.GeneratorExp object at 0x7da1b02a4e80>
return[call[name[list], parameter[call[name[zip], parameter[call[name[conf]][constant[flavors]], name[files]]]]]]
|
keyword[def] identifier[recording_command] ( identifier[event] ):
literal[string]
identifier[conf] = identifier[config] ( literal[string] )
identifier[cmd] = identifier[conf] [ literal[string] ]
identifier[cmd] = identifier[cmd] . identifier[replace] ( literal[string] , identifier[str] ( identifier[event] . identifier[remaining_duration] ( identifier[timestamp] ())))
identifier[cmd] = identifier[cmd] . identifier[replace] ( literal[string] , identifier[event] . identifier[directory] ())
identifier[cmd] = identifier[cmd] . identifier[replace] ( literal[string] , identifier[event] . identifier[name] ())
identifier[cmd] = identifier[cmd] . identifier[replace] ( literal[string] , identifier[conf] [ literal[string] ])
identifier[sigterm_time] = identifier[conf] [ literal[string] ]
identifier[sigkill_time] = identifier[conf] [ literal[string] ]
identifier[sigcustom_time] = identifier[conf] [ literal[string] ]
identifier[sigcustom_time] = literal[int] keyword[if] identifier[sigcustom_time] < literal[int] keyword[else] identifier[event] . identifier[end] + identifier[sigcustom_time]
identifier[sigterm_time] = literal[int] keyword[if] identifier[sigterm_time] < literal[int] keyword[else] identifier[event] . identifier[end] + identifier[sigterm_time]
identifier[sigkill_time] = literal[int] keyword[if] identifier[sigkill_time] < literal[int] keyword[else] identifier[event] . identifier[end] + identifier[sigkill_time]
identifier[logger] . identifier[info] ( identifier[cmd] )
identifier[args] = identifier[shlex] . identifier[split] ( identifier[cmd] )
identifier[DEVNULL] = identifier[getattr] ( identifier[subprocess] , literal[string] , identifier[os] . identifier[open] ( identifier[os] . identifier[devnull] , identifier[os] . identifier[O_RDWR] ))
identifier[captureproc] = identifier[subprocess] . identifier[Popen] ( identifier[args] , identifier[stdin] = identifier[DEVNULL] )
identifier[hasattr] ( identifier[subprocess] , literal[string] ) keyword[or] identifier[os] . identifier[close] ( identifier[DEVNULL] )
identifier[notify] . identifier[notify] ( literal[string] )
keyword[while] identifier[captureproc] . identifier[poll] () keyword[is] keyword[None] :
identifier[notify] . identifier[notify] ( literal[string] )
keyword[if] identifier[sigcustom_time] keyword[and] identifier[timestamp] ()> identifier[sigcustom_time] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[captureproc] . identifier[send_signal] ( identifier[conf] [ literal[string] ])
identifier[sigcustom_time] = literal[int]
keyword[if] identifier[sigterm_time] keyword[and] identifier[timestamp] ()> identifier[sigterm_time] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[captureproc] . identifier[terminate] ()
identifier[sigterm_time] = literal[int]
keyword[elif] identifier[sigkill_time] keyword[and] identifier[timestamp] ()> identifier[sigkill_time] :
identifier[logger] . identifier[warning] ( literal[string] )
identifier[captureproc] . identifier[kill] ()
identifier[sigkill_time] = literal[int]
identifier[time] . identifier[sleep] ( literal[int] )
keyword[for] identifier[preview] keyword[in] identifier[conf] [ literal[string] ]:
keyword[try] :
identifier[os] . identifier[remove] ( identifier[preview] . identifier[replace] ( literal[string] , identifier[conf] [ literal[string] ]))
keyword[except] identifier[OSError] :
identifier[logger] . identifier[warning] ( literal[string] )
identifier[logger] . identifier[warning] ( identifier[traceback] . identifier[format_exc] ())
identifier[exitcode] = identifier[config] ()[ literal[string] ][ literal[string] ]
keyword[if] identifier[captureproc] . identifier[poll] ()> literal[int] keyword[and] identifier[captureproc] . identifier[returncode] != identifier[exitcode] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[captureproc] . identifier[returncode] )
identifier[notify] . identifier[notify] ( literal[string] )
identifier[files] =( identifier[f] . identifier[replace] ( literal[string] , identifier[event] . identifier[directory] ()) keyword[for] identifier[f] keyword[in] identifier[conf] [ literal[string] ])
identifier[files] =( identifier[f] . identifier[replace] ( literal[string] , identifier[event] . identifier[name] ()) keyword[for] identifier[f] keyword[in] identifier[files] )
keyword[return] identifier[list] ( identifier[zip] ( identifier[conf] [ literal[string] ], identifier[files] ))
|
def recording_command(event):
    """Run the actual command to record the a/v material.

    Fills the ``{{time}}``, ``{{dir}}``, ``{{name}}`` and
    ``{{previewdir}}`` placeholders in the configured capture command,
    launches it as a subprocess and supervises it until it exits,
    optionally sending a custom signal, SIGTERM and SIGKILL at the
    configured offsets relative to the event end.

    :param event: Recording event; ``remaining_duration()``,
        ``directory()``, ``name()`` and ``end`` are read.
    :return: List of ``(flavor, path)`` tuples for the recorded files.
    :raises RuntimeError: If the capture process exits with a positive
        return code other than the configured expected exit code.
    """
    conf = config('capture')
    # Prepare command line: substitute template placeholders
    cmd = conf['command']
    cmd = cmd.replace('{{time}}', str(event.remaining_duration(timestamp())))
    cmd = cmd.replace('{{dir}}', event.directory())
    cmd = cmd.replace('{{name}}', event.name())
    cmd = cmd.replace('{{previewdir}}', conf['preview_dir'])
    # Signal configuration: a negative configured offset disables the
    # signal (time 0); otherwise schedule relative to the event end
    sigterm_time = conf['sigterm_time']
    sigkill_time = conf['sigkill_time']
    sigcustom_time = conf['sigcustom_time']
    sigcustom_time = 0 if sigcustom_time < 0 else event.end + sigcustom_time
    sigterm_time = 0 if sigterm_time < 0 else event.end + sigterm_time
    sigkill_time = 0 if sigkill_time < 0 else event.end + sigkill_time
    # Launch capture command
    logger.info(cmd)
    args = shlex.split(cmd)
    # subprocess.DEVNULL only exists on Python 3.3+; fall back to an
    # explicitly opened os.devnull descriptor on older interpreters
    DEVNULL = getattr(subprocess, 'DEVNULL', os.open(os.devnull, os.O_RDWR))
    captureproc = subprocess.Popen(args, stdin=DEVNULL)
    # If we opened the descriptor ourselves (no subprocess.DEVNULL),
    # close it again now that the child has inherited it
    hasattr(subprocess, 'DEVNULL') or os.close(DEVNULL)
    # Set systemd status
    notify.notify('STATUS=Capturing')
    # Supervise the process, feeding the systemd watchdog and sending
    # each scheduled signal at most once
    while captureproc.poll() is None:
        notify.notify('WATCHDOG=1')
        if sigcustom_time and timestamp() > sigcustom_time:
            logger.info('Sending custom signal to capture process')
            captureproc.send_signal(conf['sigcustom'])
            sigcustom_time = 0  # send only once
        if sigterm_time and timestamp() > sigterm_time:
            logger.info('Terminating capture process')
            captureproc.terminate()
            sigterm_time = 0  # send only once
        elif sigkill_time and timestamp() > sigkill_time:
            logger.warning('Killing capture process')
            captureproc.kill()
            sigkill_time = 0  # send only once
        time.sleep(0.1)
    # Remove preview files (best effort; failures are only logged):
    for preview in conf['preview']:
        try:
            os.remove(preview.replace('{{previewdir}}', conf['preview_dir']))
        except OSError:
            logger.warning('Could not remove preview files')
            logger.warning(traceback.format_exc())
    # Check process for errors; the configured exit_code is treated as
    # an additional acceptable (non-error) return code
    exitcode = config()['capture']['exit_code']
    if captureproc.poll() > 0 and captureproc.returncode != exitcode:
        raise RuntimeError('Recording failed (%i)' % captureproc.returncode)
    # Reset systemd status
    notify.notify('STATUS=Waiting')
    # Return [(flavor,path),…] with placeholders resolved per file
    files = (f.replace('{{dir}}', event.directory()) for f in conf['files'])
    files = (f.replace('{{name}}', event.name()) for f in files)
    return list(zip(conf['flavors'], files))
|
def ping():
    '''
    Is the chassis responding?

    Queries the chassis via ``dracr.system_info`` using the cached
    connection details.

    :return: Returns False if the chassis didn't respond, True otherwise.
    '''
    r = __salt__['dracr.system_info'](host=DETAILS['host'],
                                      admin_username=DETAILS['admin_username'],
                                      admin_password=DETAILS['admin_password'])
    # A retcode of 1 means the chassis could not be reached; any other
    # value (or a missing retcode) counts as a successful response.
    # NOTE: the former try/except fallback on r['dict'] was unreachable
    # (both branches of the if/else returned) and has been removed.
    return r.get('retcode', 0) != 1
|
def function[ping, parameter[]]:
constant[
Is the chassis responding?
:return: Returns False if the chassis didn't respond, True otherwise.
]
variable[r] assign[=] call[call[name[__salt__]][constant[dracr.system_info]], parameter[]]
if compare[call[name[r].get, parameter[constant[retcode], constant[0]]] equal[==] constant[1]] begin[:]
return[constant[False]]
<ast.Try object at 0x7da18ede4820>
|
keyword[def] identifier[ping] ():
literal[string]
identifier[r] = identifier[__salt__] [ literal[string] ]( identifier[host] = identifier[DETAILS] [ literal[string] ],
identifier[admin_username] = identifier[DETAILS] [ literal[string] ],
identifier[admin_password] = identifier[DETAILS] [ literal[string] ])
keyword[if] identifier[r] . identifier[get] ( literal[string] , literal[int] )== literal[int] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[True]
keyword[try] :
keyword[return] identifier[r] [ literal[string] ]. identifier[get] ( literal[string] , keyword[False] )
keyword[except] identifier[Exception] :
keyword[return] keyword[False]
|
def ping():
"""
Is the chassis responding?
:return: Returns False if the chassis didn't respond, True otherwise.
"""
r = __salt__['dracr.system_info'](host=DETAILS['host'], admin_username=DETAILS['admin_username'], admin_password=DETAILS['admin_password'])
if r.get('retcode', 0) == 1:
return False # depends on [control=['if'], data=[]]
else:
return True
try:
return r['dict'].get('ret', False) # depends on [control=['try'], data=[]]
except Exception:
return False # depends on [control=['except'], data=[]]
|
def _next_cTn_id(self):
"""Return the next available unique ID (int) for p:cTn element."""
cTn_id_strs = self.xpath('/p:sld/p:timing//p:cTn/@id')
ids = [int(id_str) for id_str in cTn_id_strs]
return max(ids) + 1
|
def function[_next_cTn_id, parameter[self]]:
constant[Return the next available unique ID (int) for p:cTn element.]
variable[cTn_id_strs] assign[=] call[name[self].xpath, parameter[constant[/p:sld/p:timing//p:cTn/@id]]]
variable[ids] assign[=] <ast.ListComp object at 0x7da20c76d0f0>
return[binary_operation[call[name[max], parameter[name[ids]]] + constant[1]]]
|
keyword[def] identifier[_next_cTn_id] ( identifier[self] ):
literal[string]
identifier[cTn_id_strs] = identifier[self] . identifier[xpath] ( literal[string] )
identifier[ids] =[ identifier[int] ( identifier[id_str] ) keyword[for] identifier[id_str] keyword[in] identifier[cTn_id_strs] ]
keyword[return] identifier[max] ( identifier[ids] )+ literal[int]
|
def _next_cTn_id(self):
"""Return the next available unique ID (int) for p:cTn element."""
cTn_id_strs = self.xpath('/p:sld/p:timing//p:cTn/@id')
ids = [int(id_str) for id_str in cTn_id_strs]
return max(ids) + 1
|
def first(self):
    """
    First chunk, i.e. the value stored at the all-zero index.
    """
    origin = tuple(zeros(len(self.values.shape)))
    return self.values[origin]
|
def function[first, parameter[self]]:
constant[
First chunk
]
return[call[name[self].values][call[name[tuple], parameter[call[name[zeros], parameter[call[name[len], parameter[name[self].values.shape]]]]]]]]
|
keyword[def] identifier[first] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[values] [ identifier[tuple] ( identifier[zeros] ( identifier[len] ( identifier[self] . identifier[values] . identifier[shape] )))]
|
def first(self):
"""
First chunk
"""
return self.values[tuple(zeros(len(self.values.shape)))]
|
def _path_hash(path, transform, kwargs):
    """
    Generate a hash of source file path + transform + args
    """
    arg_parts = []
    for key, value in sorted(iteritems(kwargs)):
        arg_parts.append("%s:%r:%s" % (key, value, type(value)))
    # Layout: <abspath>:<transform>:{<comma-joined args>}
    srcinfo = "%s:%s:{%s}" % (os.path.abspath(path), transform,
                              ",".join(arg_parts))
    return digest_string(srcinfo)
|
def function[_path_hash, parameter[path, transform, kwargs]]:
constant[
Generate a hash of source file path + transform + args
]
variable[sortedargs] assign[=] <ast.ListComp object at 0x7da1b1279b40>
variable[srcinfo] assign[=] call[constant[{path}:{transform}:{{{kwargs}}}].format, parameter[]]
return[call[name[digest_string], parameter[name[srcinfo]]]]
|
keyword[def] identifier[_path_hash] ( identifier[path] , identifier[transform] , identifier[kwargs] ):
literal[string]
identifier[sortedargs] =[ literal[string] %( identifier[key] , identifier[value] , identifier[type] ( identifier[value] ))
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[sorted] ( identifier[iteritems] ( identifier[kwargs] ))]
identifier[srcinfo] = literal[string] . identifier[format] ( identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] ),
identifier[transform] = identifier[transform] ,
identifier[kwargs] = literal[string] . identifier[join] ( identifier[sortedargs] ))
keyword[return] identifier[digest_string] ( identifier[srcinfo] )
|
def _path_hash(path, transform, kwargs):
"""
Generate a hash of source file path + transform + args
"""
sortedargs = ['%s:%r:%s' % (key, value, type(value)) for (key, value) in sorted(iteritems(kwargs))]
srcinfo = '{path}:{transform}:{{{kwargs}}}'.format(path=os.path.abspath(path), transform=transform, kwargs=','.join(sortedargs))
return digest_string(srcinfo)
|
def main(argv=None):
    """Execute each module in the same interpreter.
    Args:
        argv: Each item of argv will be treated as a separate
        module with potential arguments
        each item may be a string or a sequence of strings.
        If a given argument is a string, then treat string as
        shell arguments and split accordingly.
        If the given argument is a tuple or list, then assume
        that the given arguments are already parsed.
        The first item of each argument should be a module or module path
    """
    cli_args = sys.argv[1:] if argv is None else argv
    parsed = _get_parser().parse_args(cli_args)
    mand(parsed.module_seq)
|
def function[main, parameter[argv]]:
constant[Execute each module in the same interpreter.
Args:
argv: Each item of argv will be treated as a separate
module with potential arguments
each item may be a string or a sequence of strings.
If a given argument is a string, then treat string as
shell arguments and split accordingly.
If the given argument is a tuple or list, then assume
that the given arguments are already parsed.
The first item of each argument should be a module or module path
]
if compare[name[argv] is constant[None]] begin[:]
variable[argv] assign[=] call[name[sys].argv][<ast.Slice object at 0x7da20c6c4460>]
variable[args] assign[=] call[call[name[_get_parser], parameter[]].parse_args, parameter[name[argv]]]
call[name[mand], parameter[name[args].module_seq]]
|
keyword[def] identifier[main] ( identifier[argv] = keyword[None] ):
literal[string]
keyword[if] identifier[argv] keyword[is] keyword[None] :
identifier[argv] = identifier[sys] . identifier[argv] [ literal[int] :]
identifier[args] = identifier[_get_parser] (). identifier[parse_args] ( identifier[argv] )
identifier[mand] ( identifier[args] . identifier[module_seq] )
|
def main(argv=None):
"""Execute each module in the same interpreter.
Args:
argv: Each item of argv will be treated as a separate
module with potential arguments
each item may be a string or a sequence of strings.
If a given argument is a string, then treat string as
shell arguments and split accordingly.
If the given argument is a tuple or list, then assume
that the given arguments are already parsed.
The first item of each argument should be a module or module path
"""
if argv is None:
argv = sys.argv[1:] # depends on [control=['if'], data=['argv']]
args = _get_parser().parse_args(argv)
mand(args.module_seq)
|
def clean_username(self):
    """
    Validate that the username is alphanumeric and is not already in use.
    Also validates that the username is not listed in
    ACCOUNTS_FORBIDDEN_USERNAMES list.
    """
    username = self.cleaned_data['username']
    user_model = get_user_model()
    try:
        user_model.objects.get(username__iexact=username)
    except user_model.DoesNotExist:
        pass
    else:
        # Lookup succeeded, so the name is taken.
        raise forms.ValidationError(_('This username is already taken.'))
    if username.lower() in accounts_settings.ACCOUNTS_FORBIDDEN_USERNAMES:
        raise forms.ValidationError(_('This username is not allowed.'))
    return username
|
def function[clean_username, parameter[self]]:
constant[
Validate that the username is alphanumeric and is not already in use.
Also validates that the username is not listed in
ACCOUNTS_FORBIDDEN_USERNAMES list.
]
<ast.Try object at 0x7da18f00ca30>
if compare[call[call[name[self].cleaned_data][constant[username]].lower, parameter[]] in name[accounts_settings].ACCOUNTS_FORBIDDEN_USERNAMES] begin[:]
<ast.Raise object at 0x7da1b0b72500>
return[call[name[self].cleaned_data][constant[username]]]
|
keyword[def] identifier[clean_username] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[get_user_model] (). identifier[objects] . identifier[get] (
identifier[username__iexact] = identifier[self] . identifier[cleaned_data] [ literal[string] ])
keyword[except] identifier[get_user_model] (). identifier[DoesNotExist] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[forms] . identifier[ValidationError] ( identifier[_] ( literal[string] ))
keyword[if] identifier[self] . identifier[cleaned_data] [ literal[string] ]. identifier[lower] () keyword[in] identifier[accounts_settings] . identifier[ACCOUNTS_FORBIDDEN_USERNAMES] :
keyword[raise] identifier[forms] . identifier[ValidationError] ( identifier[_] ( literal[string] ))
keyword[return] identifier[self] . identifier[cleaned_data] [ literal[string] ]
|
def clean_username(self):
"""
Validate that the username is alphanumeric and is not already in use.
Also validates that the username is not listed in
ACCOUNTS_FORBIDDEN_USERNAMES list.
"""
try:
get_user_model().objects.get(username__iexact=self.cleaned_data['username']) # depends on [control=['try'], data=[]]
except get_user_model().DoesNotExist:
pass # depends on [control=['except'], data=[]]
else:
raise forms.ValidationError(_('This username is already taken.'))
if self.cleaned_data['username'].lower() in accounts_settings.ACCOUNTS_FORBIDDEN_USERNAMES:
raise forms.ValidationError(_('This username is not allowed.')) # depends on [control=['if'], data=[]]
return self.cleaned_data['username']
|
def normal_h3(size: int = 10000) -> HistogramND:
    """A simple 3D histogram with normal distribution.
    Parameters
    ----------
    size : Number of points
    """
    # Draw three independent standard-normal samples, one per axis.
    samples = [np.random.normal(0, 1, (size,)) for _ in range(3)]
    return h3(samples, name="normal", axis_names=tuple("xyz"),
              title="3D normal distribution")
|
def function[normal_h3, parameter[size]]:
constant[A simple 3D histogram with normal distribution.
Parameters
----------
size : Number of points
]
variable[data1] assign[=] call[name[np].random.normal, parameter[constant[0], constant[1], tuple[[<ast.Name object at 0x7da20c794d60>]]]]
variable[data2] assign[=] call[name[np].random.normal, parameter[constant[0], constant[1], tuple[[<ast.Name object at 0x7da20c796320>]]]]
variable[data3] assign[=] call[name[np].random.normal, parameter[constant[0], constant[1], tuple[[<ast.Name object at 0x7da20c795030>]]]]
return[call[name[h3], parameter[list[[<ast.Name object at 0x7da1b26ac1f0>, <ast.Name object at 0x7da1b26ad930>, <ast.Name object at 0x7da1b26aded0>]]]]]
|
keyword[def] identifier[normal_h3] ( identifier[size] : identifier[int] = literal[int] )-> identifier[HistogramND] :
literal[string]
identifier[data1] = identifier[np] . identifier[random] . identifier[normal] ( literal[int] , literal[int] ,( identifier[size] ,))
identifier[data2] = identifier[np] . identifier[random] . identifier[normal] ( literal[int] , literal[int] ,( identifier[size] ,))
identifier[data3] = identifier[np] . identifier[random] . identifier[normal] ( literal[int] , literal[int] ,( identifier[size] ,))
keyword[return] identifier[h3] ([ identifier[data1] , identifier[data2] , identifier[data3] ], identifier[name] = literal[string] , identifier[axis_names] = identifier[tuple] ( literal[string] ), identifier[title] = literal[string] )
|
def normal_h3(size: int=10000) -> HistogramND:
"""A simple 3D histogram with normal distribution.
Parameters
----------
size : Number of points
"""
data1 = np.random.normal(0, 1, (size,))
data2 = np.random.normal(0, 1, (size,))
data3 = np.random.normal(0, 1, (size,))
return h3([data1, data2, data3], name='normal', axis_names=tuple('xyz'), title='3D normal distribution')
|
def erase_sector(self, address):
    """!
    @brief Erase one sector.
    @param address Start address of the sector to erase.
    @exception FlashEraseFailure Raised when the flash algo reports a
        non-zero status for the erase.
    """
    assert self._active_operation == self.Operation.ERASE
    # Run the flash algo's erase_sector routine on the target and wait
    # for it to finish.
    status = self._call_function_and_wait(self.flash_algo['pc_erase_sector'],
                                          address)
    # Any non-zero status from the algo means the erase failed.
    if status != 0:
        raise FlashEraseFailure('erase_sector(0x%x) error: %i'
                                % (address, status), address, status)
|
def function[erase_sector, parameter[self, address]]:
constant[!
@brief Erase one sector.
@exception FlashEraseFailure
]
assert[compare[name[self]._active_operation equal[==] name[self].Operation.ERASE]]
variable[result] assign[=] call[name[self]._call_function_and_wait, parameter[call[name[self].flash_algo][constant[pc_erase_sector]], name[address]]]
if compare[name[result] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da18f7210c0>
|
keyword[def] identifier[erase_sector] ( identifier[self] , identifier[address] ):
literal[string]
keyword[assert] identifier[self] . identifier[_active_operation] == identifier[self] . identifier[Operation] . identifier[ERASE]
identifier[result] = identifier[self] . identifier[_call_function_and_wait] ( identifier[self] . identifier[flash_algo] [ literal[string] ], identifier[address] )
keyword[if] identifier[result] != literal[int] :
keyword[raise] identifier[FlashEraseFailure] ( literal[string] %( identifier[address] , identifier[result] ), identifier[address] , identifier[result] )
|
def erase_sector(self, address):
"""!
@brief Erase one sector.
@exception FlashEraseFailure
"""
assert self._active_operation == self.Operation.ERASE
# update core register to execute the erase_sector subroutine
result = self._call_function_and_wait(self.flash_algo['pc_erase_sector'], address)
# check the return code
if result != 0:
raise FlashEraseFailure('erase_sector(0x%x) error: %i' % (address, result), address, result) # depends on [control=['if'], data=['result']]
|
def crop(self, min, max):
    """
    Crop a region by removing coordinates outside bounds.
    Follows normal slice indexing conventions.
    Parameters
    ----------
    min : tuple
        Minimum or starting bounds for each axis.
    max : tuple
        Maximum or ending bounds for each axis.
    """
    inside = []
    for coord in self.coordinates:
        # Keep only coordinates within [min, max) on every axis.
        if all(coord >= min) and all(coord < max):
            inside.append(coord)
    return one(inside)
|
def function[crop, parameter[self, min, max]]:
constant[
Crop a region by removing coordinates outside bounds.
Follows normal slice indexing conventions.
Parameters
----------
min : tuple
Minimum or starting bounds for each axis.
max : tuple
Maximum or ending bounds for each axis.
]
variable[new] assign[=] <ast.ListComp object at 0x7da1b095de10>
return[call[name[one], parameter[name[new]]]]
|
keyword[def] identifier[crop] ( identifier[self] , identifier[min] , identifier[max] ):
literal[string]
identifier[new] =[ identifier[c] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[coordinates] keyword[if] identifier[all] ( identifier[c] >= identifier[min] ) keyword[and] identifier[all] ( identifier[c] < identifier[max] )]
keyword[return] identifier[one] ( identifier[new] )
|
def crop(self, min, max):
"""
Crop a region by removing coordinates outside bounds.
Follows normal slice indexing conventions.
Parameters
----------
min : tuple
Minimum or starting bounds for each axis.
max : tuple
Maximum or ending bounds for each axis.
"""
new = [c for c in self.coordinates if all(c >= min) and all(c < max)]
return one(new)
|
def _on_github_user(self, future, access_token, response):
"""Invoked as a callback when self.github_request returns the response
to the request for user data.
:param method future: The callback method to pass along
:param str access_token: The access token for the user's use
:param dict response: The HTTP response already decoded
"""
response['access_token'] = access_token
future.set_result(response)
|
def function[_on_github_user, parameter[self, future, access_token, response]]:
constant[Invoked as a callback when self.github_request returns the response
to the request for user data.
:param method future: The callback method to pass along
:param str access_token: The access token for the user's use
:param dict response: The HTTP response already decoded
]
call[name[response]][constant[access_token]] assign[=] name[access_token]
call[name[future].set_result, parameter[name[response]]]
|
keyword[def] identifier[_on_github_user] ( identifier[self] , identifier[future] , identifier[access_token] , identifier[response] ):
literal[string]
identifier[response] [ literal[string] ]= identifier[access_token]
identifier[future] . identifier[set_result] ( identifier[response] )
|
def _on_github_user(self, future, access_token, response):
"""Invoked as a callback when self.github_request returns the response
to the request for user data.
:param method future: The callback method to pass along
:param str access_token: The access token for the user's use
:param dict response: The HTTP response already decoded
"""
response['access_token'] = access_token
future.set_result(response)
|
def api_url(self):
    '''return the api url of self'''
    base_url = self.service.url
    return pathjoin(Bin.path, self.name, url=base_url)
|
def function[api_url, parameter[self]]:
constant[return the api url of self]
return[call[name[pathjoin], parameter[name[Bin].path, name[self].name]]]
|
keyword[def] identifier[api_url] ( identifier[self] ):
literal[string]
keyword[return] identifier[pathjoin] ( identifier[Bin] . identifier[path] , identifier[self] . identifier[name] , identifier[url] = identifier[self] . identifier[service] . identifier[url] )
|
def api_url(self):
"""return the api url of self"""
return pathjoin(Bin.path, self.name, url=self.service.url)
|
def on_train_begin(self, **kwargs:Any)->None:
    "Initialize inner arguments."
    self.wait = 0
    self.opt = self.learn.opt
    super().on_train_begin(**kwargs)
|
def function[on_train_begin, parameter[self]]:
constant[Initialize inner arguments.]
<ast.Tuple object at 0x7da1b1dd9330> assign[=] tuple[[<ast.Constant object at 0x7da1b1ddae30>, <ast.Attribute object at 0x7da1b1ddb2e0>]]
call[call[name[super], parameter[]].on_train_begin, parameter[]]
|
keyword[def] identifier[on_train_begin] ( identifier[self] ,** identifier[kwargs] : identifier[Any] )-> keyword[None] :
literal[string]
identifier[self] . identifier[wait] , identifier[self] . identifier[opt] = literal[int] , identifier[self] . identifier[learn] . identifier[opt]
identifier[super] (). identifier[on_train_begin] (** identifier[kwargs] )
|
def on_train_begin(self, **kwargs: Any) -> None:
"""Initialize inner arguments."""
(self.wait, self.opt) = (0, self.learn.opt)
super().on_train_begin(**kwargs)
|
def _set_widget_background_color(widget, color):
"""
Changes the base color of a widget (background).
:param widget: widget to modify
:param color: the color to apply
"""
pal = widget.palette()
pal.setColor(pal.Base, color)
widget.setPalette(pal)
|
def function[_set_widget_background_color, parameter[widget, color]]:
constant[
Changes the base color of a widget (background).
:param widget: widget to modify
:param color: the color to apply
]
variable[pal] assign[=] call[name[widget].palette, parameter[]]
call[name[pal].setColor, parameter[name[pal].Base, name[color]]]
call[name[widget].setPalette, parameter[name[pal]]]
|
keyword[def] identifier[_set_widget_background_color] ( identifier[widget] , identifier[color] ):
literal[string]
identifier[pal] = identifier[widget] . identifier[palette] ()
identifier[pal] . identifier[setColor] ( identifier[pal] . identifier[Base] , identifier[color] )
identifier[widget] . identifier[setPalette] ( identifier[pal] )
|
def _set_widget_background_color(widget, color):
"""
Changes the base color of a widget (background).
:param widget: widget to modify
:param color: the color to apply
"""
pal = widget.palette()
pal.setColor(pal.Base, color)
widget.setPalette(pal)
|
def nb_r_deriv(r, data_row):
    """
    Derivative of log-likelihood wrt r (formula from wikipedia)
    Args:
        r (float): the R parameter in the NB distribution
        data_row (array): 1d array of length cells
    """
    n = len(data_row)
    # Digamma part: sum over the data minus the n-scaled digamma of r.
    digamma_part = sum(digamma(data_row + r)) - n * digamma(r)
    # Log part: n * log(r / (r + sample mean)).
    log_part = n * np.log(r / (r + np.mean(data_row)))
    return digamma_part + log_part
|
def function[nb_r_deriv, parameter[r, data_row]]:
constant[
Derivative of log-likelihood wrt r (formula from wikipedia)
Args:
r (float): the R paramemter in the NB distribution
data_row (array): 1d array of length cells
]
variable[n] assign[=] call[name[len], parameter[name[data_row]]]
variable[d] assign[=] binary_operation[binary_operation[call[name[sum], parameter[call[name[digamma], parameter[binary_operation[name[data_row] + name[r]]]]]] - binary_operation[name[n] * call[name[digamma], parameter[name[r]]]]] + binary_operation[name[n] * call[name[np].log, parameter[binary_operation[name[r] / binary_operation[name[r] + call[name[np].mean, parameter[name[data_row]]]]]]]]]
return[name[d]]
|
keyword[def] identifier[nb_r_deriv] ( identifier[r] , identifier[data_row] ):
literal[string]
identifier[n] = identifier[len] ( identifier[data_row] )
identifier[d] = identifier[sum] ( identifier[digamma] ( identifier[data_row] + identifier[r] ))- identifier[n] * identifier[digamma] ( identifier[r] )+ identifier[n] * identifier[np] . identifier[log] ( identifier[r] /( identifier[r] + identifier[np] . identifier[mean] ( identifier[data_row] )))
keyword[return] identifier[d]
|
def nb_r_deriv(r, data_row):
"""
Derivative of log-likelihood wrt r (formula from wikipedia)
Args:
r (float): the R paramemter in the NB distribution
data_row (array): 1d array of length cells
"""
n = len(data_row)
d = sum(digamma(data_row + r)) - n * digamma(r) + n * np.log(r / (r + np.mean(data_row)))
return d
|
def set_timestamp(self, time: Union[str, datetime.datetime] = None,
                  now: bool = False) -> None:
    """
    Sets the timestamp of the embed.

    Parameters
    ----------
    time: str or :class:`datetime.datetime`
        The ``ISO 8601`` timestamp from the embed.
    now: bool
        Defaults to :class:`False`.
        If set to :class:`True` the current time is used for the timestamp.
    """
    source = datetime.datetime.utcnow() if now else time
    self.timestamp = str(source)
|
def function[set_timestamp, parameter[self, time, now]]:
constant[
Sets the timestamp of the embed.
Parameters
----------
time: str or :class:`datetime.datetime`
The ``ISO 8601`` timestamp from the embed.
now: bool
Defaults to :class:`False`.
If set to :class:`True` the current time is used for the timestamp.
]
if name[now] begin[:]
name[self].timestamp assign[=] call[name[str], parameter[call[name[datetime].datetime.utcnow, parameter[]]]]
|
keyword[def] identifier[set_timestamp] ( identifier[self] , identifier[time] : identifier[Union] [ identifier[str] , identifier[datetime] . identifier[datetime] ]= keyword[None] ,
identifier[now] : identifier[bool] = keyword[False] )-> keyword[None] :
literal[string]
keyword[if] identifier[now] :
identifier[self] . identifier[timestamp] = identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[utcnow] ())
keyword[else] :
identifier[self] . identifier[timestamp] = identifier[str] ( identifier[time] )
|
def set_timestamp(self, time: Union[str, datetime.datetime]=None, now: bool=False) -> None:
"""
Sets the timestamp of the embed.
Parameters
----------
time: str or :class:`datetime.datetime`
The ``ISO 8601`` timestamp from the embed.
now: bool
Defaults to :class:`False`.
If set to :class:`True` the current time is used for the timestamp.
"""
if now:
self.timestamp = str(datetime.datetime.utcnow()) # depends on [control=['if'], data=[]]
else:
self.timestamp = str(time)
|
def get_mean_values(self, C, sites, rup, dists, a1100):
    """
    Returns the mean values for a specific IMT

    :param C: coefficient set for the IMT being computed
    :param sites: site collection; ``vs30`` and ``z2pt5`` are read
    :param rup: rupture context; ``mag`` is read here (other attributes
        are read by the individual term helpers)
    :param dists: distance context; ``rrup`` is read
    :param a1100: reference-rock (vs30 = 1100 m/s) median ground motion
        as an ndarray when a site model is defined; any non-ndarray
        value triggers the default rock/basin model below
    """
    if isinstance(a1100, np.ndarray):
        # Site model defined: use the measured vs30 and basin depth
        temp_vs30 = sites.vs30
        temp_z2pt5 = sites.z2pt5
    else:
        # Default site and basin model: reference rock vs30 of
        # 1100 m/s and the corresponding basin depth at every site
        temp_vs30 = 1100.0 * np.ones(len(sites.vs30))
        temp_z2pt5 = self._select_basin_model(1100.0) *\
            np.ones_like(temp_vs30)
    # Mean is the sum of the individual model terms (magnitude scaling,
    # geometric attenuation, style of faulting, hanging wall, shallow
    # site response, basin response, hypocentral depth, fault dip and
    # anelastic attenuation)
    return (self._get_magnitude_term(C, rup.mag) +
            self._get_geometric_attenuation_term(C, rup.mag, dists.rrup) +
            self._get_style_of_faulting_term(C, rup) +
            self._get_hanging_wall_term(C, rup, dists) +
            self._get_shallow_site_response_term(C, temp_vs30, a1100) +
            self._get_basin_response_term(C, temp_z2pt5) +
            self._get_hypocentral_depth_term(C, rup) +
            self._get_fault_dip_term(C, rup) +
            self._get_anelastic_attenuation_term(C, dists.rrup))
|
def function[get_mean_values, parameter[self, C, sites, rup, dists, a1100]]:
constant[
Returns the mean values for a specific IMT
]
if call[name[isinstance], parameter[name[a1100], name[np].ndarray]] begin[:]
variable[temp_vs30] assign[=] name[sites].vs30
variable[temp_z2pt5] assign[=] name[sites].z2pt5
return[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[self]._get_magnitude_term, parameter[name[C], name[rup].mag]] + call[name[self]._get_geometric_attenuation_term, parameter[name[C], name[rup].mag, name[dists].rrup]]] + call[name[self]._get_style_of_faulting_term, parameter[name[C], name[rup]]]] + call[name[self]._get_hanging_wall_term, parameter[name[C], name[rup], name[dists]]]] + call[name[self]._get_shallow_site_response_term, parameter[name[C], name[temp_vs30], name[a1100]]]] + call[name[self]._get_basin_response_term, parameter[name[C], name[temp_z2pt5]]]] + call[name[self]._get_hypocentral_depth_term, parameter[name[C], name[rup]]]] + call[name[self]._get_fault_dip_term, parameter[name[C], name[rup]]]] + call[name[self]._get_anelastic_attenuation_term, parameter[name[C], name[dists].rrup]]]]
|
keyword[def] identifier[get_mean_values] ( identifier[self] , identifier[C] , identifier[sites] , identifier[rup] , identifier[dists] , identifier[a1100] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[a1100] , identifier[np] . identifier[ndarray] ):
identifier[temp_vs30] = identifier[sites] . identifier[vs30]
identifier[temp_z2pt5] = identifier[sites] . identifier[z2pt5]
keyword[else] :
identifier[temp_vs30] = literal[int] * identifier[np] . identifier[ones] ( identifier[len] ( identifier[sites] . identifier[vs30] ))
identifier[temp_z2pt5] = identifier[self] . identifier[_select_basin_model] ( literal[int] )* identifier[np] . identifier[ones_like] ( identifier[temp_vs30] )
keyword[return] ( identifier[self] . identifier[_get_magnitude_term] ( identifier[C] , identifier[rup] . identifier[mag] )+
identifier[self] . identifier[_get_geometric_attenuation_term] ( identifier[C] , identifier[rup] . identifier[mag] , identifier[dists] . identifier[rrup] )+
identifier[self] . identifier[_get_style_of_faulting_term] ( identifier[C] , identifier[rup] )+
identifier[self] . identifier[_get_hanging_wall_term] ( identifier[C] , identifier[rup] , identifier[dists] )+
identifier[self] . identifier[_get_shallow_site_response_term] ( identifier[C] , identifier[temp_vs30] , identifier[a1100] )+
identifier[self] . identifier[_get_basin_response_term] ( identifier[C] , identifier[temp_z2pt5] )+
identifier[self] . identifier[_get_hypocentral_depth_term] ( identifier[C] , identifier[rup] )+
identifier[self] . identifier[_get_fault_dip_term] ( identifier[C] , identifier[rup] )+
identifier[self] . identifier[_get_anelastic_attenuation_term] ( identifier[C] , identifier[dists] . identifier[rrup] ))
|
def get_mean_values(self, C, sites, rup, dists, a1100):
"""
Returns the mean values for a specific IMT
"""
if isinstance(a1100, np.ndarray):
# Site model defined
temp_vs30 = sites.vs30
temp_z2pt5 = sites.z2pt5 # depends on [control=['if'], data=[]]
else:
# Default site and basin model
temp_vs30 = 1100.0 * np.ones(len(sites.vs30))
temp_z2pt5 = self._select_basin_model(1100.0) * np.ones_like(temp_vs30)
return self._get_magnitude_term(C, rup.mag) + self._get_geometric_attenuation_term(C, rup.mag, dists.rrup) + self._get_style_of_faulting_term(C, rup) + self._get_hanging_wall_term(C, rup, dists) + self._get_shallow_site_response_term(C, temp_vs30, a1100) + self._get_basin_response_term(C, temp_z2pt5) + self._get_hypocentral_depth_term(C, rup) + self._get_fault_dip_term(C, rup) + self._get_anelastic_attenuation_term(C, dists.rrup)
|
def update_probs(self):
    """Update the internal probability values given the counts."""
    # Fixed assumed value for the systematic error component.
    syst_error = 0.05

    def random_error(pos, neg):
        # Complement of the (capped) correctness rate, minus the
        # systematic error share.
        correct = min((float(pos) / (neg + pos), 1 - syst_error))
        return 1 - correct - syst_error

    # Prior probabilities derived from the per-source counts.
    prior_probs = {'syst': {}, 'rand': {}}
    for source, (pos, neg) in self.prior_counts.items():
        # Skip sources without any actual observations.
        if pos + neg == 0:
            continue
        prior_probs['syst'][source] = syst_error
        prior_probs['rand'][source] = random_error(pos, neg)

    # Per-rule (subtype) probabilities derived from the nested counts.
    subtype_probs = {}
    for source, entry in self.subtype_counts.items():
        for rule, (pos, neg) in entry.items():
            # Skip rules without any actual observations.
            if pos + neg == 0:
                continue
            subtype_probs.setdefault(source, {})[rule] = \
                random_error(pos, neg)

    # Propagate the values into the full probability data structures
    # maintained by the parent class.
    super(BayesianScorer, self).update_probs(prior_probs, subtype_probs)
|
def function[update_probs, parameter[self]]:
constant[Update the internal probability values given the counts.]
variable[syst_error] assign[=] constant[0.05]
variable[prior_probs] assign[=] dictionary[[<ast.Constant object at 0x7da207f00ca0>, <ast.Constant object at 0x7da207f01ed0>], [<ast.Dict object at 0x7da207f00f40>, <ast.Dict object at 0x7da207f03c70>]]
for taget[tuple[[<ast.Name object at 0x7da207f00b50>, <ast.Tuple object at 0x7da207f00610>]]] in starred[call[name[self].prior_counts.items, parameter[]]] begin[:]
if compare[binary_operation[name[n] + name[p]] equal[==] constant[0]] begin[:]
continue
call[call[name[prior_probs]][constant[syst]]][name[source]] assign[=] name[syst_error]
call[call[name[prior_probs]][constant[rand]]][name[source]] assign[=] binary_operation[binary_operation[constant[1] - call[name[min], parameter[tuple[[<ast.BinOp object at 0x7da207f00a90>, <ast.BinOp object at 0x7da207f02a70>]]]]] - name[syst_error]]
variable[subtype_probs] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da207f02230>, <ast.Name object at 0x7da207f02ec0>]]] in starred[call[name[self].subtype_counts.items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da207f002b0>, <ast.Tuple object at 0x7da207f02590>]]] in starred[call[name[entry].items, parameter[]]] begin[:]
if compare[binary_operation[name[n] + name[p]] equal[==] constant[0]] begin[:]
continue
if compare[name[source] <ast.NotIn object at 0x7da2590d7190> name[subtype_probs]] begin[:]
call[name[subtype_probs]][name[source]] assign[=] dictionary[[], []]
call[call[name[subtype_probs]][name[source]]][name[rule]] assign[=] binary_operation[binary_operation[constant[1] - call[name[min], parameter[tuple[[<ast.BinOp object at 0x7da207f025c0>, <ast.BinOp object at 0x7da207f03190>]]]]] - name[syst_error]]
call[call[name[super], parameter[name[BayesianScorer], name[self]]].update_probs, parameter[name[prior_probs], name[subtype_probs]]]
|
keyword[def] identifier[update_probs] ( identifier[self] ):
literal[string]
identifier[syst_error] = literal[int]
identifier[prior_probs] ={ literal[string] :{}, literal[string] :{}}
keyword[for] identifier[source] ,( identifier[p] , identifier[n] ) keyword[in] identifier[self] . identifier[prior_counts] . identifier[items] ():
keyword[if] identifier[n] + identifier[p] == literal[int] :
keyword[continue]
identifier[prior_probs] [ literal[string] ][ identifier[source] ]= identifier[syst_error]
identifier[prior_probs] [ literal[string] ][ identifier[source] ]= literal[int] - identifier[min] (( identifier[float] ( identifier[p] )/( identifier[n] + identifier[p] ), literal[int] - identifier[syst_error] ))- identifier[syst_error]
identifier[subtype_probs] ={}
keyword[for] identifier[source] , identifier[entry] keyword[in] identifier[self] . identifier[subtype_counts] . identifier[items] ():
keyword[for] identifier[rule] ,( identifier[p] , identifier[n] ) keyword[in] identifier[entry] . identifier[items] ():
keyword[if] identifier[n] + identifier[p] == literal[int] :
keyword[continue]
keyword[if] identifier[source] keyword[not] keyword[in] identifier[subtype_probs] :
identifier[subtype_probs] [ identifier[source] ]={}
identifier[subtype_probs] [ identifier[source] ][ identifier[rule] ]= literal[int] - identifier[min] (( identifier[float] ( identifier[p] )/( identifier[n] + identifier[p] ), literal[int] - identifier[syst_error] ))- identifier[syst_error]
identifier[super] ( identifier[BayesianScorer] , identifier[self] ). identifier[update_probs] ( identifier[prior_probs] , identifier[subtype_probs] )
|
def update_probs(self):
"""Update the internal probability values given the counts."""
# We deal with the prior probsfirst
# This is a fixed assumed value for systematic error
syst_error = 0.05
prior_probs = {'syst': {}, 'rand': {}}
for (source, (p, n)) in self.prior_counts.items():
# Skip if there are no actual counts
if n + p == 0:
continue # depends on [control=['if'], data=[]]
prior_probs['syst'][source] = syst_error
prior_probs['rand'][source] = 1 - min((float(p) / (n + p), 1 - syst_error)) - syst_error # depends on [control=['for'], data=[]]
# Next we deal with subtype probs based on counts
subtype_probs = {}
for (source, entry) in self.subtype_counts.items():
for (rule, (p, n)) in entry.items():
# Skip if there are no actual counts
if n + p == 0:
continue # depends on [control=['if'], data=[]]
if source not in subtype_probs:
subtype_probs[source] = {} # depends on [control=['if'], data=['source', 'subtype_probs']]
subtype_probs[source][rule] = 1 - min((float(p) / (n + p), 1 - syst_error)) - syst_error # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
# Finally we propagate this into the full probability
# data structures of the parent class
super(BayesianScorer, self).update_probs(prior_probs, subtype_probs)
|
def parse_JSON(self, JSON_string):
    """
    Parses a `pyowm.alertapi30.trigger.Trigger` instance out of raw JSON
    data. As per OWM documentation, start and end times are expressed
    with respect to the moment when you create/update the Trigger: only
    the relative "after" time expression is accepted for the start/end
    timestamps here (any other expression results in a
    `ParseResponseError` being raised).
    :param JSON_string: a raw JSON string
    :type JSON_string: str
    :return: a `pyowm.alertapi30.trigger.Trigger` instance or ``None``
        if no data is available
    :raises: *ParseResponseError* if it is impossible to find or parse the
        data needed to build the result
    """
    if JSON_string is None:
        raise parse_response_error.ParseResponseError('JSON data is None')
    d = json.loads(JSON_string)
    try:
        # trigger id (fetched with .get: may legitimately be absent,
        # e.g. for a trigger that has not been stored yet)
        trigger_id = d.get('_id', None)
        # start timestamp: only the relative "after" expression is allowed
        start_dict = d['time_period']['start']
        expr = start_dict['expression']
        if expr != 'after':
            # ValueError is caught below and re-raised as ParseResponseError
            raise ValueError('Invalid time expression: "%s" on start timestamp. Only: "after" is supported' % expr)
        start = start_dict['amount']
        # end timestamp: same constraint as the start timestamp
        end_dict = d['time_period']['end']
        expr = end_dict['expression']
        if expr != 'after':
            raise ValueError('Invalid time expression: "%s" on end timestamp. Only: "after" is supported' % expr)
        end = end_dict['amount']
        # conditions
        conditions = [Condition.from_dict(c) for c in d['conditions']]
        # alerts: the payload maps alert ids to per-alert data dicts
        alerts_dict = d['alerts']
        alerts = list()
        for key in alerts_dict:
            alert_id = key
            alert_data = alerts_dict[alert_id]
            alert_last_update = alert_data['last_update']
            # each met condition pairs the measured value (taken from
            # the 'min' key) with the Condition object it satisfied
            alert_met_conds = [
                dict(current_value=c['current_value']['min'], condition=Condition.from_dict(c['condition']))
                for c in alert_data['conditions']
            ]
            alert_coords = alert_data['coordinates']
            alert = Alert(alert_id, trigger_id, alert_met_conds, alert_coords, last_update=alert_last_update)
            alerts.append(alert)
        # area: list of geometry dicts turned into geometry objects
        # (presumably GeoJSON-style — see GeometryBuilder for the schema)
        area_list = d['area']
        area = [GeometryBuilder.build(a_dict) for a_dict in area_list]
        # alert channels
        alert_channels = None  # defaulting
    except ValueError as e:
        # raised above for unsupported time expressions
        raise parse_response_error.ParseResponseError('Impossible to parse JSON: %s' % e)
    except KeyError as e:
        # any missing mandatory field in the payload ends up here
        raise parse_response_error.ParseResponseError('Impossible to parse JSON: %s' % e)
    return Trigger(start, end, conditions, area=area, alerts=alerts, alert_channels=alert_channels, id=trigger_id)
|
def function[parse_JSON, parameter[self, JSON_string]]:
constant[
Parses a `pyowm.alertapi30.trigger.Trigger` instance out of raw JSON
data. As per OWM documentation, start and end times are expressed with
respect to the moment when you create/update the Trigger. By design,
PyOWM will only allow users to specify *absolute* datetimes - which is, with the `exact` expression -
for start/end timestamps (will otherwise result in a `ParseResponseError` be raised)
:param JSON_string: a raw JSON string
:type JSON_string: str
:return: a `pyowm.alertapi30.trigger.Trigger` instance or ``None``
if no data is available
:raises: *ParseResponseError* if it is impossible to find or parse the
data needed to build the result
]
if compare[name[JSON_string] is constant[None]] begin[:]
<ast.Raise object at 0x7da20e957e50>
variable[d] assign[=] call[name[json].loads, parameter[name[JSON_string]]]
<ast.Try object at 0x7da20e955f00>
return[call[name[Trigger], parameter[name[start], name[end], name[conditions]]]]
|
keyword[def] identifier[parse_JSON] ( identifier[self] , identifier[JSON_string] ):
literal[string]
keyword[if] identifier[JSON_string] keyword[is] keyword[None] :
keyword[raise] identifier[parse_response_error] . identifier[ParseResponseError] ( literal[string] )
identifier[d] = identifier[json] . identifier[loads] ( identifier[JSON_string] )
keyword[try] :
identifier[trigger_id] = identifier[d] . identifier[get] ( literal[string] , keyword[None] )
identifier[start_dict] = identifier[d] [ literal[string] ][ literal[string] ]
identifier[expr] = identifier[start_dict] [ literal[string] ]
keyword[if] identifier[expr] != literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[expr] )
identifier[start] = identifier[start_dict] [ literal[string] ]
identifier[end_dict] = identifier[d] [ literal[string] ][ literal[string] ]
identifier[expr] = identifier[end_dict] [ literal[string] ]
keyword[if] identifier[expr] != literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[expr] )
identifier[end] = identifier[end_dict] [ literal[string] ]
identifier[conditions] =[ identifier[Condition] . identifier[from_dict] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[d] [ literal[string] ]]
identifier[alerts_dict] = identifier[d] [ literal[string] ]
identifier[alerts] = identifier[list] ()
keyword[for] identifier[key] keyword[in] identifier[alerts_dict] :
identifier[alert_id] = identifier[key]
identifier[alert_data] = identifier[alerts_dict] [ identifier[alert_id] ]
identifier[alert_last_update] = identifier[alert_data] [ literal[string] ]
identifier[alert_met_conds] =[
identifier[dict] ( identifier[current_value] = identifier[c] [ literal[string] ][ literal[string] ], identifier[condition] = identifier[Condition] . identifier[from_dict] ( identifier[c] [ literal[string] ]))
keyword[for] identifier[c] keyword[in] identifier[alert_data] [ literal[string] ]
]
identifier[alert_coords] = identifier[alert_data] [ literal[string] ]
identifier[alert] = identifier[Alert] ( identifier[alert_id] , identifier[trigger_id] , identifier[alert_met_conds] , identifier[alert_coords] , identifier[last_update] = identifier[alert_last_update] )
identifier[alerts] . identifier[append] ( identifier[alert] )
identifier[area_list] = identifier[d] [ literal[string] ]
identifier[area] =[ identifier[GeometryBuilder] . identifier[build] ( identifier[a_dict] ) keyword[for] identifier[a_dict] keyword[in] identifier[area_list] ]
identifier[alert_channels] = keyword[None]
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[raise] identifier[parse_response_error] . identifier[ParseResponseError] ( literal[string] % identifier[e] )
keyword[except] identifier[KeyError] keyword[as] identifier[e] :
keyword[raise] identifier[parse_response_error] . identifier[ParseResponseError] ( literal[string] % identifier[e] )
keyword[return] identifier[Trigger] ( identifier[start] , identifier[end] , identifier[conditions] , identifier[area] = identifier[area] , identifier[alerts] = identifier[alerts] , identifier[alert_channels] = identifier[alert_channels] , identifier[id] = identifier[trigger_id] )
|
def parse_JSON(self, JSON_string):
"""
Parses a `pyowm.alertapi30.trigger.Trigger` instance out of raw JSON
data. As per OWM documentation, start and end times are expressed with
respect to the moment when you create/update the Trigger. By design,
PyOWM will only allow users to specify *absolute* datetimes - which is, with the `exact` expression -
for start/end timestamps (will otherwise result in a `ParseResponseError` be raised)
:param JSON_string: a raw JSON string
:type JSON_string: str
:return: a `pyowm.alertapi30.trigger.Trigger` instance or ``None``
if no data is available
:raises: *ParseResponseError* if it is impossible to find or parse the
data needed to build the result
"""
if JSON_string is None:
raise parse_response_error.ParseResponseError('JSON data is None') # depends on [control=['if'], data=[]]
d = json.loads(JSON_string)
try:
# trigger id
trigger_id = d.get('_id', None)
# start timestamp
start_dict = d['time_period']['start']
expr = start_dict['expression']
if expr != 'after':
raise ValueError('Invalid time expression: "%s" on start timestamp. Only: "after" is supported' % expr) # depends on [control=['if'], data=['expr']]
start = start_dict['amount']
# end timestamp
end_dict = d['time_period']['end']
expr = end_dict['expression']
if expr != 'after':
raise ValueError('Invalid time expression: "%s" on end timestamp. Only: "after" is supported' % expr) # depends on [control=['if'], data=['expr']]
end = end_dict['amount']
# conditions
conditions = [Condition.from_dict(c) for c in d['conditions']]
# alerts
alerts_dict = d['alerts']
alerts = list()
for key in alerts_dict:
alert_id = key
alert_data = alerts_dict[alert_id]
alert_last_update = alert_data['last_update']
alert_met_conds = [dict(current_value=c['current_value']['min'], condition=Condition.from_dict(c['condition'])) for c in alert_data['conditions']]
alert_coords = alert_data['coordinates']
alert = Alert(alert_id, trigger_id, alert_met_conds, alert_coords, last_update=alert_last_update)
alerts.append(alert) # depends on [control=['for'], data=['key']]
# area
area_list = d['area']
area = [GeometryBuilder.build(a_dict) for a_dict in area_list]
# alert channels
alert_channels = None # defaulting # depends on [control=['try'], data=[]]
except ValueError as e:
raise parse_response_error.ParseResponseError('Impossible to parse JSON: %s' % e) # depends on [control=['except'], data=['e']]
except KeyError as e:
raise parse_response_error.ParseResponseError('Impossible to parse JSON: %s' % e) # depends on [control=['except'], data=['e']]
return Trigger(start, end, conditions, area=area, alerts=alerts, alert_channels=alert_channels, id=trigger_id)
|
def argmin(self):
    """
    Return the co-ordinates of the bin centre containing the
    minimum value. Same as numpy.argmin(), converting the
    indexes to bin co-ordinates.
    """
    # Flat index of the minimum, converted back to one index per axis.
    flat_index = self.array.argmin()
    multi_index = numpy.unravel_index(flat_index, self.array.shape)
    # Map each per-axis index onto that axis's bin-centre value.
    return tuple(axis_centres[i]
                 for axis_centres, i in zip(self.centres(), multi_index))
|
def function[argmin, parameter[self]]:
constant[
Return the co-ordinates of the bin centre containing the
minimum value. Same as numpy.argmin(), converting the
indexes to bin co-ordinates.
]
return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da20c6aa290>]]]
|
keyword[def] identifier[argmin] ( identifier[self] ):
literal[string]
keyword[return] identifier[tuple] ( identifier[centres] [ identifier[index] ] keyword[for] identifier[centres] , identifier[index] keyword[in]
identifier[zip] ( identifier[self] . identifier[centres] (), identifier[numpy] . identifier[unravel_index] ( identifier[self] . identifier[array] . identifier[argmin] (),
identifier[self] . identifier[array] . identifier[shape] )))
|
def argmin(self):
"""
Return the co-ordinates of the bin centre containing the
minimum value. Same as numpy.argmin(), converting the
indexes to bin co-ordinates.
"""
return tuple((centres[index] for (centres, index) in zip(self.centres(), numpy.unravel_index(self.array.argmin(), self.array.shape))))
|
def update_graph(self, dep_id=None, success=True):
    """dep_id just finished. Update our dependency
    graph and submit any jobs that just became runable.
    Called with dep_id=None to update entire graph for hwm, but without finishing
    a task.
    :param dep_id: msg id of the dependency that just finished, or None
        to recheck every pending job (high-water-mark update).
    :param success: whether the finished task succeeded. Not read in this
        body — presumably the completed/failed sets are updated by the
        caller before this runs; TODO confirm.
    """
    # print ("\n\n***********")
    # pprint (dep_id)
    # pprint (self.graph)
    # pprint (self.depending)
    # pprint (self.all_completed)
    # pprint (self.all_failed)
    # print ("\n\n***********\n\n")
    # update any jobs that depended on the dependency
    # (pop: dep_id's entry is consumed; empty list if nothing waited on it)
    jobs = self.graph.pop(dep_id, [])
    # recheck *all* jobs if
    # a) we have HWM and an engine just become no longer full
    # or b) dep_id was given as None
    # Note precedence: (dep_id is None) or (self.hwm and any(...)).
    if dep_id is None or self.hwm and any( [ load==self.hwm-1 for load in self.loads ]):
        jobs = self.depending.keys()
    # Oldest submissions first; sorted() materialises the ids, so popping
    # from self.depending inside the loop is safe.
    for msg_id in sorted(jobs, key=lambda msg_id: self.depending[msg_id].timestamp):
        job = self.depending[msg_id]
        # A job whose 'after' or 'follow' dependency set can never be
        # satisfied (given what has completed/failed) is failed outright.
        if job.after.unreachable(self.all_completed, self.all_failed)\
                or job.follow.unreachable(self.all_completed, self.all_failed):
            self.fail_unreachable(msg_id)
        elif job.after.check(self.all_completed, self.all_failed): # time deps met, maybe run
            if self.maybe_run(job):
                # Job was submitted: drop it from the pending set and
                # remove its edge from every dependency still tracking it.
                self.depending.pop(msg_id)
                for mid in job.dependents:
                    if mid in self.graph:
                        self.graph[mid].remove(msg_id)
|
def function[update_graph, parameter[self, dep_id, success]]:
constant[dep_id just finished. Update our dependency
graph and submit any jobs that just became runable.
Called with dep_id=None to update entire graph for hwm, but without finishing
a task.
]
variable[jobs] assign[=] call[name[self].graph.pop, parameter[name[dep_id], list[[]]]]
if <ast.BoolOp object at 0x7da207f9a500> begin[:]
variable[jobs] assign[=] call[name[self].depending.keys, parameter[]]
for taget[name[msg_id]] in starred[call[name[sorted], parameter[name[jobs]]]] begin[:]
variable[job] assign[=] call[name[self].depending][name[msg_id]]
if <ast.BoolOp object at 0x7da207f990c0> begin[:]
call[name[self].fail_unreachable, parameter[name[msg_id]]]
|
keyword[def] identifier[update_graph] ( identifier[self] , identifier[dep_id] = keyword[None] , identifier[success] = keyword[True] ):
literal[string]
identifier[jobs] = identifier[self] . identifier[graph] . identifier[pop] ( identifier[dep_id] ,[])
keyword[if] identifier[dep_id] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[hwm] keyword[and] identifier[any] ([ identifier[load] == identifier[self] . identifier[hwm] - literal[int] keyword[for] identifier[load] keyword[in] identifier[self] . identifier[loads] ]):
identifier[jobs] = identifier[self] . identifier[depending] . identifier[keys] ()
keyword[for] identifier[msg_id] keyword[in] identifier[sorted] ( identifier[jobs] , identifier[key] = keyword[lambda] identifier[msg_id] : identifier[self] . identifier[depending] [ identifier[msg_id] ]. identifier[timestamp] ):
identifier[job] = identifier[self] . identifier[depending] [ identifier[msg_id] ]
keyword[if] identifier[job] . identifier[after] . identifier[unreachable] ( identifier[self] . identifier[all_completed] , identifier[self] . identifier[all_failed] ) keyword[or] identifier[job] . identifier[follow] . identifier[unreachable] ( identifier[self] . identifier[all_completed] , identifier[self] . identifier[all_failed] ):
identifier[self] . identifier[fail_unreachable] ( identifier[msg_id] )
keyword[elif] identifier[job] . identifier[after] . identifier[check] ( identifier[self] . identifier[all_completed] , identifier[self] . identifier[all_failed] ):
keyword[if] identifier[self] . identifier[maybe_run] ( identifier[job] ):
identifier[self] . identifier[depending] . identifier[pop] ( identifier[msg_id] )
keyword[for] identifier[mid] keyword[in] identifier[job] . identifier[dependents] :
keyword[if] identifier[mid] keyword[in] identifier[self] . identifier[graph] :
identifier[self] . identifier[graph] [ identifier[mid] ]. identifier[remove] ( identifier[msg_id] )
|
def update_graph(self, dep_id=None, success=True):
"""dep_id just finished. Update our dependency
graph and submit any jobs that just became runable.
Called with dep_id=None to update entire graph for hwm, but without finishing
a task.
"""
# print ("\n\n***********")
# pprint (dep_id)
# pprint (self.graph)
# pprint (self.depending)
# pprint (self.all_completed)
# pprint (self.all_failed)
# print ("\n\n***********\n\n")
# update any jobs that depended on the dependency
jobs = self.graph.pop(dep_id, [])
# recheck *all* jobs if
# a) we have HWM and an engine just become no longer full
# or b) dep_id was given as None
if dep_id is None or (self.hwm and any([load == self.hwm - 1 for load in self.loads])):
jobs = self.depending.keys() # depends on [control=['if'], data=[]]
for msg_id in sorted(jobs, key=lambda msg_id: self.depending[msg_id].timestamp):
job = self.depending[msg_id]
if job.after.unreachable(self.all_completed, self.all_failed) or job.follow.unreachable(self.all_completed, self.all_failed):
self.fail_unreachable(msg_id) # depends on [control=['if'], data=[]]
elif job.after.check(self.all_completed, self.all_failed): # time deps met, maybe run
if self.maybe_run(job):
self.depending.pop(msg_id)
for mid in job.dependents:
if mid in self.graph:
self.graph[mid].remove(msg_id) # depends on [control=['if'], data=['mid']] # depends on [control=['for'], data=['mid']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['msg_id']]
|
def getComponent(self, innerFlag=False):
    """Return currently assigned component of the |ASN.1| object.
    Returns
    -------
    : :py:class:`~pyasn1.type.base.PyAsn1Item`
        a PyASN1 object
    """
    # Guard clause: a Choice with no selection has nothing to return.
    if self._currentIdx is None:
        raise error.PyAsn1Error('Component not chosen')
    chosen = self._componentValues[self._currentIdx]
    # Optionally drill into nested Choice objects so the innermost
    # selected component is returned.
    if innerFlag and isinstance(chosen, Choice):
        return chosen.getComponent(innerFlag)
    return chosen
|
def function[getComponent, parameter[self, innerFlag]]:
constant[Return currently assigned component of the |ASN.1| object.
Returns
-------
: :py:class:`~pyasn1.type.base.PyAsn1Item`
a PyASN1 object
]
if compare[name[self]._currentIdx is constant[None]] begin[:]
<ast.Raise object at 0x7da1b07cbc40>
|
keyword[def] identifier[getComponent] ( identifier[self] , identifier[innerFlag] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[_currentIdx] keyword[is] keyword[None] :
keyword[raise] identifier[error] . identifier[PyAsn1Error] ( literal[string] )
keyword[else] :
identifier[c] = identifier[self] . identifier[_componentValues] [ identifier[self] . identifier[_currentIdx] ]
keyword[if] identifier[innerFlag] keyword[and] identifier[isinstance] ( identifier[c] , identifier[Choice] ):
keyword[return] identifier[c] . identifier[getComponent] ( identifier[innerFlag] )
keyword[else] :
keyword[return] identifier[c]
|
def getComponent(self, innerFlag=False):
"""Return currently assigned component of the |ASN.1| object.
Returns
-------
: :py:class:`~pyasn1.type.base.PyAsn1Item`
a PyASN1 object
"""
if self._currentIdx is None:
raise error.PyAsn1Error('Component not chosen') # depends on [control=['if'], data=[]]
else:
c = self._componentValues[self._currentIdx]
if innerFlag and isinstance(c, Choice):
return c.getComponent(innerFlag) # depends on [control=['if'], data=[]]
else:
return c
|
def walk_up(bottom):
    """Mimic os.walk, but walk 'up' instead of down the directory tree.

    Yields an ``(directory, dirnames, filenames)`` tuple for ``bottom``
    and then for each of its ancestors, stopping once the filesystem
    root is reached (where ``..`` resolves to the directory itself).

    :param bottom: directory to start from (made absolute via realpath)
    :return: generator of ``(path, dirs, nondirs)`` tuples
    :raises OSError: if a directory cannot be listed
    """
    import os
    from os import path

    bottom = path.realpath(bottom)

    # Split the current directory's entries into dirs and non-dirs,
    # mirroring the os.walk tuple layout. The original wrapped listdir
    # in "except Exception as e: raise e", which only re-raised the
    # same exception — dropped as a no-op.
    names = os.listdir(bottom)
    dirs, nondirs = [], []
    for name in names:
        if path.isdir(path.join(bottom, name)):
            dirs.append(name)
        else:
            nondirs.append(name)

    yield bottom, dirs, nondirs

    new_path = path.realpath(path.join(bottom, '..'))
    # See if we are at the top: at the root, '..' maps back to itself.
    if new_path == bottom:
        return
    # Delegate the remaining ancestors to the recursive generator.
    yield from walk_up(new_path)
|
def function[walk_up, parameter[bottom]]:
constant[ mimic os.walk, but walk 'up' instead of down the directory tree
:param bottom:
:return:
]
import module[os]
from relative_module[os] import module[path]
variable[bottom] assign[=] call[name[path].realpath, parameter[name[bottom]]]
<ast.Try object at 0x7da1b19afbb0>
<ast.Tuple object at 0x7da1b19ae170> assign[=] tuple[[<ast.List object at 0x7da1b19ae200>, <ast.List object at 0x7da1b19ae230>]]
for taget[name[name]] in starred[name[names]] begin[:]
if call[name[path].isdir, parameter[call[name[path].join, parameter[name[bottom], name[name]]]]] begin[:]
call[name[dirs].append, parameter[name[name]]]
<ast.Yield object at 0x7da1b19adba0>
variable[new_path] assign[=] call[name[path].realpath, parameter[call[name[path].join, parameter[name[bottom], constant[..]]]]]
if compare[name[new_path] equal[==] name[bottom]] begin[:]
return[None]
for taget[name[x]] in starred[call[name[walk_up], parameter[name[new_path]]]] begin[:]
<ast.Yield object at 0x7da1b19ad480>
|
keyword[def] identifier[walk_up] ( identifier[bottom] ):
literal[string]
keyword[import] identifier[os]
keyword[from] identifier[os] keyword[import] identifier[path]
identifier[bottom] = identifier[path] . identifier[realpath] ( identifier[bottom] )
keyword[try] :
identifier[names] = identifier[os] . identifier[listdir] ( identifier[bottom] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[raise] identifier[e]
identifier[dirs] , identifier[nondirs] =[],[]
keyword[for] identifier[name] keyword[in] identifier[names] :
keyword[if] identifier[path] . identifier[isdir] ( identifier[path] . identifier[join] ( identifier[bottom] , identifier[name] )):
identifier[dirs] . identifier[append] ( identifier[name] )
keyword[else] :
identifier[nondirs] . identifier[append] ( identifier[name] )
keyword[yield] identifier[bottom] , identifier[dirs] , identifier[nondirs]
identifier[new_path] = identifier[path] . identifier[realpath] ( identifier[path] . identifier[join] ( identifier[bottom] , literal[string] ))
keyword[if] identifier[new_path] == identifier[bottom] :
keyword[return]
keyword[for] identifier[x] keyword[in] identifier[walk_up] ( identifier[new_path] ):
keyword[yield] identifier[x]
|
def walk_up(bottom):
""" mimic os.walk, but walk 'up' instead of down the directory tree
:param bottom:
:return:
"""
import os
from os import path
bottom = path.realpath(bottom)
# get files in current dir
try:
names = os.listdir(bottom) # depends on [control=['try'], data=[]]
except Exception as e:
raise e # depends on [control=['except'], data=['e']]
(dirs, nondirs) = ([], [])
for name in names:
if path.isdir(path.join(bottom, name)):
dirs.append(name) # depends on [control=['if'], data=[]]
else:
nondirs.append(name) # depends on [control=['for'], data=['name']]
yield (bottom, dirs, nondirs)
new_path = path.realpath(path.join(bottom, '..'))
# see if we are at the top
if new_path == bottom:
return # depends on [control=['if'], data=[]]
for x in walk_up(new_path):
yield x # depends on [control=['for'], data=['x']]
|
def write_gexf(docgraph, output_file):
    """
    takes a document graph, converts it into GEXF format and writes it to
    a file.
    """
    # Work on a deep copy so the caller's graph is left untouched by the
    # in-place conversion steps below.
    exportable = deepcopy(docgraph)
    remove_root_metadata(exportable)
    # Flatten set/list attributes into strings, as required for GEXF.
    layerset2str(exportable)
    attriblist2str(exportable)
    nx_write_gexf(exportable, output_file)
|
def function[write_gexf, parameter[docgraph, output_file]]:
constant[
takes a document graph, converts it into GEXF format and writes it to
a file.
]
variable[dg_copy] assign[=] call[name[deepcopy], parameter[name[docgraph]]]
call[name[remove_root_metadata], parameter[name[dg_copy]]]
call[name[layerset2str], parameter[name[dg_copy]]]
call[name[attriblist2str], parameter[name[dg_copy]]]
call[name[nx_write_gexf], parameter[name[dg_copy], name[output_file]]]
|
keyword[def] identifier[write_gexf] ( identifier[docgraph] , identifier[output_file] ):
literal[string]
identifier[dg_copy] = identifier[deepcopy] ( identifier[docgraph] )
identifier[remove_root_metadata] ( identifier[dg_copy] )
identifier[layerset2str] ( identifier[dg_copy] )
identifier[attriblist2str] ( identifier[dg_copy] )
identifier[nx_write_gexf] ( identifier[dg_copy] , identifier[output_file] )
|
def write_gexf(docgraph, output_file):
"""
takes a document graph, converts it into GEXF format and writes it to
a file.
"""
dg_copy = deepcopy(docgraph)
remove_root_metadata(dg_copy)
layerset2str(dg_copy)
attriblist2str(dg_copy)
nx_write_gexf(dg_copy, output_file)
|
def get_authorization_url(self):
    """Get the authorization Url for the current client."""
    # Standard OAuth2 authorization-code query parameters, filled from
    # the stored client configuration (empty string when unset).
    params = {
        'response_type': 'code',
        'client_id': self.client.get('client_id', ''),
        'redirect_uri': self.client.get('redirect_uri', ''),
    }
    return self._format_url(OAUTH2_ROOT + 'authorize', query=params)
|
def function[get_authorization_url, parameter[self]]:
constant[Get the authorization Url for the current client.]
return[call[name[self]._format_url, parameter[binary_operation[name[OAUTH2_ROOT] + constant[authorize]]]]]
|
keyword[def] identifier[get_authorization_url] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[_format_url] (
identifier[OAUTH2_ROOT] + literal[string] ,
identifier[query] ={
literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[client] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[self] . identifier[client] . identifier[get] ( literal[string] , literal[string] )
})
|
def get_authorization_url(self):
"""Get the authorization Url for the current client."""
return self._format_url(OAUTH2_ROOT + 'authorize', query={'response_type': 'code', 'client_id': self.client.get('client_id', ''), 'redirect_uri': self.client.get('redirect_uri', '')})
|
def get_next_step(self):
    """Find the proper step when user clicks the Next button.
    :returns: The step to be switched to
    :rtype: WizardStep instance or None
    """
    layer_purpose = self.parent.step_kw_purpose.selected_purpose()
    if layer_purpose['key'] == layer_purpose_aggregation['key']:
        # Aggregation layers carry no subcategory.
        subcategory = {'key': None}
    else:
        subcategory = self.parent.step_kw_subcategory.selected_subcategory()
    # We don't use field mapping for populated place exposure. Use the
    # population point instead.
    # inasafe_fields = self.get_inasafe_fields()
    # If population field is set, must go to field mapping step first.
    # if population_count_field['key'] in inasafe_fields.keys():
    #     return self.parent.step_kw_fields_mapping
    # Go to the default InaSAFE field step whenever default fields exist
    # for this purpose/subcategory combination.
    default_inasafe_fields = get_fields(
        layer_purpose['key'],
        subcategory['key'],
        replace_null=True,
        in_group=False)
    if default_inasafe_fields:
        return self.parent.step_kw_default_inasafe_fields
    # Any other case
    return self.parent.step_kw_source
|
def function[get_next_step, parameter[self]]:
constant[Find the proper step when user clicks the Next button.
:returns: The step to be switched to
:rtype: WizardStep instance or None
]
variable[layer_purpose] assign[=] call[name[self].parent.step_kw_purpose.selected_purpose, parameter[]]
if compare[call[name[layer_purpose]][constant[key]] not_equal[!=] call[name[layer_purpose_aggregation]][constant[key]]] begin[:]
variable[subcategory] assign[=] call[name[self].parent.step_kw_subcategory.selected_subcategory, parameter[]]
variable[default_inasafe_fields] assign[=] call[name[get_fields], parameter[call[name[layer_purpose]][constant[key]], call[name[subcategory]][constant[key]]]]
if name[default_inasafe_fields] begin[:]
return[name[self].parent.step_kw_default_inasafe_fields]
return[name[self].parent.step_kw_source]
|
keyword[def] identifier[get_next_step] ( identifier[self] ):
literal[string]
identifier[layer_purpose] = identifier[self] . identifier[parent] . identifier[step_kw_purpose] . identifier[selected_purpose] ()
keyword[if] identifier[layer_purpose] [ literal[string] ]!= identifier[layer_purpose_aggregation] [ literal[string] ]:
identifier[subcategory] = identifier[self] . identifier[parent] . identifier[step_kw_subcategory] . identifier[selected_subcategory] ()
keyword[else] :
identifier[subcategory] ={ literal[string] : keyword[None] }
identifier[default_inasafe_fields] = identifier[get_fields] (
identifier[layer_purpose] [ literal[string] ],
identifier[subcategory] [ literal[string] ],
identifier[replace_null] = keyword[True] ,
identifier[in_group] = keyword[False] )
keyword[if] identifier[default_inasafe_fields] :
keyword[return] identifier[self] . identifier[parent] . identifier[step_kw_default_inasafe_fields]
keyword[return] identifier[self] . identifier[parent] . identifier[step_kw_source]
|
def get_next_step(self):
"""Find the proper step when user clicks the Next button.
:returns: The step to be switched to
:rtype: WizardStep instance or None
"""
layer_purpose = self.parent.step_kw_purpose.selected_purpose()
if layer_purpose['key'] != layer_purpose_aggregation['key']:
subcategory = self.parent.step_kw_subcategory.selected_subcategory() # depends on [control=['if'], data=[]]
else:
subcategory = {'key': None}
# We don't use field mapping for populated place exposure. Use the
# population point instead.
# inasafe_fields = self.get_inasafe_fields()
# If population field is set, must go to field mapping step first.
# if population_count_field['key'] in inasafe_fields.keys():
# return self.parent.step_kw_fields_mapping
# Check if it can go to inasafe default field step
default_inasafe_fields = get_fields(layer_purpose['key'], subcategory['key'], replace_null=True, in_group=False)
if default_inasafe_fields:
return self.parent.step_kw_default_inasafe_fields # depends on [control=['if'], data=[]]
# Any other case
return self.parent.step_kw_source
|
def regenerate(self):
    """Regenerate the session id.

    This function creates a new session id and stores all information
    associated with the current id in that new id. It then destroys the
    old session id. This is useful for preventing session fixation attacks
    and should be done whenever someone uses a login to obtain additional
    authorization.
    """
    oldhash = self.session_hash
    self.new_session_id()
    try:
        # Move the stored session data under the new key and refresh its TTL.
        self.rdb.rename(oldhash, self.session_hash)
        self.rdb.expire(self.session_hash, self.ttl)
    except Exception:
        # Best effort: RENAME raises when the old key holds no data (e.g. an
        # empty session); the new session id is valid either way. A bare
        # ``except:`` here would also swallow SystemExit/KeyboardInterrupt.
        pass
|
def function[regenerate, parameter[self]]:
constant[Regenerate the session id.
This function creates a new session id and stores all information
associated with the current id in that new id. It then destroys the
old session id. This is useful for preventing session fixation attacks
and should be done whenever someone uses a login to obtain additional
authorizaiton.
]
variable[oldhash] assign[=] name[self].session_hash
call[name[self].new_session_id, parameter[]]
<ast.Try object at 0x7da18ede7820>
|
keyword[def] identifier[regenerate] ( identifier[self] ):
literal[string]
identifier[oldhash] = identifier[self] . identifier[session_hash]
identifier[self] . identifier[new_session_id] ()
keyword[try] :
identifier[self] . identifier[rdb] . identifier[rename] ( identifier[oldhash] , identifier[self] . identifier[session_hash] )
identifier[self] . identifier[rdb] . identifier[expire] ( identifier[self] . identifier[session_hash] , identifier[self] . identifier[ttl] )
keyword[except] :
keyword[pass]
|
def regenerate(self):
"""Regenerate the session id.
This function creates a new session id and stores all information
associated with the current id in that new id. It then destroys the
old session id. This is useful for preventing session fixation attacks
and should be done whenever someone uses a login to obtain additional
authorizaiton.
"""
oldhash = self.session_hash
self.new_session_id()
try:
self.rdb.rename(oldhash, self.session_hash)
self.rdb.expire(self.session_hash, self.ttl) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
|
def request(self):
    """ Returns an OAuth2 Session to be used to make requests.
    Returns None if a token hasn't yet been received."""
    headers = {'Accept': 'application/json'}
    # An API key, when configured, takes precedence over OAuth.
    if self.api_key:
        headers['X-API-KEY'] = self.api_key
        return requests, headers
    # Otherwise fall back to an OAuth2 session built from the stored token.
    if self.token:
        return OAuth2Session(self.client_id, token=self.token), headers
    raise APIError("No API key and no OAuth session available")
|
def function[request, parameter[self]]:
constant[ Returns an OAuth2 Session to be used to make requests.
Returns None if a token hasn't yet been received.]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da204565570>], [<ast.Constant object at 0x7da204564e20>]]
if name[self].api_key begin[:]
call[name[headers]][constant[X-API-KEY]] assign[=] name[self].api_key
return[tuple[[<ast.Name object at 0x7da2046232b0>, <ast.Name object at 0x7da204623040>]]]
|
keyword[def] identifier[request] ( identifier[self] ):
literal[string]
identifier[headers] ={ literal[string] : literal[string] }
keyword[if] identifier[self] . identifier[api_key] :
identifier[headers] [ literal[string] ]= identifier[self] . identifier[api_key]
keyword[return] identifier[requests] , identifier[headers]
keyword[else] :
keyword[if] identifier[self] . identifier[token] :
keyword[return] identifier[OAuth2Session] ( identifier[self] . identifier[client_id] , identifier[token] = identifier[self] . identifier[token] ), identifier[headers]
keyword[else] :
keyword[raise] identifier[APIError] ( literal[string] )
|
def request(self):
""" Returns an OAuth2 Session to be used to make requests.
Returns None if a token hasn't yet been received."""
headers = {'Accept': 'application/json'}
# Use API Key if possible
if self.api_key:
headers['X-API-KEY'] = self.api_key
return (requests, headers) # depends on [control=['if'], data=[]]
# Try to use OAuth
elif self.token:
return (OAuth2Session(self.client_id, token=self.token), headers) # depends on [control=['if'], data=[]]
else:
raise APIError('No API key and no OAuth session available')
|
def returnModPositions(peptide, indexStart=1, removeModString='UNIMOD:'):
    """Determines the amino acid positions of all present modifications.

    :param peptide: peptide sequence, modifications have to be written in the
        format "[modificationName]"
    :param indexStart: returned amino acids positions of the peptide start with
        this number (first amino acid position = indexStart)
    :param removeModString: string to remove from the returned modification name

    :return: {modificationName:[position1, position2, ...], ...}

    #TODO: adapt removeModString to the new unimod ids in
    #maspy.constants.aaModComp ("UNIMOD:X" -> "u:X") -> also change unit tests.
    """
    unidmodPositionDict = dict()
    while '[' in peptide:
        # Split the sequence around the first modification tag. partition()
        # also terminates the loop for a malformed tag with no closing "]":
        # the previous replace('['+mod+']', ...) removal was a no-op in that
        # case, so the leading "[" survived and the loop never ended.
        before, _, tail = peptide.partition('[')
        currModification, _, after = tail.partition(']')
        # An N-terminal modification (tag at index 0) is reported on the
        # first residue, hence the clamp to 0 before applying indexStart.
        currPosition = max(len(before) - 1, 0) + indexStart
        peptide = before + after
        if removeModString:
            currModification = currModification.replace(removeModString, '')
        unidmodPositionDict.setdefault(currModification, list())
        unidmodPositionDict[currModification].append(currPosition)
    return unidmodPositionDict
|
def function[returnModPositions, parameter[peptide, indexStart, removeModString]]:
constant[Determines the amino acid positions of all present modifications.
:param peptide: peptide sequence, modifications have to be written in the
format "[modificationName]"
:param indexStart: returned amino acids positions of the peptide start with
this number (first amino acid position = indexStart)
:param removeModString: string to remove from the returned modification name
:return: {modificationName:[position1, position2, ...], ...}
#TODO: adapt removeModString to the new unimod ids in
#maspy.constants.aaModComp ("UNIMOD:X" -> "u:X") -> also change unit tests.
]
variable[unidmodPositionDict] assign[=] call[name[dict], parameter[]]
while compare[call[name[peptide].find, parameter[constant[[]]] not_equal[!=] <ast.UnaryOp object at 0x7da1b28ed720>] begin[:]
variable[currModification] assign[=] call[call[call[call[name[peptide].split, parameter[constant[[]]]][constant[1]].split, parameter[constant[]]]]][constant[0]]
variable[currPosition] assign[=] binary_operation[call[name[peptide].find, parameter[constant[[]]] - constant[1]]
if compare[name[currPosition] equal[==] <ast.UnaryOp object at 0x7da1b28ae9b0>] begin[:]
variable[currPosition] assign[=] constant[0]
<ast.AugAssign object at 0x7da1b28accd0>
variable[peptide] assign[=] call[name[peptide].replace, parameter[binary_operation[binary_operation[constant[[] + name[currModification]] + constant[]]], constant[], constant[1]]]
if name[removeModString] begin[:]
variable[currModification] assign[=] call[name[currModification].replace, parameter[name[removeModString], constant[]]]
call[name[unidmodPositionDict].setdefault, parameter[name[currModification], call[name[list], parameter[]]]]
call[call[name[unidmodPositionDict]][name[currModification]].append, parameter[name[currPosition]]]
return[name[unidmodPositionDict]]
|
keyword[def] identifier[returnModPositions] ( identifier[peptide] , identifier[indexStart] = literal[int] , identifier[removeModString] = literal[string] ):
literal[string]
identifier[unidmodPositionDict] = identifier[dict] ()
keyword[while] identifier[peptide] . identifier[find] ( literal[string] )!=- literal[int] :
identifier[currModification] = identifier[peptide] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]
identifier[currPosition] = identifier[peptide] . identifier[find] ( literal[string] )- literal[int]
keyword[if] identifier[currPosition] ==- literal[int] :
identifier[currPosition] = literal[int]
identifier[currPosition] += identifier[indexStart]
identifier[peptide] = identifier[peptide] . identifier[replace] ( literal[string] + identifier[currModification] + literal[string] , literal[string] , literal[int] )
keyword[if] identifier[removeModString] :
identifier[currModification] = identifier[currModification] . identifier[replace] ( identifier[removeModString] , literal[string] )
identifier[unidmodPositionDict] . identifier[setdefault] ( identifier[currModification] , identifier[list] ())
identifier[unidmodPositionDict] [ identifier[currModification] ]. identifier[append] ( identifier[currPosition] )
keyword[return] identifier[unidmodPositionDict]
|
def returnModPositions(peptide, indexStart=1, removeModString='UNIMOD:'):
"""Determines the amino acid positions of all present modifications.
:param peptide: peptide sequence, modifications have to be written in the
format "[modificationName]"
:param indexStart: returned amino acids positions of the peptide start with
this number (first amino acid position = indexStart)
:param removeModString: string to remove from the returned modification name
:return: {modificationName:[position1, position2, ...], ...}
#TODO: adapt removeModString to the new unimod ids in
#maspy.constants.aaModComp ("UNIMOD:X" -> "u:X") -> also change unit tests.
"""
unidmodPositionDict = dict()
while peptide.find('[') != -1:
currModification = peptide.split('[')[1].split(']')[0]
currPosition = peptide.find('[') - 1
if currPosition == -1: # move n-terminal modifications to first position
currPosition = 0 # depends on [control=['if'], data=['currPosition']]
currPosition += indexStart
peptide = peptide.replace('[' + currModification + ']', '', 1)
if removeModString:
currModification = currModification.replace(removeModString, '') # depends on [control=['if'], data=[]]
unidmodPositionDict.setdefault(currModification, list())
unidmodPositionDict[currModification].append(currPosition) # depends on [control=['while'], data=[]]
return unidmodPositionDict
|
def update_kwargs(self, request, **kwargs):
    """
    Adds variables to the context that are expected by the
    base cms templates.
    * **navigation** - The side navigation for this bundle and user.
    * **dashboard** - The list of dashboard links for this user.
    * **object_header** - If no 'object_header' was passed in the \
    current context and the current bundle is set to get it's \
    object_header from it's parent, this will get that view and render \
    it as a string. Otherwise 'object_header will remain unset.
    * **subitem** - This is set to true if we rendered a new object_header \
    and the object used to render that string is not present in the \
    context args as 'obj'. This effects navigation and wording in the \
    templates.
    """
    kwargs = super(CMSRender, self).update_kwargs(request, **kwargs)

    bundle = kwargs.get('bundle')
    url_kwargs = kwargs.get('url_params')
    view = None
    if bundle:
        # The object header may be rendered by a parent bundle's view.
        view, name = bundle.get_object_header_view(
            request, url_kwargs, parent_only=True)
        kwargs['dashboard'] = bundle.admin_site.get_dashboard_urls(request)

    if view:
        obj = view.get_object()
        if 'object_header' not in kwargs:
            kwargs['object_header'] = bundle._render_view_as_string(
                view, name, request, url_kwargs)
        # Mark as a sub-item when the header's object differs from the one
        # already in the context; templates adjust navigation/wording.
        if obj and obj != kwargs.get('obj'):
            kwargs['subitem'] = True
    return kwargs
|
def function[update_kwargs, parameter[self, request]]:
constant[
Adds variables to the context that are expected by the
base cms templates.
* **navigation** - The side navigation for this bundle and user.
* **dashboard** - The list of dashboard links for this user.
* **object_header** - If no 'object_header' was passed in the current context and the current bundle is set to get it's object_header from it's parent, this will get that view and render it as a string. Otherwise 'object_header will remain unset.
* **subitem** - This is set to true if we rendered a new object_header and the object used to render that string is not present in the context args as 'obj'. This effects navigation and wording in the templates.
]
variable[kwargs] assign[=] call[call[name[super], parameter[name[CMSRender], name[self]]].update_kwargs, parameter[name[request]]]
variable[bundle] assign[=] call[name[kwargs].get, parameter[constant[bundle]]]
variable[url_kwargs] assign[=] call[name[kwargs].get, parameter[constant[url_params]]]
variable[view] assign[=] constant[None]
if name[bundle] begin[:]
<ast.Tuple object at 0x7da1b0a855a0> assign[=] call[name[bundle].get_object_header_view, parameter[name[request], name[url_kwargs]]]
call[name[kwargs]][constant[dashboard]] assign[=] call[name[bundle].admin_site.get_dashboard_urls, parameter[name[request]]]
if name[view] begin[:]
variable[obj] assign[=] call[name[view].get_object, parameter[]]
if <ast.UnaryOp object at 0x7da18f00f5e0> begin[:]
call[name[kwargs]][constant[object_header]] assign[=] call[name[bundle]._render_view_as_string, parameter[name[view], name[name], name[request], name[url_kwargs]]]
if <ast.BoolOp object at 0x7da1b0a867a0> begin[:]
call[name[kwargs]][constant[subitem]] assign[=] constant[True]
return[name[kwargs]]
|
keyword[def] identifier[update_kwargs] ( identifier[self] , identifier[request] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] = identifier[super] ( identifier[CMSRender] , identifier[self] ). identifier[update_kwargs] ( identifier[request] ,** identifier[kwargs] )
identifier[bundle] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[url_kwargs] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[view] = keyword[None]
keyword[if] identifier[bundle] :
identifier[view] , identifier[name] = identifier[bundle] . identifier[get_object_header_view] ( identifier[request] , identifier[url_kwargs] , identifier[parent_only] = keyword[True] )
identifier[kwargs] [ literal[string] ]= identifier[bundle] . identifier[admin_site] . identifier[get_dashboard_urls] ( identifier[request] )
keyword[if] identifier[view] :
identifier[obj] = identifier[view] . identifier[get_object] ()
keyword[if] keyword[not] literal[string] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]= identifier[bundle] . identifier[_render_view_as_string] ( identifier[view] , identifier[name] , identifier[request] , identifier[url_kwargs] )
keyword[if] identifier[obj] keyword[and] identifier[obj] != identifier[kwargs] . identifier[get] ( literal[string] ):
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[return] identifier[kwargs]
|
def update_kwargs(self, request, **kwargs):
"""
Adds variables to the context that are expected by the
base cms templates.
* **navigation** - The side navigation for this bundle and user.
* **dashboard** - The list of dashboard links for this user.
* **object_header** - If no 'object_header' was passed in the current context and the current bundle is set to get it's object_header from it's parent, this will get that view and render it as a string. Otherwise 'object_header will remain unset.
* **subitem** - This is set to true if we rendered a new object_header and the object used to render that string is not present in the context args as 'obj'. This effects navigation and wording in the templates.
"""
kwargs = super(CMSRender, self).update_kwargs(request, **kwargs)
# Check if we need to to include a separate object
# bundle for the title
bundle = kwargs.get('bundle')
url_kwargs = kwargs.get('url_params')
view = None
if bundle:
(view, name) = bundle.get_object_header_view(request, url_kwargs, parent_only=True) # depends on [control=['if'], data=[]]
kwargs['dashboard'] = bundle.admin_site.get_dashboard_urls(request)
if view:
obj = view.get_object()
if not 'object_header' in kwargs:
kwargs['object_header'] = bundle._render_view_as_string(view, name, request, url_kwargs) # depends on [control=['if'], data=[]]
if obj and obj != kwargs.get('obj'):
kwargs['subitem'] = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return kwargs
|
def disconnect_all(self):
    """
    Disconnect all nodes
    :return:
    """
    node_label = self.definition['node_class'].__label__
    # NOTE: "{self}" is a Cypher query parameter, not a Python placeholder,
    # so the query must be assembled without f-string/format interpolation.
    pattern = _rel_helper(lhs='a', rhs='b:' + node_label, ident='r',
                          **self.definition)
    query = 'MATCH (a) WHERE id(a)={self} MATCH %s DELETE r' % pattern
    self.source.cypher(query)
|
def function[disconnect_all, parameter[self]]:
constant[
Disconnect all nodes
:return:
]
variable[rhs] assign[=] binary_operation[constant[b:] + call[name[self].definition][constant[node_class]].__label__]
variable[rel] assign[=] call[name[_rel_helper], parameter[]]
variable[q] assign[=] binary_operation[binary_operation[constant[MATCH (a) WHERE id(a)={self} MATCH ] + name[rel]] + constant[ DELETE r]]
call[name[self].source.cypher, parameter[name[q]]]
|
keyword[def] identifier[disconnect_all] ( identifier[self] ):
literal[string]
identifier[rhs] = literal[string] + identifier[self] . identifier[definition] [ literal[string] ]. identifier[__label__]
identifier[rel] = identifier[_rel_helper] ( identifier[lhs] = literal[string] , identifier[rhs] = identifier[rhs] , identifier[ident] = literal[string] ,** identifier[self] . identifier[definition] )
identifier[q] = literal[string] + identifier[rel] + literal[string]
identifier[self] . identifier[source] . identifier[cypher] ( identifier[q] )
|
def disconnect_all(self):
"""
Disconnect all nodes
:return:
"""
rhs = 'b:' + self.definition['node_class'].__label__
rel = _rel_helper(lhs='a', rhs=rhs, ident='r', **self.definition)
q = 'MATCH (a) WHERE id(a)={self} MATCH ' + rel + ' DELETE r'
self.source.cypher(q)
|
def readfmt(self, fmt):
    """Read a specified object, using a struct format string."""
    # struct.calcsize tells us exactly how many bytes the format consumes.
    raw = self.read(struct.calcsize(fmt))
    (value,) = struct.unpack(fmt, raw)
    return value
|
def function[readfmt, parameter[self, fmt]]:
constant[Read a specified object, using a struct format string.]
variable[size] assign[=] call[name[struct].calcsize, parameter[name[fmt]]]
variable[blob] assign[=] call[name[self].read, parameter[name[size]]]
<ast.Tuple object at 0x7da1b1240ca0> assign[=] call[name[struct].unpack, parameter[name[fmt], name[blob]]]
return[name[obj]]
|
keyword[def] identifier[readfmt] ( identifier[self] , identifier[fmt] ):
literal[string]
identifier[size] = identifier[struct] . identifier[calcsize] ( identifier[fmt] )
identifier[blob] = identifier[self] . identifier[read] ( identifier[size] )
identifier[obj] ,= identifier[struct] . identifier[unpack] ( identifier[fmt] , identifier[blob] )
keyword[return] identifier[obj]
|
def readfmt(self, fmt):
"""Read a specified object, using a struct format string."""
size = struct.calcsize(fmt)
blob = self.read(size)
(obj,) = struct.unpack(fmt, blob)
return obj
|
def _read_or_calc_samples(
    sampler,
    modelidx=0,
    n_samples=100,
    last_step=False,
    e_range=None,
    e_npoints=100,
    threads=None,
):
    """Get samples from blob or compute them from chain and sampler.modelfn

    Parameters
    ----------
    sampler : emcee-style sampler with ``chain``/``flatchain``, ``data`` and
        a ``modelfn(pars, data)`` callable — assumed interface; confirm
        against the sampler construction elsewhere in this module.
    modelidx : int
        Index of the model output to extract from each blob.
    n_samples : int
        Number of parameter vectors drawn (with replacement) from the chain
        when recomputing over ``e_range``.
    last_step : bool
        If True, only use the walkers' final step of the chain.
    e_range : 2-sequence of energy Quantity, optional
        When given, the model is re-evaluated on a log-spaced grid spanning
        this range instead of reading the stored blobs.
    e_npoints : int
        Number of grid points for the recomputed energy axis.
    threads : int or None
        Worker count for the multiprocessing Pool (None = cpu count).

    Returns
    -------
    modelx, model
        Energy axis and the collection of sampled model outputs, as produced
        by ``_process_blob``.
    """
    if not e_range:
        # return the results saved in blobs
        modelx, model = _process_blob(sampler, modelidx, last_step=last_step)
    else:
        # prepare bogus data for calculation: a log-spaced energy grid with a
        # zero flux array that only carries the correct unit information
        e_range = validate_array(
            "e_range", u.Quantity(e_range), physical_type="energy"
        )
        e_unit = e_range.unit
        energy = (
            np.logspace(
                np.log10(e_range[0].value),
                np.log10(e_range[1].value),
                e_npoints,
            )
            * e_unit
        )
        data = {
            "energy": energy,
            "flux": np.zeros(energy.shape) * sampler.data["flux"].unit,
        }
        # init pool and select parameters: draw n_samples random rows from
        # the (flattened or last-step) chain, possibly with repeats
        chain = sampler.chain[-1] if last_step else sampler.flatchain
        pars = chain[np.random.randint(len(chain), size=n_samples)]
        blobs = []
        p = Pool(threads)
        modelouts = p.map(partial(sampler.modelfn, data=data), pars)
        p.close()
        p.terminate()
        for modelout in modelouts:
            # Normalize single-array outputs to the list-of-outputs shape
            # that _process_blob expects.
            if isinstance(modelout, np.ndarray):
                blobs.append([modelout])
            else:
                blobs.append(modelout)
        modelx, model = _process_blob(
            blobs, modelidx=modelidx, energy=data["energy"]
        )
    return modelx, model
|
def function[_read_or_calc_samples, parameter[sampler, modelidx, n_samples, last_step, e_range, e_npoints, threads]]:
constant[Get samples from blob or compute them from chain and sampler.modelfn
]
if <ast.UnaryOp object at 0x7da1b0c52050> begin[:]
<ast.Tuple object at 0x7da1b0c521a0> assign[=] call[name[_process_blob], parameter[name[sampler], name[modelidx]]]
return[tuple[[<ast.Name object at 0x7da1b0e25ed0>, <ast.Name object at 0x7da1b0e24670>]]]
|
keyword[def] identifier[_read_or_calc_samples] (
identifier[sampler] ,
identifier[modelidx] = literal[int] ,
identifier[n_samples] = literal[int] ,
identifier[last_step] = keyword[False] ,
identifier[e_range] = keyword[None] ,
identifier[e_npoints] = literal[int] ,
identifier[threads] = keyword[None] ,
):
literal[string]
keyword[if] keyword[not] identifier[e_range] :
identifier[modelx] , identifier[model] = identifier[_process_blob] ( identifier[sampler] , identifier[modelidx] , identifier[last_step] = identifier[last_step] )
keyword[else] :
identifier[e_range] = identifier[validate_array] (
literal[string] , identifier[u] . identifier[Quantity] ( identifier[e_range] ), identifier[physical_type] = literal[string]
)
identifier[e_unit] = identifier[e_range] . identifier[unit]
identifier[energy] =(
identifier[np] . identifier[logspace] (
identifier[np] . identifier[log10] ( identifier[e_range] [ literal[int] ]. identifier[value] ),
identifier[np] . identifier[log10] ( identifier[e_range] [ literal[int] ]. identifier[value] ),
identifier[e_npoints] ,
)
* identifier[e_unit]
)
identifier[data] ={
literal[string] : identifier[energy] ,
literal[string] : identifier[np] . identifier[zeros] ( identifier[energy] . identifier[shape] )* identifier[sampler] . identifier[data] [ literal[string] ]. identifier[unit] ,
}
identifier[chain] = identifier[sampler] . identifier[chain] [- literal[int] ] keyword[if] identifier[last_step] keyword[else] identifier[sampler] . identifier[flatchain]
identifier[pars] = identifier[chain] [ identifier[np] . identifier[random] . identifier[randint] ( identifier[len] ( identifier[chain] ), identifier[size] = identifier[n_samples] )]
identifier[blobs] =[]
identifier[p] = identifier[Pool] ( identifier[threads] )
identifier[modelouts] = identifier[p] . identifier[map] ( identifier[partial] ( identifier[sampler] . identifier[modelfn] , identifier[data] = identifier[data] ), identifier[pars] )
identifier[p] . identifier[close] ()
identifier[p] . identifier[terminate] ()
keyword[for] identifier[modelout] keyword[in] identifier[modelouts] :
keyword[if] identifier[isinstance] ( identifier[modelout] , identifier[np] . identifier[ndarray] ):
identifier[blobs] . identifier[append] ([ identifier[modelout] ])
keyword[else] :
identifier[blobs] . identifier[append] ( identifier[modelout] )
identifier[modelx] , identifier[model] = identifier[_process_blob] (
identifier[blobs] , identifier[modelidx] = identifier[modelidx] , identifier[energy] = identifier[data] [ literal[string] ]
)
keyword[return] identifier[modelx] , identifier[model]
|
def _read_or_calc_samples(sampler, modelidx=0, n_samples=100, last_step=False, e_range=None, e_npoints=100, threads=None):
"""Get samples from blob or compute them from chain and sampler.modelfn
"""
if not e_range:
# return the results saved in blobs
(modelx, model) = _process_blob(sampler, modelidx, last_step=last_step) # depends on [control=['if'], data=[]]
else:
# prepare bogus data for calculation
e_range = validate_array('e_range', u.Quantity(e_range), physical_type='energy')
e_unit = e_range.unit
energy = np.logspace(np.log10(e_range[0].value), np.log10(e_range[1].value), e_npoints) * e_unit
data = {'energy': energy, 'flux': np.zeros(energy.shape) * sampler.data['flux'].unit}
# init pool and select parameters
chain = sampler.chain[-1] if last_step else sampler.flatchain
pars = chain[np.random.randint(len(chain), size=n_samples)]
blobs = []
p = Pool(threads)
modelouts = p.map(partial(sampler.modelfn, data=data), pars)
p.close()
p.terminate()
for modelout in modelouts:
if isinstance(modelout, np.ndarray):
blobs.append([modelout]) # depends on [control=['if'], data=[]]
else:
blobs.append(modelout) # depends on [control=['for'], data=['modelout']]
(modelx, model) = _process_blob(blobs, modelidx=modelidx, energy=data['energy'])
return (modelx, model)
|
def do_lsfolders(self, subcmd, opts):
    """${cmd_name}: list the sub folders of the maildir.
    ${cmd_usage}
    """
    # NOTE: the docstring above is consumed by the cmdln help templating
    # (${cmd_name}/${cmd_usage}) and must keep those placeholders.
    MdClient(self.maildir, filesystem=self.filesystem).lsfolders(
        stream=self.stdout)
|
def function[do_lsfolders, parameter[self, subcmd, opts]]:
constant[${cmd_name}: list the sub folders of the maildir.
${cmd_usage}
]
variable[client] assign[=] call[name[MdClient], parameter[name[self].maildir]]
call[name[client].lsfolders, parameter[]]
|
keyword[def] identifier[do_lsfolders] ( identifier[self] , identifier[subcmd] , identifier[opts] ):
literal[string]
identifier[client] = identifier[MdClient] ( identifier[self] . identifier[maildir] , identifier[filesystem] = identifier[self] . identifier[filesystem] )
identifier[client] . identifier[lsfolders] ( identifier[stream] = identifier[self] . identifier[stdout] )
|
def do_lsfolders(self, subcmd, opts):
"""${cmd_name}: list the sub folders of the maildir.
${cmd_usage}
"""
client = MdClient(self.maildir, filesystem=self.filesystem)
client.lsfolders(stream=self.stdout)
|
def list_views(app, appbuilder):
    """
    List all registered views
    """
    builder = import_application(app, appbuilder)
    echo_header("List of registered views")
    for registered_view in builder.baseviews:
        line = "View:{0} | Route:{1} | Perms:{2}".format(
            registered_view.__class__.__name__,
            registered_view.route_base,
            registered_view.base_permissions,
        )
        click.echo(line)
|
def function[list_views, parameter[app, appbuilder]]:
constant[
List all registered views
]
variable[_appbuilder] assign[=] call[name[import_application], parameter[name[app], name[appbuilder]]]
call[name[echo_header], parameter[constant[List of registered views]]]
for taget[name[view]] in starred[name[_appbuilder].baseviews] begin[:]
call[name[click].echo, parameter[call[constant[View:{0} | Route:{1} | Perms:{2}].format, parameter[name[view].__class__.__name__, name[view].route_base, name[view].base_permissions]]]]
|
keyword[def] identifier[list_views] ( identifier[app] , identifier[appbuilder] ):
literal[string]
identifier[_appbuilder] = identifier[import_application] ( identifier[app] , identifier[appbuilder] )
identifier[echo_header] ( literal[string] )
keyword[for] identifier[view] keyword[in] identifier[_appbuilder] . identifier[baseviews] :
identifier[click] . identifier[echo] (
literal[string] . identifier[format] (
identifier[view] . identifier[__class__] . identifier[__name__] , identifier[view] . identifier[route_base] , identifier[view] . identifier[base_permissions]
)
)
|
def list_views(app, appbuilder):
"""
List all registered views
"""
_appbuilder = import_application(app, appbuilder)
echo_header('List of registered views')
for view in _appbuilder.baseviews:
click.echo('View:{0} | Route:{1} | Perms:{2}'.format(view.__class__.__name__, view.route_base, view.base_permissions)) # depends on [control=['for'], data=['view']]
|
def _send(self, message):
    """A helper method that does the actual sending.

    Posts *message* to the SMSGlobal HTTP API and parses the reply.
    Returns True on success, False on failure (or raises unless
    ``fail_silently`` is set).
    """
    charset = 'UTF-8'
    params = {
        'action': 'sendsms',
        'user': self.get_username(),
        'password': self.get_password(),
        'from': message.from_phone,
        'to': ",".join(message.to),
        'text': message.body,
        'clientcharset': charset,
        'detectcharset': 1,
        # Use float division: under Python 2, len(...) / 160 floors before
        # math.ceil ever runs, so e.g. a 161-char body produced maxsplit=1
        # instead of 2.
        'maxsplit': int(math.ceil(len(message.body) / 160.0)),
    }
    req = urllib2.Request(SMSGLOBAL_API_URL_SENDSMS, urllib.urlencode(params))
    result_page = urllib2.urlopen(req).read()
    results = self._parse_response(result_page)
    if results is None:
        # Unrecognized gateway response.
        if not self.fail_silently:
            raise Exception("Error determining response: [" + result_page + "]")
        return False
    code, sendqmsgid, msgid = results
    if code != '0':
        # Gateway reported an error code.
        if not self.fail_silently:
            raise Exception("Error sending sms: [%s], extracted results(code, sendqmsgid, msgid): [%s]" % (result_page, results))
        return False
    else:
        logger.info('SENT to: %s; sender: %s; code: %s; sendqmsgid: %s; msgid: %s; message: %s' % (
            message.to,
            message.from_phone,
            code,
            sendqmsgid,
            msgid,
            message.body
        ))
        return True
|
def function[_send, parameter[self, message]]:
constant[A helper method that does the actual sending.]
variable[charset] assign[=] constant[UTF-8]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e7dc0>, <ast.Constant object at 0x7da20c6e6890>, <ast.Constant object at 0x7da20c6e5cc0>, <ast.Constant object at 0x7da20c6e77f0>, <ast.Constant object at 0x7da20c6e54e0>, <ast.Constant object at 0x7da20c6e6860>, <ast.Constant object at 0x7da20c6e7580>, <ast.Constant object at 0x7da20c6e64a0>, <ast.Constant object at 0x7da20c6e5870>], [<ast.Constant object at 0x7da20c6e7e50>, <ast.Call object at 0x7da20c6e6d70>, <ast.Call object at 0x7da20c6e4340>, <ast.Attribute object at 0x7da20c6e65c0>, <ast.Call object at 0x7da20c6e4730>, <ast.Attribute object at 0x7da20c6e7730>, <ast.Name object at 0x7da20c6e7d30>, <ast.Constant object at 0x7da20c6e6bf0>, <ast.Call object at 0x7da20c6e74f0>]]
variable[req] assign[=] call[name[urllib2].Request, parameter[name[SMSGLOBAL_API_URL_SENDSMS], call[name[urllib].urlencode, parameter[name[params]]]]]
variable[result_page] assign[=] call[call[name[urllib2].urlopen, parameter[name[req]]].read, parameter[]]
variable[results] assign[=] call[name[self]._parse_response, parameter[name[result_page]]]
if compare[name[results] is constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da18fe90640> begin[:]
<ast.Raise object at 0x7da18fe93550>
return[constant[False]]
<ast.Tuple object at 0x7da18fe92350> assign[=] name[results]
if compare[name[code] not_equal[!=] constant[0]] begin[:]
if <ast.UnaryOp object at 0x7da20c6c5c30> begin[:]
<ast.Raise object at 0x7da20c6c7880>
return[constant[False]]
|
keyword[def] identifier[_send] ( identifier[self] , identifier[message] ):
literal[string]
identifier[charset] = literal[string]
identifier[params] ={
literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[get_username] (),
literal[string] : identifier[self] . identifier[get_password] (),
literal[string] : identifier[message] . identifier[from_phone] ,
literal[string] : literal[string] . identifier[join] ( identifier[message] . identifier[to] ),
literal[string] : identifier[message] . identifier[body] ,
literal[string] : identifier[charset] ,
literal[string] : literal[int] ,
literal[string] : identifier[int] ( identifier[math] . identifier[ceil] ( identifier[len] ( identifier[message] . identifier[body] )/ literal[int] ))
}
identifier[req] = identifier[urllib2] . identifier[Request] ( identifier[SMSGLOBAL_API_URL_SENDSMS] , identifier[urllib] . identifier[urlencode] ( identifier[params] ))
identifier[result_page] = identifier[urllib2] . identifier[urlopen] ( identifier[req] ). identifier[read] ()
identifier[results] = identifier[self] . identifier[_parse_response] ( identifier[result_page] )
keyword[if] identifier[results] keyword[is] keyword[None] :
keyword[if] keyword[not] identifier[self] . identifier[fail_silently] :
keyword[raise] identifier[Exception] ( literal[string] + identifier[result_page] + literal[string] )
keyword[return] keyword[False]
identifier[code] , identifier[sendqmsgid] , identifier[msgid] = identifier[results]
keyword[if] identifier[code] != literal[string] :
keyword[if] keyword[not] identifier[self] . identifier[fail_silently] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[result_page] , identifier[results] ))
keyword[return] keyword[False]
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] %(
identifier[message] . identifier[to] ,
identifier[message] . identifier[from_phone] ,
identifier[code] ,
identifier[sendqmsgid] ,
identifier[msgid] ,
identifier[message] . identifier[body]
))
keyword[return] keyword[True]
|
def _send(self, message):
"""A helper method that does the actual sending."""
charset = 'UTF-8'
params = {'action': 'sendsms', 'user': self.get_username(), 'password': self.get_password(), 'from': message.from_phone, 'to': ','.join(message.to), 'text': message.body, 'clientcharset': charset, 'detectcharset': 1, 'maxsplit': int(math.ceil(len(message.body) / 160))}
req = urllib2.Request(SMSGLOBAL_API_URL_SENDSMS, urllib.urlencode(params))
result_page = urllib2.urlopen(req).read()
results = self._parse_response(result_page)
if results is None:
if not self.fail_silently:
raise Exception('Error determining response: [' + result_page + ']') # depends on [control=['if'], data=[]]
return False # depends on [control=['if'], data=[]]
(code, sendqmsgid, msgid) = results
if code != '0':
if not self.fail_silently:
raise Exception('Error sending sms: [%s], extracted results(code, sendqmsgid, msgid): [%s]' % (result_page, results)) # depends on [control=['if'], data=[]]
return False # depends on [control=['if'], data=[]]
else:
logger.info('SENT to: %s; sender: %s; code: %s; sendqmsgid: %s; msgid: %s; message: %s' % (message.to, message.from_phone, code, sendqmsgid, msgid, message.body))
return True
|
def colorspace(im, bw=False, replace_alpha=False, **kwargs):
    """
    Convert images to the correct color space.

    A passive option (i.e. always processed) of this method is that all images
    (unless grayscale) are converted to RGB colorspace.

    This processor should be listed before :func:`scale_and_crop` so palette is
    changed before the image is resized.

    bw
        Make the thumbnail grayscale (not really just black & white).

    replace_alpha
        Replace any transparency layer with a solid color. For example,
        ``replace_alpha='#fff'`` would replace the transparency layer with
        white.
    """
    if im.mode == 'I':
        # PIL (and pillow) cannot convert 16-bit grayscale images to lower
        # modes directly, so manually map them to an 8-bit 'L' image first.
        im = im.point(list(_points_table()), 'L')

    has_alpha = utils.is_transparent(im)
    # Grayscale sources (or an explicit bw request) stay 'L'; everything
    # else is normalised to RGB.
    target_mode = 'L' if (im.mode in ('L', 'LA') or bw) else 'RGB'

    if has_alpha:
        if replace_alpha:
            # Flatten the transparency onto a solid-coloured backdrop.
            if im.mode != 'RGBA':
                im = im.convert('RGBA')
            backdrop = Image.new('RGBA', im.size, replace_alpha)
            backdrop.paste(im, mask=im)
            im = backdrop
        else:
            # Keep the alpha channel in the output mode.
            target_mode += 'A'

    if im.mode != target_mode:
        im = im.convert(target_mode)
    return im
|
def function[colorspace, parameter[im, bw, replace_alpha]]:
constant[
Convert images to the correct color space.
A passive option (i.e. always processed) of this method is that all images
(unless grayscale) are converted to RGB colorspace.
This processor should be listed before :func:`scale_and_crop` so palette is
changed before the image is resized.
bw
Make the thumbnail grayscale (not really just black & white).
replace_alpha
Replace any transparency layer with a solid color. For example,
``replace_alpha='#fff'`` would replace the transparency layer with
white.
]
if compare[name[im].mode equal[==] constant[I]] begin[:]
variable[im] assign[=] call[name[im].point, parameter[call[name[list], parameter[call[name[_points_table], parameter[]]]], constant[L]]]
variable[is_transparent] assign[=] call[name[utils].is_transparent, parameter[name[im]]]
variable[is_grayscale] assign[=] compare[name[im].mode in tuple[[<ast.Constant object at 0x7da18f8116f0>, <ast.Constant object at 0x7da18f8132e0>]]]
variable[new_mode] assign[=] name[im].mode
if <ast.BoolOp object at 0x7da18f8117b0> begin[:]
variable[new_mode] assign[=] constant[L]
if name[is_transparent] begin[:]
if name[replace_alpha] begin[:]
if compare[name[im].mode not_equal[!=] constant[RGBA]] begin[:]
variable[im] assign[=] call[name[im].convert, parameter[constant[RGBA]]]
variable[base] assign[=] call[name[Image].new, parameter[constant[RGBA], name[im].size, name[replace_alpha]]]
call[name[base].paste, parameter[name[im]]]
variable[im] assign[=] name[base]
if compare[name[im].mode not_equal[!=] name[new_mode]] begin[:]
variable[im] assign[=] call[name[im].convert, parameter[name[new_mode]]]
return[name[im]]
|
keyword[def] identifier[colorspace] ( identifier[im] , identifier[bw] = keyword[False] , identifier[replace_alpha] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[im] . identifier[mode] == literal[string] :
identifier[im] = identifier[im] . identifier[point] ( identifier[list] ( identifier[_points_table] ()), literal[string] )
identifier[is_transparent] = identifier[utils] . identifier[is_transparent] ( identifier[im] )
identifier[is_grayscale] = identifier[im] . identifier[mode] keyword[in] ( literal[string] , literal[string] )
identifier[new_mode] = identifier[im] . identifier[mode]
keyword[if] identifier[is_grayscale] keyword[or] identifier[bw] :
identifier[new_mode] = literal[string]
keyword[else] :
identifier[new_mode] = literal[string]
keyword[if] identifier[is_transparent] :
keyword[if] identifier[replace_alpha] :
keyword[if] identifier[im] . identifier[mode] != literal[string] :
identifier[im] = identifier[im] . identifier[convert] ( literal[string] )
identifier[base] = identifier[Image] . identifier[new] ( literal[string] , identifier[im] . identifier[size] , identifier[replace_alpha] )
identifier[base] . identifier[paste] ( identifier[im] , identifier[mask] = identifier[im] )
identifier[im] = identifier[base]
keyword[else] :
identifier[new_mode] = identifier[new_mode] + literal[string]
keyword[if] identifier[im] . identifier[mode] != identifier[new_mode] :
identifier[im] = identifier[im] . identifier[convert] ( identifier[new_mode] )
keyword[return] identifier[im]
|
def colorspace(im, bw=False, replace_alpha=False, **kwargs):
"""
Convert images to the correct color space.
A passive option (i.e. always processed) of this method is that all images
(unless grayscale) are converted to RGB colorspace.
This processor should be listed before :func:`scale_and_crop` so palette is
changed before the image is resized.
bw
Make the thumbnail grayscale (not really just black & white).
replace_alpha
Replace any transparency layer with a solid color. For example,
``replace_alpha='#fff'`` would replace the transparency layer with
white.
"""
if im.mode == 'I':
# PIL (and pillow) have can't convert 16 bit grayscale images to lower
# modes, so manually convert them to an 8 bit grayscale.
im = im.point(list(_points_table()), 'L') # depends on [control=['if'], data=[]]
is_transparent = utils.is_transparent(im)
is_grayscale = im.mode in ('L', 'LA')
new_mode = im.mode
if is_grayscale or bw:
new_mode = 'L' # depends on [control=['if'], data=[]]
else:
new_mode = 'RGB'
if is_transparent:
if replace_alpha:
if im.mode != 'RGBA':
im = im.convert('RGBA') # depends on [control=['if'], data=[]]
base = Image.new('RGBA', im.size, replace_alpha)
base.paste(im, mask=im)
im = base # depends on [control=['if'], data=[]]
else:
new_mode = new_mode + 'A' # depends on [control=['if'], data=[]]
if im.mode != new_mode:
im = im.convert(new_mode) # depends on [control=['if'], data=['new_mode']]
return im
|
def configurations(self):
    """
    Property for accessing :class:`ConfigurationManager` instance, which is used to manage configurations.

    :rtype: yagocd.resources.configuration.ConfigurationManager
    """
    # Lazily create the manager on first access and reuse the cached
    # instance on every later call.
    manager = self._configuration_manager
    if manager is None:
        manager = ConfigurationManager(session=self._session)
        self._configuration_manager = manager
    return manager
|
def function[configurations, parameter[self]]:
constant[
Property for accessing :class:`ConfigurationManager` instance, which is used to manage configurations.
:rtype: yagocd.resources.configuration.ConfigurationManager
]
if compare[name[self]._configuration_manager is constant[None]] begin[:]
name[self]._configuration_manager assign[=] call[name[ConfigurationManager], parameter[]]
return[name[self]._configuration_manager]
|
keyword[def] identifier[configurations] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_configuration_manager] keyword[is] keyword[None] :
identifier[self] . identifier[_configuration_manager] = identifier[ConfigurationManager] ( identifier[session] = identifier[self] . identifier[_session] )
keyword[return] identifier[self] . identifier[_configuration_manager]
|
def configurations(self):
"""
Property for accessing :class:`ConfigurationManager` instance, which is used to manage configurations.
:rtype: yagocd.resources.configuration.ConfigurationManager
"""
if self._configuration_manager is None:
self._configuration_manager = ConfigurationManager(session=self._session) # depends on [control=['if'], data=[]]
return self._configuration_manager
|
def disable(self):
    """
    Disable the button, if in non-expert mode.
    """
    w.ActButton.disable(self)
    colours = get_root(self).globals.COL
    # Expert mode keeps the active colour; otherwise show the
    # disabled-state colour.
    self.config(bg=colours['start'] if self._expert else colours['startD'])
|
def function[disable, parameter[self]]:
constant[
Disable the button, if in non-expert mode.
]
call[name[w].ActButton.disable, parameter[name[self]]]
variable[g] assign[=] call[name[get_root], parameter[name[self]]].globals
if name[self]._expert begin[:]
call[name[self].config, parameter[]]
|
keyword[def] identifier[disable] ( identifier[self] ):
literal[string]
identifier[w] . identifier[ActButton] . identifier[disable] ( identifier[self] )
identifier[g] = identifier[get_root] ( identifier[self] ). identifier[globals]
keyword[if] identifier[self] . identifier[_expert] :
identifier[self] . identifier[config] ( identifier[bg] = identifier[g] . identifier[COL] [ literal[string] ])
keyword[else] :
identifier[self] . identifier[config] ( identifier[bg] = identifier[g] . identifier[COL] [ literal[string] ])
|
def disable(self):
"""
Disable the button, if in non-expert mode.
"""
w.ActButton.disable(self)
g = get_root(self).globals
if self._expert:
self.config(bg=g.COL['start']) # depends on [control=['if'], data=[]]
else:
self.config(bg=g.COL['startD'])
|
def convert_param(self, method, param, value):
    """Converts the parameter using the function 'convert' function of the
    validation rules. Same parameters as the `validate_param` method, so
    it might have just been added there. But lumping together the two
    functionalities would make overwriting harder.

    :param method: A function to get the validation information from (done
                   using :func:`_get_validation`).
    :type method: Python function
    :param param: Name of the parameter to validate the value for.
    :type param: str
    :param value: Value passed in for the given parameter.
    :type value: Any valid Python value
    :raises: :class:`wsgiservice.exceptions.ValidationException` if the
             value is invalid for the given method and parameter.
    """
    rules = self._get_validation(method, param)
    converter = rules.get('convert') if rules else None
    if not converter:
        # No conversion configured for this parameter; pass it through.
        return value
    try:
        return converter(value)
    except ValueError:
        raise ValidationException(
            "{0} value {1} does not validate.".format(param, value))
|
def function[convert_param, parameter[self, method, param, value]]:
constant[Converts the parameter using the function 'convert' function of the
validation rules. Same parameters as the `validate_param` method, so
it might have just been added there. But lumping together the two
functionalities would make overwriting harder.
:param method: A function to get the validation information from (done
using :func:`_get_validation`).
:type method: Python function
:param param: Name of the parameter to validate the value for.
:type param: str
:param value: Value passed in for the given parameter.
:type value: Any valid Python value
:raises: :class:`wsgiservice.exceptions.ValidationException` if the
value is invalid for the given method and parameter.
]
variable[rules] assign[=] call[name[self]._get_validation, parameter[name[method], name[param]]]
if <ast.BoolOp object at 0x7da18c4cd330> begin[:]
return[name[value]]
<ast.Try object at 0x7da20c795240>
|
keyword[def] identifier[convert_param] ( identifier[self] , identifier[method] , identifier[param] , identifier[value] ):
literal[string]
identifier[rules] = identifier[self] . identifier[_get_validation] ( identifier[method] , identifier[param] )
keyword[if] keyword[not] identifier[rules] keyword[or] keyword[not] identifier[rules] . identifier[get] ( literal[string] ):
keyword[return] identifier[value]
keyword[try] :
keyword[return] identifier[rules] [ literal[string] ]( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValidationException] (
literal[string] . identifier[format] ( identifier[param] , identifier[value] ))
|
def convert_param(self, method, param, value):
"""Converts the parameter using the function 'convert' function of the
validation rules. Same parameters as the `validate_param` method, so
it might have just been added there. But lumping together the two
functionalities would make overwriting harder.
:param method: A function to get the validation information from (done
using :func:`_get_validation`).
:type method: Python function
:param param: Name of the parameter to validate the value for.
:type param: str
:param value: Value passed in for the given parameter.
:type value: Any valid Python value
:raises: :class:`wsgiservice.exceptions.ValidationException` if the
value is invalid for the given method and parameter.
"""
rules = self._get_validation(method, param)
if not rules or not rules.get('convert'):
return value # depends on [control=['if'], data=[]]
try:
return rules['convert'](value) # depends on [control=['try'], data=[]]
except ValueError:
raise ValidationException('{0} value {1} does not validate.'.format(param, value)) # depends on [control=['except'], data=[]]
|
def validate(opts):
    """
    Client-facing validate method. Checks to see if the passed in opts
    argument is either a list or a namespace containing the attribute
    'extensions' and runs validations on it accordingly. If opts is neither
    of those things, this will raise a ValueError

    :param opts: either a list of strings or a namespace with the attribute
        'extensions'
    :raises ValueError: if the value passed in is not a list or a namespace
        with the attribute 'extensions'
    :raises ValidationException: if the extensions fail validations
    :return: True if extensions pass the validations
    """
    # Sentinel lets us distinguish "no attribute" from "attribute is None".
    _missing = object()
    extensions = getattr(opts, 'extensions', _missing)
    if extensions is not _missing:
        # Namespace-style input: validate its 'extensions' attribute.
        return _validate(extensions)
    if isinstance(opts, list):
        return _validate(opts)
    raise ValueError("Value passed into extension validation must either "
                     "be a list of strings or a namespace with an "
                     "attribute of 'extensions'")
|
def function[validate, parameter[opts]]:
constant[
Client-facing validate method. Checks to see if the passed in opts
argument is either a list or a namespace containing the attribute
'extensions' and runs validations on it accordingly. If opts is neither
of those things, this will raise a ValueError
:param opts: either a list of strings or a namespace with the attribute
'extensions'
:raises ValueError: if the value passed in is not a list or a namespace
with the attribute 'extensions'
:raises ValidationException: if the extensions fail validations
:return: True if extensions pass the validations
]
if call[name[hasattr], parameter[name[opts], constant[extensions]]] begin[:]
return[call[name[_validate], parameter[name[opts].extensions]]]
|
keyword[def] identifier[validate] ( identifier[opts] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[opts] , literal[string] ):
keyword[return] identifier[_validate] ( identifier[opts] . identifier[extensions] )
keyword[elif] identifier[isinstance] ( identifier[opts] , identifier[list] ):
keyword[return] identifier[_validate] ( identifier[opts] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string] )
|
def validate(opts):
"""
Client-facing validate method. Checks to see if the passed in opts
argument is either a list or a namespace containing the attribute
'extensions' and runs validations on it accordingly. If opts is neither
of those things, this will raise a ValueError
:param opts: either a list of strings or a namespace with the attribute
'extensions'
:raises ValueError: if the value passed in is not a list or a namespace
with the attribute 'extensions'
:raises ValidationException: if the extensions fail validations
:return: True if extensions pass the validations
"""
if hasattr(opts, 'extensions'):
return _validate(opts.extensions) # depends on [control=['if'], data=[]]
elif isinstance(opts, list):
return _validate(opts) # depends on [control=['if'], data=[]]
else:
raise ValueError("Value passed into extension validation must either be a list of strings or a namespace with an attribute of 'extensions'")
|
def close_connection(self, connection: str) -> None:
    """ Close the connection"""
    if connection in self.connections:
        # Drop the registry entry and release the underlying connection.
        self.connections.pop(connection).close()
    else:
        raise ConnectionNotOpen(connection)
|
def function[close_connection, parameter[self, connection]]:
constant[ Close the connection]
if compare[name[connection] <ast.NotIn object at 0x7da2590d7190> name[self].connections] begin[:]
<ast.Raise object at 0x7da1b1cc3b50>
call[call[name[self].connections.pop, parameter[name[connection]]].close, parameter[]]
|
keyword[def] identifier[close_connection] ( identifier[self] , identifier[connection] : identifier[str] )-> keyword[None] :
literal[string]
keyword[if] identifier[connection] keyword[not] keyword[in] identifier[self] . identifier[connections] :
keyword[raise] identifier[ConnectionNotOpen] ( identifier[connection] )
identifier[self] . identifier[connections] . identifier[pop] ( identifier[connection] ). identifier[close] ()
|
def close_connection(self, connection: str) -> None:
""" Close the connection"""
if connection not in self.connections:
raise ConnectionNotOpen(connection) # depends on [control=['if'], data=['connection']]
self.connections.pop(connection).close()
|
def _has_expired(self):
""" Has this HIT expired yet? """
expired = False
if hasattr(self, 'Expiration'):
now = datetime.datetime.utcnow()
expiration = datetime.datetime.strptime(self.Expiration, '%Y-%m-%dT%H:%M:%SZ')
expired = (now >= expiration)
else:
raise ValueError("ERROR: Request for expired property, but no Expiration in HIT!")
return expired
|
def function[_has_expired, parameter[self]]:
constant[ Has this HIT expired yet? ]
variable[expired] assign[=] constant[False]
if call[name[hasattr], parameter[name[self], constant[Expiration]]] begin[:]
variable[now] assign[=] call[name[datetime].datetime.utcnow, parameter[]]
variable[expiration] assign[=] call[name[datetime].datetime.strptime, parameter[name[self].Expiration, constant[%Y-%m-%dT%H:%M:%SZ]]]
variable[expired] assign[=] compare[name[now] greater_or_equal[>=] name[expiration]]
return[name[expired]]
|
keyword[def] identifier[_has_expired] ( identifier[self] ):
literal[string]
identifier[expired] = keyword[False]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[now] = identifier[datetime] . identifier[datetime] . identifier[utcnow] ()
identifier[expiration] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[self] . identifier[Expiration] , literal[string] )
identifier[expired] =( identifier[now] >= identifier[expiration] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[expired]
|
def _has_expired(self):
""" Has this HIT expired yet? """
expired = False
if hasattr(self, 'Expiration'):
now = datetime.datetime.utcnow()
expiration = datetime.datetime.strptime(self.Expiration, '%Y-%m-%dT%H:%M:%SZ')
expired = now >= expiration # depends on [control=['if'], data=[]]
else:
raise ValueError('ERROR: Request for expired property, but no Expiration in HIT!')
return expired
|
def update_scoped_package(self, package_version_details, feed_id, package_scope, unscoped_package_name, package_version):
    """UpdateScopedPackage.
    [Preview API]
    :param :class:`<PackageVersionDetails> <azure.devops.v5_0.npm.models.PackageVersionDetails>` package_version_details:
    :param str feed_id:
    :param str package_scope:
    :param str unscoped_package_name:
    :param str package_version:
    :rtype: :class:`<Package> <azure.devops.v5_0.npm.models.Package>`
    """
    # URL-serialize each supplied route segment, skipping absent ones.
    segments = (
        ('feedId', 'feed_id', feed_id),
        ('packageScope', 'package_scope', package_scope),
        ('unscopedPackageName', 'unscoped_package_name', unscoped_package_name),
        ('packageVersion', 'package_version', package_version),
    )
    route_values = {key: self._serialize.url(name, val, 'str')
                    for key, name, val in segments if val is not None}
    content = self._serialize.body(package_version_details, 'PackageVersionDetails')
    response = self._send(http_method='PATCH',
                          location_id='e93d9ec3-4022-401e-96b0-83ea5d911e09',
                          version='5.0-preview.1',
                          route_values=route_values,
                          content=content)
    return self._deserialize('Package', response)
|
def function[update_scoped_package, parameter[self, package_version_details, feed_id, package_scope, unscoped_package_name, package_version]]:
constant[UpdateScopedPackage.
[Preview API]
:param :class:`<PackageVersionDetails> <azure.devops.v5_0.npm.models.PackageVersionDetails>` package_version_details:
:param str feed_id:
:param str package_scope:
:param str unscoped_package_name:
:param str package_version:
:rtype: :class:`<Package> <azure.devops.v5_0.npm.models.Package>`
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[feed_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[feedId]] assign[=] call[name[self]._serialize.url, parameter[constant[feed_id], name[feed_id], constant[str]]]
if compare[name[package_scope] is_not constant[None]] begin[:]
call[name[route_values]][constant[packageScope]] assign[=] call[name[self]._serialize.url, parameter[constant[package_scope], name[package_scope], constant[str]]]
if compare[name[unscoped_package_name] is_not constant[None]] begin[:]
call[name[route_values]][constant[unscopedPackageName]] assign[=] call[name[self]._serialize.url, parameter[constant[unscoped_package_name], name[unscoped_package_name], constant[str]]]
if compare[name[package_version] is_not constant[None]] begin[:]
call[name[route_values]][constant[packageVersion]] assign[=] call[name[self]._serialize.url, parameter[constant[package_version], name[package_version], constant[str]]]
variable[content] assign[=] call[name[self]._serialize.body, parameter[name[package_version_details], constant[PackageVersionDetails]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[Package], name[response]]]]
|
keyword[def] identifier[update_scoped_package] ( identifier[self] , identifier[package_version_details] , identifier[feed_id] , identifier[package_scope] , identifier[unscoped_package_name] , identifier[package_version] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[feed_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[feed_id] , literal[string] )
keyword[if] identifier[package_scope] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[package_scope] , literal[string] )
keyword[if] identifier[unscoped_package_name] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[unscoped_package_name] , literal[string] )
keyword[if] identifier[package_version] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[package_version] , literal[string] )
identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[package_version_details] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[content] = identifier[content] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
|
def update_scoped_package(self, package_version_details, feed_id, package_scope, unscoped_package_name, package_version):
"""UpdateScopedPackage.
[Preview API]
:param :class:`<PackageVersionDetails> <azure.devops.v5_0.npm.models.PackageVersionDetails>` package_version_details:
:param str feed_id:
:param str package_scope:
:param str unscoped_package_name:
:param str package_version:
:rtype: :class:`<Package> <azure.devops.v5_0.npm.models.Package>`
"""
route_values = {}
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str') # depends on [control=['if'], data=['feed_id']]
if package_scope is not None:
route_values['packageScope'] = self._serialize.url('package_scope', package_scope, 'str') # depends on [control=['if'], data=['package_scope']]
if unscoped_package_name is not None:
route_values['unscopedPackageName'] = self._serialize.url('unscoped_package_name', unscoped_package_name, 'str') # depends on [control=['if'], data=['unscoped_package_name']]
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str') # depends on [control=['if'], data=['package_version']]
content = self._serialize.body(package_version_details, 'PackageVersionDetails')
response = self._send(http_method='PATCH', location_id='e93d9ec3-4022-401e-96b0-83ea5d911e09', version='5.0-preview.1', route_values=route_values, content=content)
return self._deserialize('Package', response)
|
def get_appliances(self, location_id):
    """Get the appliances added for a specified location.

    Args:
        location_id (string): identifiying string of appliance

    Returns:
        list: dictionary objects containing appliances data
    """
    headers = self.__gen_headers()
    headers["Content-Type"] = "application/json"
    # Attach the location as a query-string parameter before issuing the GET.
    url = self.__append_url_params("https://api.neur.io/v1/appliances",
                                   {"locationId": location_id})
    return requests.get(url, headers=headers).json()
|
def function[get_appliances, parameter[self, location_id]]:
constant[Get the appliances added for a specified location.
Args:
location_id (string): identifiying string of appliance
Returns:
list: dictionary objects containing appliances data
]
variable[url] assign[=] constant[https://api.neur.io/v1/appliances]
variable[headers] assign[=] call[name[self].__gen_headers, parameter[]]
call[name[headers]][constant[Content-Type]] assign[=] constant[application/json]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20e955cc0>], [<ast.Name object at 0x7da20e9563e0>]]
variable[url] assign[=] call[name[self].__append_url_params, parameter[name[url], name[params]]]
variable[r] assign[=] call[name[requests].get, parameter[name[url]]]
return[call[name[r].json, parameter[]]]
|
keyword[def] identifier[get_appliances] ( identifier[self] , identifier[location_id] ):
literal[string]
identifier[url] = literal[string]
identifier[headers] = identifier[self] . identifier[__gen_headers] ()
identifier[headers] [ literal[string] ]= literal[string]
identifier[params] ={
literal[string] : identifier[location_id] ,
}
identifier[url] = identifier[self] . identifier[__append_url_params] ( identifier[url] , identifier[params] )
identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[headers] )
keyword[return] identifier[r] . identifier[json] ()
|
def get_appliances(self, location_id):
"""Get the appliances added for a specified location.
Args:
location_id (string): identifiying string of appliance
Returns:
list: dictionary objects containing appliances data
"""
url = 'https://api.neur.io/v1/appliances'
headers = self.__gen_headers()
headers['Content-Type'] = 'application/json'
params = {'locationId': location_id}
url = self.__append_url_params(url, params)
r = requests.get(url, headers=headers)
return r.json()
|
def run(**kwargs):
    '''
    Run a single module function or a range of module functions in a batch.
    Supersedes ``module.run`` function, which requires ``m_`` prefix to
    function-specific parameters.

    :param returner:
        Specify a common returner for the whole batch to send the return data

    :param kwargs:
        Pass any arguments needed to execute the function(s)

    .. code-block:: yaml

      some_id_of_state:
        module.run:
          - network.ip_addrs:
            - interface: eth0
          - cloud.create:
            - names:
              - test-isbm-1
              - test-isbm-2
            - ssh_username: sles
            - image: sles12sp2
            - securitygroup: default
            - size: 'c3.large'
            - location: ap-northeast-1
            - delvol_on_destroy: True

    :return: a standard state return dict (name/changes/comment/result)
    '''
    # The state's 'name' requisite is not a function to run; discard it.
    if 'name' in kwargs:
        kwargs.pop('name')
    ret = {
        'name': list(kwargs),
        'changes': {},
        'comment': '',
        'result': None,
    }

    # Keys that look like dotted module functions (e.g. ``network.ip_addrs``).
    functions = [func for func in kwargs if '.' in func]
    missing = []
    tests = []
    for func in functions:
        # Strip any ':<alias>' suffix before looking up the loader.
        func = func.split(':')[0]
        if func not in __salt__:
            missing.append(func)
        elif __opts__['test']:
            tests.append(func)

    if tests or missing:
        # BUG FIX: the previous code formatted the plural suffix as
        # ``plr=(len(x) > 1 or '')``, which renders the boolean ``True``
        # ("Unavailable functionTrue: ..."); use a real 's' suffix instead.
        messages = []
        if missing:
            messages.append("Unavailable function{plr}: {func}.".format(
                plr='s' if len(missing) > 1 else '',
                func=', '.join(missing)))
        if tests:
            messages.append("Function{plr} {func} to be executed.".format(
                plr='s' if len(tests) > 1 else '',
                func=', '.join(tests)))
        ret['comment'] = ' '.join(messages).strip()
        # Missing functions mean failure; a pure test-mode run is not one.
        ret['result'] = not (missing or not tests)

    if ret['result'] is None:
        ret['result'] = True

        failures = []
        success = []
        for func in functions:
            _func = func.split(':')[0]
            try:
                func_ret = _call_function(_func, returner=kwargs.get('returner'),
                                          func_args=kwargs.get(func))
                if not _get_result(func_ret, ret['changes'].get('ret', {})):
                    # Only dict returns carry a usable failure comment.
                    if isinstance(func_ret, dict):
                        failures.append("'{0}' failed: {1}".format(
                            func, func_ret.get('comment', '(error message N/A)')))
                else:
                    success.append('{0}: {1}'.format(
                        func, func_ret.get('comment', 'Success') if isinstance(func_ret, dict) else func_ret))
                    ret['changes'][func] = func_ret
            except (SaltInvocationError, TypeError) as ex:
                failures.append("'{0}' failed: {1}".format(func, ex))
        ret['comment'] = ', '.join(failures + success)
        ret['result'] = not bool(failures)

    return ret
|
def function[run, parameter[]]:
constant[
Run a single module function or a range of module functions in a batch.
Supersedes ``module.run`` function, which requires ``m_`` prefix to
function-specific parameters.
:param returner:
Specify a common returner for the whole batch to send the return data
:param kwargs:
Pass any arguments needed to execute the function(s)
.. code-block:: yaml
some_id_of_state:
module.run:
- network.ip_addrs:
- interface: eth0
- cloud.create:
- names:
- test-isbm-1
- test-isbm-2
- ssh_username: sles
- image: sles12sp2
- securitygroup: default
- size: 'c3.large'
- location: ap-northeast-1
- delvol_on_destroy: True
:return:
]
if compare[constant[name] in name[kwargs]] begin[:]
call[name[kwargs].pop, parameter[constant[name]]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2185f30>, <ast.Constant object at 0x7da1b2185210>, <ast.Constant object at 0x7da1b21869e0>, <ast.Constant object at 0x7da1b2186830>], [<ast.Call object at 0x7da1b2184be0>, <ast.Dict object at 0x7da1b2184610>, <ast.Constant object at 0x7da1b2184760>, <ast.Constant object at 0x7da1b2184a30>]]
variable[functions] assign[=] <ast.ListComp object at 0x7da1b2186d10>
variable[missing] assign[=] list[[]]
variable[tests] assign[=] list[[]]
for taget[name[func]] in starred[name[functions]] begin[:]
variable[func] assign[=] call[call[name[func].split, parameter[constant[:]]]][constant[0]]
if compare[name[func] <ast.NotIn object at 0x7da2590d7190> name[__salt__]] begin[:]
call[name[missing].append, parameter[name[func]]]
if <ast.BoolOp object at 0x7da1b2184790> begin[:]
call[name[ret]][constant[comment]] assign[=] call[call[constant[ ].join, parameter[list[[<ast.BoolOp object at 0x7da1b2187a00>, <ast.BoolOp object at 0x7da1b21875e0>]]]].strip, parameter[]]
call[name[ret]][constant[result]] assign[=] <ast.UnaryOp object at 0x7da1b2185f60>
if compare[call[name[ret]][constant[result]] is constant[None]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
variable[failures] assign[=] list[[]]
variable[success] assign[=] list[[]]
for taget[name[func]] in starred[name[functions]] begin[:]
variable[_func] assign[=] call[call[name[func].split, parameter[constant[:]]]][constant[0]]
<ast.Try object at 0x7da1b2185d80>
call[name[ret]][constant[comment]] assign[=] call[constant[, ].join, parameter[binary_operation[name[failures] + name[success]]]]
call[name[ret]][constant[result]] assign[=] <ast.UnaryOp object at 0x7da1b21a5510>
return[name[ret]]
|
keyword[def] identifier[run] (** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[ret] ={
literal[string] : identifier[list] ( identifier[kwargs] ),
literal[string] :{},
literal[string] : literal[string] ,
literal[string] : keyword[None] ,
}
identifier[functions] =[ identifier[func] keyword[for] identifier[func] keyword[in] identifier[kwargs] keyword[if] literal[string] keyword[in] identifier[func] ]
identifier[missing] =[]
identifier[tests] =[]
keyword[for] identifier[func] keyword[in] identifier[functions] :
identifier[func] = identifier[func] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[func] keyword[not] keyword[in] identifier[__salt__] :
identifier[missing] . identifier[append] ( identifier[func] )
keyword[elif] identifier[__opts__] [ literal[string] ]:
identifier[tests] . identifier[append] ( identifier[func] )
keyword[if] identifier[tests] keyword[or] identifier[missing] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ([
identifier[missing] keyword[and] literal[string]
literal[string] . identifier[format] ( identifier[plr] =( identifier[len] ( identifier[missing] )> literal[int] keyword[or] literal[string] ),
identifier[func] =( literal[string] . identifier[join] ( identifier[missing] ) keyword[or] literal[string] )) keyword[or] literal[string] ,
identifier[tests] keyword[and] literal[string]
literal[string] . identifier[format] ( identifier[plr] =( identifier[len] ( identifier[tests] )> literal[int] keyword[or] literal[string] ),
identifier[func] =( literal[string] . identifier[join] ( identifier[tests] )) keyword[or] literal[string] ) keyword[or] literal[string] ,
]). identifier[strip] ()
identifier[ret] [ literal[string] ]= keyword[not] ( identifier[missing] keyword[or] keyword[not] identifier[tests] )
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[None] :
identifier[ret] [ literal[string] ]= keyword[True]
identifier[failures] =[]
identifier[success] =[]
keyword[for] identifier[func] keyword[in] identifier[functions] :
identifier[_func] = identifier[func] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[try] :
identifier[func_ret] = identifier[_call_function] ( identifier[_func] , identifier[returner] = identifier[kwargs] . identifier[get] ( literal[string] ),
identifier[func_args] = identifier[kwargs] . identifier[get] ( identifier[func] ))
keyword[if] keyword[not] identifier[_get_result] ( identifier[func_ret] , identifier[ret] [ literal[string] ]. identifier[get] ( literal[string] ,{})):
keyword[if] identifier[isinstance] ( identifier[func_ret] , identifier[dict] ):
identifier[failures] . identifier[append] ( literal[string] . identifier[format] (
identifier[func] , identifier[func_ret] . identifier[get] ( literal[string] , literal[string] )))
keyword[else] :
identifier[success] . identifier[append] ( literal[string] . identifier[format] (
identifier[func] , identifier[func_ret] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[isinstance] ( identifier[func_ret] , identifier[dict] ) keyword[else] identifier[func_ret] ))
identifier[ret] [ literal[string] ][ identifier[func] ]= identifier[func_ret]
keyword[except] ( identifier[SaltInvocationError] , identifier[TypeError] ) keyword[as] identifier[ex] :
identifier[failures] . identifier[append] ( literal[string] . identifier[format] ( identifier[func] , identifier[ex] ))
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ( identifier[failures] + identifier[success] )
identifier[ret] [ literal[string] ]= keyword[not] identifier[bool] ( identifier[failures] )
keyword[return] identifier[ret]
|
def run(**kwargs):
    """
    Run a single module function or a range of module functions in a batch.

    Supersedes the ``module.run`` function, which requires an ``m_`` prefix
    for function-specific parameters.

    :param returner:
        Specify a common returner for the whole batch to send the return data
    :param kwargs:
        Pass any arguments needed to execute the function(s); every key that
        contains a dot (e.g. ``network.ip_addrs``) names an execution
        function to call.  An optional ``:``-suffix on a key is stripped
        when resolving the function name, so the same function can be
        listed more than once under distinct keys.
    :return: a state-style return dictionary with ``name``, ``changes``,
        ``comment`` and ``result`` keys

    .. code-block:: yaml

        some_id_of_state:
          module.run:
            - network.ip_addrs:
              - interface: eth0
            - cloud.create:
              - names:
                - test-isbm-1
                - test-isbm-2
              - ssh_username: sles
              - image: sles12sp2
              - securitygroup: default
              - size: 'c3.large'
              - location: ap-northeast-1
              - delvol_on_destroy: True
    """
    if 'name' in kwargs:
        # 'name' is the state ID injected by the state system, not a
        # function to execute.
        kwargs.pop('name')
    ret = {
        'name': list(kwargs),
        'changes': {},
        'comment': '',
        'result': None,
    }
    # Every dotted key is treated as an execution-module function name.
    functions = [func for func in kwargs if '.' in func]
    missing = []
    tests = []
    for func in functions:
        # Strip an optional ':'-suffix before looking the function up.
        func = func.split(':')[0]
        if func not in __salt__:
            missing.append(func)
        elif __opts__['test']:
            tests.append(func)
    if tests or missing:
        # BUG FIX: the plural marker was `len(...) > 1 or ''`, which
        # rendered the literal word 'True' in the comment (e.g.
        # "Unavailable functionTrue: ...") whenever more than one function
        # was listed; use 's' for the plural instead.
        ret['comment'] = ' '.join([
            missing and 'Unavailable function{plr}: {func}.'.format(
                plr='s' if len(missing) > 1 else '',
                func=', '.join(missing) or '') or '',
            tests and 'Function{plr} {func} to be executed.'.format(
                plr='s' if len(tests) > 1 else '',
                func=', '.join(tests) or '') or '',
        ]).strip()
        # Missing functions mean failure; a pure test-mode run succeeds
        # without executing anything.
        ret['result'] = not (missing or not tests)
    if ret['result'] is None:
        ret['result'] = True

        # BUG FIX: the execution loop below previously ran unconditionally,
        # i.e. even when functions were missing or test mode was active,
        # and then overwrote the comment/result bookkeeping computed above.
        # Only execute when nothing blocked the run.
        failures = []
        success = []
        for func in functions:
            _func = func.split(':')[0]
            try:
                func_ret = _call_function(_func, returner=kwargs.get('returner'),
                                          func_args=kwargs.get(func))
                if not _get_result(func_ret, ret['changes'].get('ret', {})):
                    # NOTE: a falsy non-dict result is recorded in changes
                    # but counted neither as failure nor success (original
                    # behavior, kept as-is).
                    if isinstance(func_ret, dict):
                        failures.append("'{0}' failed: {1}".format(
                            func, func_ret.get('comment', '(error message N/A)')))
                else:
                    success.append('{0}: {1}'.format(
                        func, func_ret.get('comment', 'Success')
                        if isinstance(func_ret, dict) else func_ret))
                ret['changes'][func] = func_ret
            except (SaltInvocationError, TypeError) as ex:
                failures.append("'{0}' failed: {1}".format(func, ex))
        ret['comment'] = ', '.join(failures + success)
        ret['result'] = not bool(failures)
    return ret
|
def dsync_files(self, source, target):
    '''Sync the contents of one directory to another.

    One or both of *source*/*target* may be S3 URLs; the transfer
    direction (download, upload or S3-to-S3 copy) is chosen from which
    side validates as an S3 URL.  Syncing two local directories is not
    supported.  When ``self.opt.delete_removed`` is set, files present
    in the target but absent from the source are deleted afterwards.

    :param source: source directory (local path or S3 URL)
    :param target: target directory (local path or S3 URL)
    :raises Failure: if *source* does not look like a directory
    :raises InvalidArgument: if both locations are local
    '''
    src_s3_url = S3URL.is_valid(source)
    dst_s3_url = S3URL.is_valid(target)
    source_list = self.relative_dir_walk(source)
    # An empty walk, or a literal '.' entry, means `source` is not a
    # directory.
    if len(source_list) == 0 or '.' in source_list:
        raise Failure('Sync command need to sync directory to directory.')
    sync_list = [(os.path.join(source, f), os.path.join(target, f)) for f in source_list]
    pool = ThreadPool(ThreadUtil, self.opt)
    if src_s3_url and not dst_s3_url:
        for src, dest in sync_list:
            pool.download(src, dest)
    elif not src_s3_url and dst_s3_url:
        for src, dest in sync_list:
            pool.upload(src, dest)
    elif src_s3_url and dst_s3_url:
        for src, dest in sync_list:
            pool.copy(src, dest)
    else:
        raise InvalidArgument('Cannot sync two local directories.')
    pool.join()
    if self.opt.delete_removed:
        # Mirror deletions: remove target files that no longer exist in
        # the source.
        target_list = self.relative_dir_walk(target)
        remove_list = [os.path.join(target, f) for f in (set(target_list) - set(source_list))]
        if S3URL.is_valid(target):
            pool = ThreadPool(ThreadUtil, self.opt)
            pool.batch_delete(remove_list)
            pool.join()
        else:
            for f in remove_list:
                try:
                    os.unlink(f)
                    message('Delete %s', f)
                except OSError:
                    # Best-effort local delete.  BUG FIX: this was a bare
                    # `except:` which also swallowed KeyboardInterrupt and
                    # SystemExit; os.unlink failures raise OSError.
                    pass
|
def function[dsync_files, parameter[self, source, target]]:
constant[Sync directory to directory.]
variable[src_s3_url] assign[=] call[name[S3URL].is_valid, parameter[name[source]]]
variable[dst_s3_url] assign[=] call[name[S3URL].is_valid, parameter[name[target]]]
variable[source_list] assign[=] call[name[self].relative_dir_walk, parameter[name[source]]]
if <ast.BoolOp object at 0x7da1b03a47c0> begin[:]
<ast.Raise object at 0x7da1b03a4940>
variable[sync_list] assign[=] <ast.ListComp object at 0x7da1b03a6290>
variable[pool] assign[=] call[name[ThreadPool], parameter[name[ThreadUtil], name[self].opt]]
if <ast.BoolOp object at 0x7da1b03a6110> begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b03a4be0>, <ast.Name object at 0x7da1b03a4af0>]]] in starred[name[sync_list]] begin[:]
call[name[pool].download, parameter[name[src], name[dest]]]
call[name[pool].join, parameter[]]
if name[self].opt.delete_removed begin[:]
variable[target_list] assign[=] call[name[self].relative_dir_walk, parameter[name[target]]]
variable[remove_list] assign[=] <ast.ListComp object at 0x7da1b020ec20>
if call[name[S3URL].is_valid, parameter[name[target]]] begin[:]
variable[pool] assign[=] call[name[ThreadPool], parameter[name[ThreadUtil], name[self].opt]]
call[name[pool].batch_delete, parameter[name[remove_list]]]
call[name[pool].join, parameter[]]
|
keyword[def] identifier[dsync_files] ( identifier[self] , identifier[source] , identifier[target] ):
literal[string]
identifier[src_s3_url] = identifier[S3URL] . identifier[is_valid] ( identifier[source] )
identifier[dst_s3_url] = identifier[S3URL] . identifier[is_valid] ( identifier[target] )
identifier[source_list] = identifier[self] . identifier[relative_dir_walk] ( identifier[source] )
keyword[if] identifier[len] ( identifier[source_list] )== literal[int] keyword[or] literal[string] keyword[in] identifier[source_list] :
keyword[raise] identifier[Failure] ( literal[string] )
identifier[sync_list] =[( identifier[os] . identifier[path] . identifier[join] ( identifier[source] , identifier[f] ), identifier[os] . identifier[path] . identifier[join] ( identifier[target] , identifier[f] )) keyword[for] identifier[f] keyword[in] identifier[source_list] ]
identifier[pool] = identifier[ThreadPool] ( identifier[ThreadUtil] , identifier[self] . identifier[opt] )
keyword[if] identifier[src_s3_url] keyword[and] keyword[not] identifier[dst_s3_url] :
keyword[for] identifier[src] , identifier[dest] keyword[in] identifier[sync_list] :
identifier[pool] . identifier[download] ( identifier[src] , identifier[dest] )
keyword[elif] keyword[not] identifier[src_s3_url] keyword[and] identifier[dst_s3_url] :
keyword[for] identifier[src] , identifier[dest] keyword[in] identifier[sync_list] :
identifier[pool] . identifier[upload] ( identifier[src] , identifier[dest] )
keyword[elif] identifier[src_s3_url] keyword[and] identifier[dst_s3_url] :
keyword[for] identifier[src] , identifier[dest] keyword[in] identifier[sync_list] :
identifier[pool] . identifier[copy] ( identifier[src] , identifier[dest] )
keyword[else] :
keyword[raise] identifier[InvalidArgument] ( literal[string] )
identifier[pool] . identifier[join] ()
keyword[if] identifier[self] . identifier[opt] . identifier[delete_removed] :
identifier[target_list] = identifier[self] . identifier[relative_dir_walk] ( identifier[target] )
identifier[remove_list] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[target] , identifier[f] ) keyword[for] identifier[f] keyword[in] ( identifier[set] ( identifier[target_list] )- identifier[set] ( identifier[source_list] ))]
keyword[if] identifier[S3URL] . identifier[is_valid] ( identifier[target] ):
identifier[pool] = identifier[ThreadPool] ( identifier[ThreadUtil] , identifier[self] . identifier[opt] )
identifier[pool] . identifier[batch_delete] ( identifier[remove_list] )
identifier[pool] . identifier[join] ()
keyword[else] :
keyword[for] identifier[f] keyword[in] identifier[remove_list] :
keyword[try] :
identifier[os] . identifier[unlink] ( identifier[f] )
identifier[message] ( literal[string] , identifier[f] )
keyword[except] :
keyword[pass]
|
def dsync_files(self, source, target):
    """Sync directory to directory.

    One or both of *source*/*target* may be S3 URLs; the direction of
    the transfer (download / upload / S3-to-S3 copy) is picked from
    which side validates as an S3 URL.  Two local directories cannot
    be synced.  With ``self.opt.delete_removed`` set, files present in
    the target but absent from the source are deleted afterwards.
    """
    src_s3_url = S3URL.is_valid(source)
    dst_s3_url = S3URL.is_valid(target)
    source_list = self.relative_dir_walk(source)
    # An empty walk, or a literal '.' entry, means `source` is not a
    # directory.
    if len(source_list) == 0 or '.' in source_list:
        raise Failure('Sync command need to sync directory to directory.')
    sync_list = [(os.path.join(source, f), os.path.join(target, f)) for f in source_list]
    pool = ThreadPool(ThreadUtil, self.opt)
    if src_s3_url and (not dst_s3_url):
        for (src, dest) in sync_list:
            pool.download(src, dest)
    elif not src_s3_url and dst_s3_url:
        for (src, dest) in sync_list:
            pool.upload(src, dest)
    elif src_s3_url and dst_s3_url:
        for (src, dest) in sync_list:
            pool.copy(src, dest)
    else:
        raise InvalidArgument('Cannot sync two local directories.')
    pool.join()
    if self.opt.delete_removed:
        # Mirror deletions: drop target files that no longer exist in
        # the source.
        target_list = self.relative_dir_walk(target)
        remove_list = [os.path.join(target, f) for f in set(target_list) - set(source_list)]
        if S3URL.is_valid(target):
            pool = ThreadPool(ThreadUtil, self.opt)
            pool.batch_delete(remove_list)
            pool.join()
        else:
            for f in remove_list:
                try:
                    os.unlink(f)
                    message('Delete %s', f)
                except:
                    # NOTE(review): bare except makes the delete best-effort
                    # but also swallows KeyboardInterrupt/SystemExit —
                    # consider narrowing to OSError.
                    pass
|
def get_resource(self, resource_name, name):
    # pylint: disable=too-many-locals, too-many-nested-blocks
    """Get a specific resource by name and dump it.

    ``name`` handling:
      * for 'host' with a '/' in the name, only the part before the '/'
        is used as the host name, and the host's services are fetched too;
      * for 'service' with a '/' in the name, it is read as
        '<host>/<service>' and the service is looked up for that host.

    Unless ``self.dry_run`` is set, the found item is printed as JSON
    (when not ``self.quiet``) and written to a file via ``self.file_dump``.

    :param resource_name: backend resource type (e.g. 'host', 'service')
    :param name: resource name, possibly '<host>/<service>' style
    :return: True when the resource was found (and dumped), else False
    """
    try:
        logger.info("Trying to get %s: '%s'", resource_name, name)
        services_list = False
        if resource_name == 'host' and '/' in name:
            splitted_name = name.split('/')
            services_list = True
            name = splitted_name[0]
        params = {'where': json.dumps({'name': name})}
        # Templatable resources are additionally filtered on _is_template.
        if resource_name in ['host', 'service', 'user']:
            params = {'where': json.dumps({'name': name, '_is_template': self.model})}
        if resource_name == 'service' and '/' in name:
            splitted_name = name.split('/')
            # new_name = splitted_name[0] + '_' + splitted_name[1]
            # name = splitted_name[1]
            # Resolve the host first; the service is keyed on the host _id.
            response2 = self.backend.get(
                'host', params={'where': json.dumps({'name': splitted_name[0]})})
            if response2['_items']:
                host = response2['_items'][0]
                logger.info("Got host '%s' for the service '%s'",
                            splitted_name[0], splitted_name[1])
            else:
                logger.warning("Not found host '%s'!", splitted_name[0])
                return False
            params = {'where': json.dumps({'name': splitted_name[1],
                                           'host': host['_id'],
                                           '_is_template': self.model})}
        if self.embedded and resource_name in self.embedded_resources:
            params.update({'embedded': json.dumps(self.embedded_resources[resource_name])})
        response = self.backend.get(resource_name, params=params)
        if response['_items']:
            response = response['_items'][0]
            logger.info("-> found %s '%s': %s", resource_name, name, response['_id'])
            if services_list:
                # Get services for the host
                params = {'where': json.dumps({'host': response['_id']})}
                if self.embedded and 'service' in self.embedded_resources:
                    params.update(
                        {'embedded': json.dumps(self.embedded_resources['service'])})
                response2 = self.backend.get('service', params=params)
                if response2['_items']:
                    response['_services'] = response2['_items']
                    logger.info("Got %d services for host '%s'",
                                len(response2['_items']), splitted_name[0])
                else:
                    logger.warning("Not found host '%s'!", splitted_name[0])
                    return False
            # Exists in the backend, we got the element
            if not self.dry_run:
                logger.info("-> dumping %s: %s", resource_name, name)
                # Filter fields prefixed with an _ (internal backend fields)
                for field in list(response):
                    if field in ['_created', '_updated', '_etag', '_links', '_status']:
                        response.pop(field)
                        continue
                    # Filter fields prefixed with an _ in embedded items
                    if self.embedded and resource_name in self.embedded_resources and \
                            field in self.embedded_resources[resource_name]:
                        logger.info("-> embedded %s", field)
                        # Embedded items may be a list or a simple dictionary,
                        # always make it a list
                        embedded_items = response[field]
                        if not isinstance(response[field], list):
                            embedded_items = [response[field]]
                        # Filter fields in each embedded item
                        for embedded_item in embedded_items:
                            if not embedded_item:
                                continue
                            for embedded_field in list(embedded_item):
                                if embedded_field.startswith('_'):
                                    embedded_item.pop(embedded_field)
                dump = json.dumps(response, indent=4,
                                  separators=(',', ': '), sort_keys=True)
                if not self.quiet:
                    print(dump)
                # Rebuild a filesystem-friendly '<host>_<service>' file name.
                if resource_name == 'service' and '/' in name:
                    name = splitted_name[0] + '_' + splitted_name[1]
                filename = self.file_dump(response,
                                          'alignak-object-dump-%s-%s.json'
                                          % (resource_name, name))
                if filename:
                    logger.info("-> dumped %s '%s' to %s", resource_name, name, filename)
                logger.info("-> dumped %s: %s", resource_name, name)
            else:
                if resource_name == 'service' and '/' in name:
                    name = splitted_name[0] + '_' + splitted_name[1]
                logger.info("Dry-run mode: should have dumped an %s '%s'",
                            resource_name, name)
            return True
        else:
            logger.warning("-> %s '%s' not found", resource_name, name)
            return False
    except BackendException as exp:  # pragma: no cover, should never happen
        logger.exception("Exception: %s", exp)
        logger.error("Response: %s", exp.response)
        print("Get error for '%s' : %s" % (resource_name, name))
        print("~~~~~~~~~~~~~~~~~~~~~~~~~~")
        print("Exiting with error code: 5")
        return False
|
def function[get_resource, parameter[self, resource_name, name]]:
constant[Get a specific resource by name]
<ast.Try object at 0x7da20c795870>
|
keyword[def] identifier[get_resource] ( identifier[self] , identifier[resource_name] , identifier[name] ):
literal[string]
keyword[try] :
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] , identifier[name] )
identifier[services_list] = keyword[False]
keyword[if] identifier[resource_name] == literal[string] keyword[and] literal[string] keyword[in] identifier[name] :
identifier[splitted_name] = identifier[name] . identifier[split] ( literal[string] )
identifier[services_list] = keyword[True]
identifier[name] = identifier[splitted_name] [ literal[int] ]
identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[name] })}
keyword[if] identifier[resource_name] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[name] , literal[string] : identifier[self] . identifier[model] })}
keyword[if] identifier[resource_name] == literal[string] keyword[and] literal[string] keyword[in] identifier[name] :
identifier[splitted_name] = identifier[name] . identifier[split] ( literal[string] )
identifier[response2] = identifier[self] . identifier[backend] . identifier[get] (
literal[string] , identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[splitted_name] [ literal[int] ]})})
keyword[if] identifier[response2] [ literal[string] ]:
identifier[host] = identifier[response2] [ literal[string] ][ literal[int] ]
identifier[logger] . identifier[info] ( literal[string] ,
identifier[splitted_name] [ literal[int] ], identifier[splitted_name] [ literal[int] ])
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[splitted_name] [ literal[int] ])
keyword[return] keyword[False]
identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[splitted_name] [ literal[int] ],
literal[string] : identifier[host] [ literal[string] ],
literal[string] : identifier[self] . identifier[model] })}
keyword[if] identifier[self] . identifier[embedded] keyword[and] identifier[resource_name] keyword[in] identifier[self] . identifier[embedded_resources] :
identifier[params] . identifier[update] ({ literal[string] : identifier[json] . identifier[dumps] ( identifier[self] . identifier[embedded_resources] [ identifier[resource_name] ])})
identifier[response] = identifier[self] . identifier[backend] . identifier[get] ( identifier[resource_name] , identifier[params] = identifier[params] )
keyword[if] identifier[response] [ literal[string] ]:
identifier[response] = identifier[response] [ literal[string] ][ literal[int] ]
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] , identifier[name] , identifier[response] [ literal[string] ])
keyword[if] identifier[services_list] :
identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : identifier[response] [ literal[string] ]})}
keyword[if] identifier[self] . identifier[embedded] keyword[and] literal[string] keyword[in] identifier[self] . identifier[embedded_resources] :
identifier[params] . identifier[update] (
{ literal[string] : identifier[json] . identifier[dumps] ( identifier[self] . identifier[embedded_resources] [ literal[string] ])})
identifier[response2] = identifier[self] . identifier[backend] . identifier[get] ( literal[string] , identifier[params] = identifier[params] )
keyword[if] identifier[response2] [ literal[string] ]:
identifier[response] [ literal[string] ]= identifier[response2] [ literal[string] ]
identifier[logger] . identifier[info] ( literal[string] ,
identifier[len] ( identifier[response2] [ literal[string] ]), identifier[splitted_name] [ literal[int] ])
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[splitted_name] [ literal[int] ])
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[self] . identifier[dry_run] :
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] , identifier[name] )
keyword[for] identifier[field] keyword[in] identifier[list] ( identifier[response] ):
keyword[if] identifier[field] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[response] . identifier[pop] ( identifier[field] )
keyword[continue]
keyword[if] identifier[self] . identifier[embedded] keyword[and] identifier[resource_name] keyword[in] identifier[self] . identifier[embedded_resources] keyword[and] identifier[field] keyword[in] identifier[self] . identifier[embedded_resources] [ identifier[resource_name] ]:
identifier[logger] . identifier[info] ( literal[string] , identifier[field] )
identifier[embedded_items] = identifier[response] [ identifier[field] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[response] [ identifier[field] ], identifier[list] ):
identifier[embedded_items] =[ identifier[response] [ identifier[field] ]]
keyword[for] identifier[embedded_item] keyword[in] identifier[embedded_items] :
keyword[if] keyword[not] identifier[embedded_item] :
keyword[continue]
keyword[for] identifier[embedded_field] keyword[in] identifier[list] ( identifier[embedded_item] ):
keyword[if] identifier[embedded_field] . identifier[startswith] ( literal[string] ):
identifier[embedded_item] . identifier[pop] ( identifier[embedded_field] )
identifier[dump] = identifier[json] . identifier[dumps] ( identifier[response] , identifier[indent] = literal[int] ,
identifier[separators] =( literal[string] , literal[string] ), identifier[sort_keys] = keyword[True] )
keyword[if] keyword[not] identifier[self] . identifier[quiet] :
identifier[print] ( identifier[dump] )
keyword[if] identifier[resource_name] == literal[string] keyword[and] literal[string] keyword[in] identifier[name] :
identifier[name] = identifier[splitted_name] [ literal[int] ]+ literal[string] + identifier[splitted_name] [ literal[int] ]
identifier[filename] = identifier[self] . identifier[file_dump] ( identifier[response] ,
literal[string]
%( identifier[resource_name] , identifier[name] ))
keyword[if] identifier[filename] :
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] , identifier[name] , identifier[filename] )
identifier[logger] . identifier[info] ( literal[string] , identifier[resource_name] , identifier[name] )
keyword[else] :
keyword[if] identifier[resource_name] == literal[string] keyword[and] literal[string] keyword[in] identifier[name] :
identifier[name] = identifier[splitted_name] [ literal[int] ]+ literal[string] + identifier[splitted_name] [ literal[int] ]
identifier[logger] . identifier[info] ( literal[string] ,
identifier[resource_name] , identifier[name] )
keyword[return] keyword[True]
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[resource_name] , identifier[name] )
keyword[return] keyword[False]
keyword[except] identifier[BackendException] keyword[as] identifier[exp] :
identifier[logger] . identifier[exception] ( literal[string] , identifier[exp] )
identifier[logger] . identifier[error] ( literal[string] , identifier[exp] . identifier[response] )
identifier[print] ( literal[string] %( identifier[resource_name] , identifier[name] ))
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[return] keyword[False]
|
def get_resource(self, resource_name, name):
    # pylint: disable=too-many-locals, too-many-nested-blocks
    """Get a specific resource by name and dump it.

    ``name`` handling:
      * for 'host' with a '/' in the name, only the part before the '/'
        is used as the host name, and the host's services are fetched too;
      * for 'service' with a '/' in the name, it is read as
        '<host>/<service>' and the service is looked up for that host.

    Unless ``self.dry_run`` is set, the found item is printed as JSON
    (when not ``self.quiet``) and written to a file via ``self.file_dump``.
    Returns False when the resource (or its host) is not found or on a
    backend error.
    """
    try:
        logger.info("Trying to get %s: '%s'", resource_name, name)
        services_list = False
        if resource_name == 'host' and '/' in name:
            splitted_name = name.split('/')
            services_list = True
            name = splitted_name[0]
        params = {'where': json.dumps({'name': name})}
        # Templatable resources are additionally filtered on _is_template.
        if resource_name in ['host', 'service', 'user']:
            params = {'where': json.dumps({'name': name, '_is_template': self.model})}
        if resource_name == 'service' and '/' in name:
            splitted_name = name.split('/')
            # new_name = splitted_name[0] + '_' + splitted_name[1]
            # name = splitted_name[1]
            # Resolve the host first; the service is keyed on the host _id.
            response2 = self.backend.get('host', params={'where': json.dumps({'name': splitted_name[0]})})
            if response2['_items']:
                host = response2['_items'][0]
                logger.info("Got host '%s' for the service '%s'", splitted_name[0], splitted_name[1])
            else:
                logger.warning("Not found host '%s'!", splitted_name[0])
                return False
            params = {'where': json.dumps({'name': splitted_name[1], 'host': host['_id'], '_is_template': self.model})}
        if self.embedded and resource_name in self.embedded_resources:
            params.update({'embedded': json.dumps(self.embedded_resources[resource_name])})
        response = self.backend.get(resource_name, params=params)
        if response['_items']:
            response = response['_items'][0]
            logger.info("-> found %s '%s': %s", resource_name, name, response['_id'])
            if services_list:
                # Get services for the host
                params = {'where': json.dumps({'host': response['_id']})}
                if self.embedded and 'service' in self.embedded_resources:
                    params.update({'embedded': json.dumps(self.embedded_resources['service'])})
                response2 = self.backend.get('service', params=params)
                if response2['_items']:
                    response['_services'] = response2['_items']
                    logger.info("Got %d services for host '%s'", len(response2['_items']), splitted_name[0])
                else:
                    logger.warning("Not found host '%s'!", splitted_name[0])
                    return False
            # Exists in the backend, we got the element
            if not self.dry_run:
                logger.info('-> dumping %s: %s', resource_name, name)
                # Filter fields prefixed with an _ (internal backend fields)
                for field in list(response):
                    if field in ['_created', '_updated', '_etag', '_links', '_status']:
                        response.pop(field)
                        continue
                    # Filter fields prefixed with an _ in embedded items
                    if self.embedded and resource_name in self.embedded_resources and (field in self.embedded_resources[resource_name]):
                        logger.info('-> embedded %s', field)
                        # Embedded items may be a list or a simple dictionary,
                        # always make it a list
                        embedded_items = response[field]
                        if not isinstance(response[field], list):
                            embedded_items = [response[field]]
                        # Filter fields in each embedded item
                        for embedded_item in embedded_items:
                            if not embedded_item:
                                continue
                            for embedded_field in list(embedded_item):
                                if embedded_field.startswith('_'):
                                    embedded_item.pop(embedded_field)
                dump = json.dumps(response, indent=4, separators=(',', ': '), sort_keys=True)
                if not self.quiet:
                    print(dump)
                # Rebuild a filesystem-friendly '<host>_<service>' file name.
                if resource_name == 'service' and '/' in name:
                    name = splitted_name[0] + '_' + splitted_name[1]
                filename = self.file_dump(response, 'alignak-object-dump-%s-%s.json' % (resource_name, name))
                if filename:
                    logger.info("-> dumped %s '%s' to %s", resource_name, name, filename)
                logger.info('-> dumped %s: %s', resource_name, name)
            else:
                if resource_name == 'service' and '/' in name:
                    name = splitted_name[0] + '_' + splitted_name[1]
                logger.info("Dry-run mode: should have dumped an %s '%s'", resource_name, name)
                # NOTE(review): this `return True` is inside the dry-run
                # branch, so the non-dry-run path falls through and returns
                # None; the duplicate implementation of this method in this
                # file returns True in both cases — verify which is intended.
                return True
        else:
            logger.warning("-> %s '%s' not found", resource_name, name)
            return False
    except BackendException as exp:  # pragma: no cover, should never happen
        logger.exception('Exception: %s', exp)
        logger.error('Response: %s', exp.response)
        print("Get error for '%s' : %s" % (resource_name, name))
        print('~~~~~~~~~~~~~~~~~~~~~~~~~~')
        print('Exiting with error code: 5')
        return False
|
def view_dupl_sources_time(token, dstore):
    """
    Display the time spent computing duplicated sources
    """
    info = dstore['source_info']
    rows = []
    dupl_time = 0
    # Group the records by source_id; a source is duplicated when more
    # than one record shares the same id.
    for src_id, recs in sorted(group_array(info.value, 'source_id').items()):
        if len(recs) > 1:
            ctime = recs['calc_time'].sum()
            dupl_time += ctime + recs['split_time'].sum()
            rows.append((src_id, ctime, len(recs)))
    if not (rows and info.attrs.get('has_dupl_sources')):
        return 'There are no duplicated sources'
    total = info['calc_time'].sum() + info['split_time'].sum()
    msg = '\nTotal time in duplicated sources: %d/%d (%d%%)' % (
        dupl_time, total, dupl_time / total * 100)
    return rst_table(rows, ['source_id', 'calc_time', 'num_dupl']) + msg
|
def function[view_dupl_sources_time, parameter[token, dstore]]:
constant[
Display the time spent computing duplicated sources
]
variable[info] assign[=] call[name[dstore]][constant[source_info]]
variable[items] assign[=] call[name[sorted], parameter[call[call[name[group_array], parameter[name[info].value, constant[source_id]]].items, parameter[]]]]
variable[tbl] assign[=] list[[]]
variable[tot_time] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da204620940>, <ast.Name object at 0x7da2046234f0>]]] in starred[name[items]] begin[:]
if compare[call[name[len], parameter[name[records]]] greater[>] constant[1]] begin[:]
variable[calc_time] assign[=] call[call[name[records]][constant[calc_time]].sum, parameter[]]
<ast.AugAssign object at 0x7da204623eb0>
call[name[tbl].append, parameter[tuple[[<ast.Name object at 0x7da1b133efe0>, <ast.Name object at 0x7da1b133fe80>, <ast.Call object at 0x7da1b133e260>]]]]
if <ast.BoolOp object at 0x7da1b133fa30> begin[:]
variable[tot] assign[=] binary_operation[call[call[name[info]][constant[calc_time]].sum, parameter[]] + call[call[name[info]][constant[split_time]].sum, parameter[]]]
variable[percent] assign[=] binary_operation[binary_operation[name[tot_time] / name[tot]] * constant[100]]
variable[m] assign[=] binary_operation[constant[
Total time in duplicated sources: %d/%d (%d%%)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b133c2e0>, <ast.Name object at 0x7da1b133cfa0>, <ast.Name object at 0x7da1b133f610>]]]
return[binary_operation[call[name[rst_table], parameter[name[tbl], list[[<ast.Constant object at 0x7da1b133e740>, <ast.Constant object at 0x7da1b133dff0>, <ast.Constant object at 0x7da1b133e530>]]]] + name[m]]]
|
keyword[def] identifier[view_dupl_sources_time] ( identifier[token] , identifier[dstore] ):
literal[string]
identifier[info] = identifier[dstore] [ literal[string] ]
identifier[items] = identifier[sorted] ( identifier[group_array] ( identifier[info] . identifier[value] , literal[string] ). identifier[items] ())
identifier[tbl] =[]
identifier[tot_time] = literal[int]
keyword[for] identifier[source_id] , identifier[records] keyword[in] identifier[items] :
keyword[if] identifier[len] ( identifier[records] )> literal[int] :
identifier[calc_time] = identifier[records] [ literal[string] ]. identifier[sum] ()
identifier[tot_time] += identifier[calc_time] + identifier[records] [ literal[string] ]. identifier[sum] ()
identifier[tbl] . identifier[append] (( identifier[source_id] , identifier[calc_time] , identifier[len] ( identifier[records] )))
keyword[if] identifier[tbl] keyword[and] identifier[info] . identifier[attrs] . identifier[get] ( literal[string] ):
identifier[tot] = identifier[info] [ literal[string] ]. identifier[sum] ()+ identifier[info] [ literal[string] ]. identifier[sum] ()
identifier[percent] = identifier[tot_time] / identifier[tot] * literal[int]
identifier[m] = literal[string] %(
identifier[tot_time] , identifier[tot] , identifier[percent] )
keyword[return] identifier[rst_table] ( identifier[tbl] ,[ literal[string] , literal[string] , literal[string] ])+ identifier[m]
keyword[else] :
keyword[return] literal[string]
|
def view_dupl_sources_time(token, dstore):
"""
Display the time spent computing duplicated sources
"""
info = dstore['source_info']
items = sorted(group_array(info.value, 'source_id').items())
tbl = []
tot_time = 0
for (source_id, records) in items:
if len(records) > 1: # dupl
calc_time = records['calc_time'].sum()
tot_time += calc_time + records['split_time'].sum()
tbl.append((source_id, calc_time, len(records))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if tbl and info.attrs.get('has_dupl_sources'):
tot = info['calc_time'].sum() + info['split_time'].sum()
percent = tot_time / tot * 100
m = '\nTotal time in duplicated sources: %d/%d (%d%%)' % (tot_time, tot, percent)
return rst_table(tbl, ['source_id', 'calc_time', 'num_dupl']) + m # depends on [control=['if'], data=[]]
else:
return 'There are no duplicated sources'
|
def late_filling(target, pressure='pore.pressure',
                 Pc_star='pore.pc_star',
                 Swp_star=0.2, eta=3):
    r"""
    Calculates the fraction of a pore or throat filled with invading fluid
    based on the capillary pressure in the invading phase. The invading phase
    volume is calculated from:

    .. math::
        S_{nwp} = 1 - S_{wp}^{*} (P^{*}/P_{c})^{\eta}

    Parameters
    ----------
    pressure : string
        The capillary pressure in the non-wetting phase (Pc > 0).
    Pc_star : string
        The minimum pressure required to create an interface within the pore
        body or throat. Typically this would be calculated using the Washburn
        equation.
    Swp_star : float
        The residual wetting phase in an invaded pore or throat at a pressure
        of ``pc_star``.
    eta : float
        Exponent controlling the rate at which wetting phase is displaced with
        increasing pressure.

    Returns
    -------
    An array containing the fraction of each pore or throat that would be
    filled with non-wetting phase at the given phase pressure. This does not
    account for whether or not the element is actually invaded, which requires
    a percolation algorithm of some sort.
    """
    network = target.project.network
    phase = target.project.find_phase(target)
    # Clamp Pc away from zero so the division below cannot warn/blow up
    Pc = sp.maximum(phase[pressure], 1e-9)
    Swp = Swp_star * (phase[Pc_star] / Pc) ** eta
    values = sp.clip(1 - Swp, 0.0, 1.0)
    # The prefix of the pressure key ('pore' or 'throat') selects which
    # element type the values must be mapped onto for this target object
    element = pressure.split('.')[0]
    if element == 'throat':
        locations = network.map_throats(throats=target.Ts, origin=target)
    else:
        locations = network.map_pores(pores=target.Ps, origin=target)
    return values[locations]
|
def function[late_filling, parameter[target, pressure, Pc_star, Swp_star, eta]]:
constant[
Calculates the fraction of a pore or throat filled with invading fluid
based on the capillary pressure in the invading phase. The invading phase
volume is calculated from:
.. math::
S_{nwp} = 1 - S_{wp}^{*} (P^{*}/P_{c})^{\eta}
Parameters
----------
pressure : string
The capillary pressure in the non-wetting phase (Pc > 0).
Pc_star : string
The minimum pressure required to create an interface within the pore
body or throat. Typically this would be calculated using the Washburn
equation.
Swp_star : float
The residual wetting phase in an invaded pore or throat at a pressure
of ``pc_star``.
eta : float
Exponent controlling the rate at which wetting phase is displaced with
increasing pressure.
Returns
-------
An array containing the fraction of each pore or throat that would be
filled with non-wetting phase at the given phase pressure. This does not
account for whether or not the element is actually invaded, which requires
a percolation algorithm of some sort.
]
variable[element] assign[=] call[call[name[pressure].split, parameter[constant[.]]]][constant[0]]
variable[network] assign[=] name[target].project.network
variable[phase] assign[=] call[name[target].project.find_phase, parameter[name[target]]]
variable[pc_star] assign[=] call[name[phase]][name[Pc_star]]
variable[Pc] assign[=] call[name[phase]][name[pressure]]
variable[Pc] assign[=] call[name[sp].maximum, parameter[name[Pc], constant[1e-09]]]
variable[Swp] assign[=] binary_operation[name[Swp_star] * binary_operation[binary_operation[name[pc_star] / name[Pc]] ** name[eta]]]
variable[values] assign[=] call[name[sp].clip, parameter[binary_operation[constant[1] - name[Swp]], constant[0.0], constant[1.0]]]
if compare[name[element] equal[==] constant[throat]] begin[:]
variable[Ts] assign[=] call[name[network].map_throats, parameter[]]
variable[values] assign[=] call[name[values]][name[Ts]]
return[name[values]]
|
keyword[def] identifier[late_filling] ( identifier[target] , identifier[pressure] = literal[string] ,
identifier[Pc_star] = literal[string] ,
identifier[Swp_star] = literal[int] , identifier[eta] = literal[int] ):
literal[string]
identifier[element] = identifier[pressure] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[network] = identifier[target] . identifier[project] . identifier[network]
identifier[phase] = identifier[target] . identifier[project] . identifier[find_phase] ( identifier[target] )
identifier[pc_star] = identifier[phase] [ identifier[Pc_star] ]
identifier[Pc] = identifier[phase] [ identifier[pressure] ]
identifier[Pc] = identifier[sp] . identifier[maximum] ( identifier[Pc] , literal[int] )
identifier[Swp] = identifier[Swp_star] *(( identifier[pc_star] / identifier[Pc] )** identifier[eta] )
identifier[values] = identifier[sp] . identifier[clip] ( literal[int] - identifier[Swp] , literal[int] , literal[int] )
keyword[if] identifier[element] == literal[string] :
identifier[Ts] = identifier[network] . identifier[map_throats] ( identifier[throats] = identifier[target] . identifier[Ts] , identifier[origin] = identifier[target] )
identifier[values] = identifier[values] [ identifier[Ts] ]
keyword[else] :
identifier[Ps] = identifier[network] . identifier[map_pores] ( identifier[pores] = identifier[target] . identifier[Ps] , identifier[origin] = identifier[target] )
identifier[values] = identifier[values] [ identifier[Ps] ]
keyword[return] identifier[values]
|
def late_filling(target, pressure='pore.pressure', Pc_star='pore.pc_star', Swp_star=0.2, eta=3):
"""
Calculates the fraction of a pore or throat filled with invading fluid
based on the capillary pressure in the invading phase. The invading phase
volume is calculated from:
.. math::
S_{nwp} = 1 - S_{wp}^{*} (P^{*}/P_{c})^{\\eta}
Parameters
----------
pressure : string
The capillary pressure in the non-wetting phase (Pc > 0).
Pc_star : string
The minimum pressure required to create an interface within the pore
body or throat. Typically this would be calculated using the Washburn
equation.
Swp_star : float
The residual wetting phase in an invaded pore or throat at a pressure
of ``pc_star``.
eta : float
Exponent controlling the rate at which wetting phase is displaced with
increasing pressure.
Returns
-------
An array containing the fraction of each pore or throat that would be
filled with non-wetting phase at the given phase pressure. This does not
account for whether or not the element is actually invaded, which requires
a percolation algorithm of some sort.
"""
element = pressure.split('.')[0]
network = target.project.network
phase = target.project.find_phase(target)
pc_star = phase[Pc_star]
Pc = phase[pressure]
# Remove any 0's from the Pc array to prevent numpy div by 0 warning
Pc = sp.maximum(Pc, 1e-09)
Swp = Swp_star * (pc_star / Pc) ** eta
values = sp.clip(1 - Swp, 0.0, 1.0)
# Now map element onto target object
if element == 'throat':
Ts = network.map_throats(throats=target.Ts, origin=target)
values = values[Ts] # depends on [control=['if'], data=[]]
else:
Ps = network.map_pores(pores=target.Ps, origin=target)
values = values[Ps]
return values
|
def is_identity_matrix(mat,
                       ignore_phase=False,
                       rtol=RTOL_DEFAULT,
                       atol=ATOL_DEFAULT):
    """Test if an array is an identity matrix.

    Args:
        mat: matrix-like input (anything accepted by ``np.array``).
        ignore_phase (bool): if True, a matrix equal to identity up to a
            global complex phase also counts as identity.
        rtol (float): relative tolerance for ``np.allclose``; falls back to
            the module default when None.
        atol (float): absolute tolerance for ``np.allclose``; falls back to
            the module default when None.

    Returns:
        bool: True if ``mat`` is (within tolerance) an identity matrix.
    """
    if atol is None:
        atol = ATOL_DEFAULT
    if rtol is None:
        rtol = RTOL_DEFAULT
    mat = np.array(mat)
    # Must be a square 2-D matrix. The explicit squareness check fixes a
    # bug where a non-square 2-D input raised a broadcasting error inside
    # np.allclose instead of returning False.
    if mat.ndim != 2 or mat.shape[0] != mat.shape[1]:
        return False
    if ignore_phase:
        # If the matrix is equal to an identity up to a phase, we can
        # remove the phase by multiplying each entry by the complex
        # conjugate of the phase of the [0, 0] entry.
        theta = np.angle(mat[0, 0])
        mat = np.exp(-1j * theta) * mat
    # Check if square identity
    iden = np.eye(mat.shape[0])
    return np.allclose(mat, iden, rtol=rtol, atol=atol)
|
def function[is_identity_matrix, parameter[mat, ignore_phase, rtol, atol]]:
constant[Test if an array is an identity matrix.]
if compare[name[atol] is constant[None]] begin[:]
variable[atol] assign[=] name[ATOL_DEFAULT]
if compare[name[rtol] is constant[None]] begin[:]
variable[rtol] assign[=] name[RTOL_DEFAULT]
variable[mat] assign[=] call[name[np].array, parameter[name[mat]]]
if compare[name[mat].ndim not_equal[!=] constant[2]] begin[:]
return[constant[False]]
if name[ignore_phase] begin[:]
variable[theta] assign[=] call[name[np].angle, parameter[call[name[mat]][tuple[[<ast.Constant object at 0x7da2047eada0>, <ast.Constant object at 0x7da2047e9480>]]]]]
variable[mat] assign[=] binary_operation[call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da18f09cd60> * name[theta]]]] * name[mat]]
variable[iden] assign[=] call[name[np].eye, parameter[call[name[len], parameter[name[mat]]]]]
return[call[name[np].allclose, parameter[name[mat], name[iden]]]]
|
keyword[def] identifier[is_identity_matrix] ( identifier[mat] ,
identifier[ignore_phase] = keyword[False] ,
identifier[rtol] = identifier[RTOL_DEFAULT] ,
identifier[atol] = identifier[ATOL_DEFAULT] ):
literal[string]
keyword[if] identifier[atol] keyword[is] keyword[None] :
identifier[atol] = identifier[ATOL_DEFAULT]
keyword[if] identifier[rtol] keyword[is] keyword[None] :
identifier[rtol] = identifier[RTOL_DEFAULT]
identifier[mat] = identifier[np] . identifier[array] ( identifier[mat] )
keyword[if] identifier[mat] . identifier[ndim] != literal[int] :
keyword[return] keyword[False]
keyword[if] identifier[ignore_phase] :
identifier[theta] = identifier[np] . identifier[angle] ( identifier[mat] [ literal[int] , literal[int] ])
identifier[mat] = identifier[np] . identifier[exp] (- literal[int] * identifier[theta] )* identifier[mat]
identifier[iden] = identifier[np] . identifier[eye] ( identifier[len] ( identifier[mat] ))
keyword[return] identifier[np] . identifier[allclose] ( identifier[mat] , identifier[iden] , identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] )
|
def is_identity_matrix(mat, ignore_phase=False, rtol=RTOL_DEFAULT, atol=ATOL_DEFAULT):
"""Test if an array is an identity matrix."""
if atol is None:
atol = ATOL_DEFAULT # depends on [control=['if'], data=['atol']]
if rtol is None:
rtol = RTOL_DEFAULT # depends on [control=['if'], data=['rtol']]
mat = np.array(mat)
if mat.ndim != 2:
return False # depends on [control=['if'], data=[]]
if ignore_phase:
# If the matrix is equal to an identity up to a phase, we can
# remove the phase by multiplying each entry by the complex
# conjugate of the phase of the [0, 0] entry.
theta = np.angle(mat[0, 0])
mat = np.exp(-1j * theta) * mat # depends on [control=['if'], data=[]]
# Check if square identity
iden = np.eye(len(mat))
return np.allclose(mat, iden, rtol=rtol, atol=atol)
|
def checkIPFromAlias(alias=None):
    '''
    Method that checks if the given alias is currently connected to Skype and returns its IP address.

    :param alias: Alias to be searched.

    :return: Python structure for the Json received. It has the following structure:
        {
          "type": "i3visio.ip",
          "value": "1.1.1.1",
          "attributes" : []
        }
        An empty dict is returned when no IP could be resolved.
    '''
    headers = {
        "Content-type": "text/html",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Encoding": " gzip, deflate",
        "Accept-Language": " es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3",
        "Connection": "keep-alive",
        "DNT": "1",
        "Host": "www.resolvethem.com",
        "Referer": "http://www.resolvethem.com/index.php",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:38.0) Gecko/20100101 Firefox/38.0",
        "Content-Length": "26",
        "Content-Type": "application/x-www-form-urlencoded",
    }
    req = requests.post("http://www.resolvethem.com/index.php",
                        headers=headers,
                        data={'skypeUsername': alias, 'submit': ''})
    # Use .text (str) rather than .content (bytes): under Python 3 matching
    # a str pattern against bytes raises TypeError.
    data = req.text
    # Raw string so the escaped dot is not an invalid escape sequence
    p = re.compile(r"class='alert alert-success'>([0-9\.]*)<")
    allMatches = p.findall(data)
    if len(allMatches) > 0:
        jsonData = {}
        jsonData["type"] = "i3visio.ip"
        jsonData["value"] = allMatches[0]
        jsonData["attributes"] = []
        return jsonData
    return {}
|
def function[checkIPFromAlias, parameter[alias]]:
constant[
Method that checks if the given alias is currently connected to Skype and returns its IP address.
:param alias: Alias to be searched.
:return: Python structure for the Json received. It has the following structure:
{
"type": "i3visio.ip",
"value": "1.1.1.1",
"attributes" : []
}
]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b13ab940>, <ast.Constant object at 0x7da1b13ab2e0>, <ast.Constant object at 0x7da1b13a8490>, <ast.Constant object at 0x7da1b13a8580>, <ast.Constant object at 0x7da1b13abeb0>, <ast.Constant object at 0x7da1b13a93c0>, <ast.Constant object at 0x7da1b13aafb0>, <ast.Constant object at 0x7da1b13abf10>, <ast.Constant object at 0x7da1b13abdf0>, <ast.Constant object at 0x7da1b13ab580>, <ast.Constant object at 0x7da1b13a8460>], [<ast.Constant object at 0x7da1b13a8550>, <ast.Constant object at 0x7da1b13a8640>, <ast.Constant object at 0x7da1b13aaa40>, <ast.Constant object at 0x7da1b13a95a0>, <ast.Constant object at 0x7da1b13aba00>, <ast.Constant object at 0x7da1b13abf40>, <ast.Constant object at 0x7da1b13a8f10>, <ast.Constant object at 0x7da1b13a84f0>, <ast.Constant object at 0x7da1b13aa740>, <ast.Constant object at 0x7da1b13ab7f0>, <ast.Constant object at 0x7da1b13aa080>]]
variable[req] assign[=] call[name[requests].post, parameter[constant[http://www.resolvethem.com/index.php]]]
variable[data] assign[=] name[req].content
variable[p] assign[=] call[name[re].compile, parameter[constant[class='alert alert-success'>([0-9\.]*)<]]]
variable[allMatches] assign[=] call[name[p].findall, parameter[name[data]]]
if compare[call[name[len], parameter[name[allMatches]]] greater[>] constant[0]] begin[:]
variable[jsonData] assign[=] dictionary[[], []]
call[name[jsonData]][constant[type]] assign[=] constant[i3visio.ip]
call[name[jsonData]][constant[value]] assign[=] call[name[allMatches]][constant[0]]
call[name[jsonData]][constant[attributes]] assign[=] list[[]]
return[name[jsonData]]
return[dictionary[[], []]]
|
keyword[def] identifier[checkIPFromAlias] ( identifier[alias] = keyword[None] ):
literal[string]
identifier[headers] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[req] = identifier[requests] . identifier[post] ( literal[string] , identifier[headers] = identifier[headers] , identifier[data] ={ literal[string] : identifier[alias] , literal[string] : literal[string] })
identifier[data] = identifier[req] . identifier[content]
identifier[p] = identifier[re] . identifier[compile] ( literal[string] )
identifier[allMatches] = identifier[p] . identifier[findall] ( identifier[data] )
keyword[if] identifier[len] ( identifier[allMatches] )> literal[int] :
identifier[jsonData] ={}
identifier[jsonData] [ literal[string] ]= literal[string]
identifier[jsonData] [ literal[string] ]= identifier[allMatches] [ literal[int] ]
identifier[jsonData] [ literal[string] ]=[]
keyword[return] identifier[jsonData]
keyword[return] {}
|
def checkIPFromAlias(alias=None):
"""
Method that checks if the given alias is currently connected to Skype and returns its IP address.
:param alias: Alias to be searched.
:return: Python structure for the Json received. It has the following structure:
{
"type": "i3visio.ip",
"value": "1.1.1.1",
"attributes" : []
}
"""
headers = {'Content-type': 'text/html', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Encoding': ' gzip, deflate', 'Accept-Language': ' es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3', 'Connection': 'keep-alive', 'DNT': '1', 'Host': 'www.resolvethem.com', 'Referer': 'http://www.resolvethem.com/index.php', 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; rv:38.0) Gecko/20100101 Firefox/38.0', 'Content-Length': '26', 'Content-Type': 'application/x-www-form-urlencoded'}
req = requests.post('http://www.resolvethem.com/index.php', headers=headers, data={'skypeUsername': alias, 'submit': ''})
# Data returned
data = req.content
# Compilation of the regular expression
p = re.compile("class='alert alert-success'>([0-9\\.]*)<")
allMatches = p.findall(data)
if len(allMatches) > 0:
jsonData = {}
jsonData['type'] = 'i3visio.ip'
jsonData['value'] = allMatches[0]
jsonData['attributes'] = []
return jsonData # depends on [control=['if'], data=[]]
return {}
|
def QA_util_send_mail(msg, title, from_user, from_password, to_addr, smtp):
    """邮件发送 (send an e-mail)

    Arguments:
        msg {str} -- plain-text message body
        title {str} -- subject line
        from_user {str} -- sender address / login user
        from_password {str} -- sender password
        to_addr {str} -- recipient address
        smtp {str} -- SMTP server host name
    """
    mime = MIMEText(msg, 'plain', 'utf-8')
    mime['Subject'] = Header(title, 'utf-8').encode()
    server = smtplib.SMTP(smtp, 25)  # SMTP protocol default port is 25
    try:
        server.set_debuglevel(1)
        server.login(from_user, from_password)
        server.sendmail(from_user, [to_addr], mime.as_string())
    finally:
        # Always close the SMTP session; the original leaked the
        # connection on both success and failure.
        server.quit()
|
def function[QA_util_send_mail, parameter[msg, title, from_user, from_password, to_addr, smtp]]:
constant[邮件发送
Arguments:
msg {[type]} -- [description]
title {[type]} -- [description]
from_user {[type]} -- [description]
from_password {[type]} -- [description]
to_addr {[type]} -- [description]
smtp {[type]} -- [description]
]
variable[msg] assign[=] call[name[MIMEText], parameter[name[msg], constant[plain], constant[utf-8]]]
call[name[msg]][constant[Subject]] assign[=] call[call[name[Header], parameter[name[title], constant[utf-8]]].encode, parameter[]]
variable[server] assign[=] call[name[smtplib].SMTP, parameter[name[smtp], constant[25]]]
call[name[server].set_debuglevel, parameter[constant[1]]]
call[name[server].login, parameter[name[from_user], name[from_password]]]
call[name[server].sendmail, parameter[name[from_user], list[[<ast.Name object at 0x7da1b1f74be0>]], call[name[msg].as_string, parameter[]]]]
|
keyword[def] identifier[QA_util_send_mail] ( identifier[msg] , identifier[title] , identifier[from_user] , identifier[from_password] , identifier[to_addr] , identifier[smtp] ):
literal[string]
identifier[msg] = identifier[MIMEText] ( identifier[msg] , literal[string] , literal[string] )
identifier[msg] [ literal[string] ]= identifier[Header] ( identifier[title] , literal[string] ). identifier[encode] ()
identifier[server] = identifier[smtplib] . identifier[SMTP] ( identifier[smtp] , literal[int] )
identifier[server] . identifier[set_debuglevel] ( literal[int] )
identifier[server] . identifier[login] ( identifier[from_user] , identifier[from_password] )
identifier[server] . identifier[sendmail] ( identifier[from_user] ,[ identifier[to_addr] ], identifier[msg] . identifier[as_string] ())
|
def QA_util_send_mail(msg, title, from_user, from_password, to_addr, smtp):
"""邮件发送
Arguments:
msg {[type]} -- [description]
title {[type]} -- [description]
from_user {[type]} -- [description]
from_password {[type]} -- [description]
to_addr {[type]} -- [description]
smtp {[type]} -- [description]
"""
msg = MIMEText(msg, 'plain', 'utf-8')
msg['Subject'] = Header(title, 'utf-8').encode()
server = smtplib.SMTP(smtp, 25) # SMTP协议默认端口是25
server.set_debuglevel(1)
server.login(from_user, from_password)
server.sendmail(from_user, [to_addr], msg.as_string())
|
def monthdatescalendar(cls, year, month):
    """ Returns a list of week in a month. A week is a list of NepDate objects """
    # Chunk the month's day sequence into runs of seven; a trailing
    # partial week is kept, matching the original accumulation loop.
    days = list(NepCal.itermonthdates(year, month))
    return [days[i:i + 7] for i in range(0, len(days), 7)]
|
def function[monthdatescalendar, parameter[cls, year, month]]:
constant[ Returns a list of week in a month. A week is a list of NepDate objects ]
variable[weeks] assign[=] list[[]]
variable[week] assign[=] list[[]]
for taget[name[day]] in starred[call[name[NepCal].itermonthdates, parameter[name[year], name[month]]]] begin[:]
call[name[week].append, parameter[name[day]]]
if compare[call[name[len], parameter[name[week]]] equal[==] constant[7]] begin[:]
call[name[weeks].append, parameter[name[week]]]
variable[week] assign[=] list[[]]
if compare[call[name[len], parameter[name[week]]] greater[>] constant[0]] begin[:]
call[name[weeks].append, parameter[name[week]]]
return[name[weeks]]
|
keyword[def] identifier[monthdatescalendar] ( identifier[cls] , identifier[year] , identifier[month] ):
literal[string]
identifier[weeks] =[]
identifier[week] =[]
keyword[for] identifier[day] keyword[in] identifier[NepCal] . identifier[itermonthdates] ( identifier[year] , identifier[month] ):
identifier[week] . identifier[append] ( identifier[day] )
keyword[if] identifier[len] ( identifier[week] )== literal[int] :
identifier[weeks] . identifier[append] ( identifier[week] )
identifier[week] =[]
keyword[if] identifier[len] ( identifier[week] )> literal[int] :
identifier[weeks] . identifier[append] ( identifier[week] )
keyword[return] identifier[weeks]
|
def monthdatescalendar(cls, year, month):
""" Returns a list of week in a month. A week is a list of NepDate objects """
weeks = []
week = []
for day in NepCal.itermonthdates(year, month):
week.append(day)
if len(week) == 7:
weeks.append(week)
week = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['day']]
if len(week) > 0:
weeks.append(week) # depends on [control=['if'], data=[]]
return weeks
|
def trim_decimals(s, precision=-3):
    """
    Convert from scientific notation using precision.

    Non-ASCII characters are dropped, then ``precision`` characters are
    sliced off the end of the string (a negative slice bound), and the
    remainder is parsed as a float. ``precision == 0`` means "keep the
    whole string".

    :param s: numeric string to convert
    :param precision: negative slice bound trimming the tail; 0 keeps all
    :return: the parsed float, or 0 if nothing remains after trimming
    """
    # Drop non-ASCII characters before parsing.
    text = s.encode('ascii', 'ignore').decode('ascii', 'ignore')
    # precision == 0 must keep the full string; the original Python 3
    # branch sliced [:0] and always returned 0 in that case, while the
    # Python 2 branch handled it correctly. Behavior is now uniform.
    if precision != 0:
        text = text[:precision]
    if len(text) > 0:
        return float(text)
    else:
        return 0
|
def function[trim_decimals, parameter[s, precision]]:
constant[
Convert from scientific notation using precision
]
variable[encoded] assign[=] call[name[s].encode, parameter[constant[ascii], constant[ignore]]]
variable[str_val] assign[=] constant[]
if name[six].PY3 begin[:]
variable[str_val] assign[=] call[call[name[str], parameter[name[encoded]]]][<ast.Slice object at 0x7da1b11a0cd0>]
if compare[call[name[len], parameter[name[str_val]]] greater[>] constant[0]] begin[:]
return[call[name[float], parameter[name[str_val]]]]
|
keyword[def] identifier[trim_decimals] ( identifier[s] , identifier[precision] =- literal[int] ):
literal[string]
identifier[encoded] = identifier[s] . identifier[encode] ( literal[string] , literal[string] )
identifier[str_val] = literal[string]
keyword[if] identifier[six] . identifier[PY3] :
identifier[str_val] = identifier[str] ( identifier[encoded] , identifier[encoding] = literal[string] , identifier[errors] = literal[string] )[: identifier[precision] ]
keyword[else] :
keyword[if] identifier[precision] == literal[int] :
identifier[str_val] = identifier[str] ( identifier[encoded] )
keyword[else] :
identifier[str_val] = identifier[str] ( identifier[encoded] )[: identifier[precision] ]
keyword[if] identifier[len] ( identifier[str_val] )> literal[int] :
keyword[return] identifier[float] ( identifier[str_val] )
keyword[else] :
keyword[return] literal[int]
|
def trim_decimals(s, precision=-3):
"""
Convert from scientific notation using precision
"""
encoded = s.encode('ascii', 'ignore')
str_val = ''
if six.PY3:
str_val = str(encoded, encoding='ascii', errors='ignore')[:precision] # depends on [control=['if'], data=[]]
# If precision is 0, this must be handled seperately
elif precision == 0:
str_val = str(encoded) # depends on [control=['if'], data=[]]
else:
str_val = str(encoded)[:precision]
if len(str_val) > 0:
return float(str_val) # depends on [control=['if'], data=[]]
else:
return 0
|
def rmlst(databasepath, credentials):
    """
    Get the most up-to-date profiles and alleles from pubmlst. Note that you will need the necessary access token
    and secret for this to work
    :param databasepath: path to use to save the database
    :param credentials: path to folder containing accessory token and secret.txt files
    """
    logging.info('Downloading rMLST database')
    # A 'complete' marker file records a previously successful download
    completefile = os.path.join(databasepath, 'rMLST', 'complete')
    if os.path.isfile(completefile):
        return
    # Build the argument object expected by the rMLST download script
    args = MetadataObject()
    args.path = databasepath
    args.logging = logging
    args.credentials = credentials
    get_rmlst.Get(args)
    # Record the downloaded files in the marker file
    downloaded = glob(os.path.join(databasepath, 'rMLST', '*'))
    with open(completefile, 'w') as complete:
        complete.write('\n'.join(downloaded))
|
def function[rmlst, parameter[databasepath, credentials]]:
constant[
Get the most up-to-date profiles and alleles from pubmlst. Note that you will need the necessary access token
and secret for this to work
:param databasepath: path to use to save the database
:param credentials: path to folder containing accessory token and secret.txt files
]
call[name[logging].info, parameter[constant[Downloading rMLST database]]]
variable[completefile] assign[=] call[name[os].path.join, parameter[name[databasepath], constant[rMLST], constant[complete]]]
if <ast.UnaryOp object at 0x7da18f09d4b0> begin[:]
variable[args] assign[=] call[name[MetadataObject], parameter[]]
name[args].path assign[=] name[databasepath]
name[args].logging assign[=] name[logging]
name[args].credentials assign[=] name[credentials]
call[name[get_rmlst].Get, parameter[name[args]]]
with call[name[open], parameter[name[completefile], constant[w]]] begin[:]
call[name[complete].write, parameter[call[constant[
].join, parameter[call[name[glob], parameter[call[name[os].path.join, parameter[name[databasepath], constant[rMLST], constant[*]]]]]]]]]
|
keyword[def] identifier[rmlst] ( identifier[databasepath] , identifier[credentials] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] )
identifier[completefile] = identifier[os] . identifier[path] . identifier[join] ( identifier[databasepath] , literal[string] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[completefile] ):
identifier[args] = identifier[MetadataObject] ()
identifier[args] . identifier[path] = identifier[databasepath]
identifier[args] . identifier[logging] = identifier[logging]
identifier[args] . identifier[credentials] = identifier[credentials]
identifier[get_rmlst] . identifier[Get] ( identifier[args] )
keyword[with] identifier[open] ( identifier[completefile] , literal[string] ) keyword[as] identifier[complete] :
identifier[complete] . identifier[write] ( literal[string] . identifier[join] ( identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[databasepath] , literal[string] , literal[string] ))))
|
def rmlst(databasepath, credentials):
"""
Get the most up-to-date profiles and alleles from pubmlst. Note that you will need the necessary access token
and secret for this to work
:param databasepath: path to use to save the database
:param credentials: path to folder containing accessory token and secret.txt files
"""
logging.info('Downloading rMLST database')
# Set the name of the file to be used to determine if the database download and set-up was successful
completefile = os.path.join(databasepath, 'rMLST', 'complete')
if not os.path.isfile(completefile):
# Create an object to send to the rMLST download script
args = MetadataObject()
# Add the path and start time attributes
args.path = databasepath
args.logging = logging
args.credentials = credentials
# Run the rMLST download
get_rmlst.Get(args)
# Create and populate the complete.txt file
with open(completefile, 'w') as complete:
complete.write('\n'.join(glob(os.path.join(databasepath, 'rMLST', '*')))) # depends on [control=['with'], data=['complete']] # depends on [control=['if'], data=[]]
|
def find_by_uuid(self, si, uuid, is_vm=True, path=None, data_center=None):
    """Locate a VM or host in the vCenter inventory by its uuid.

    :param si: pyvmomi 'ServiceInstance'
    :param uuid: the object uuid; when None, no search is performed
    :param is_vm: if True search virtual machines, otherwise search hosts
    :param path: optional inventory path ('dc' or 'dc/folder/folder/etc...')
        used to resolve the datacenter to scope the search
    :param data_center: datacenter to search in; overridden when *path* is given
    :returns: the matching managed object, or None when *uuid* is None or
        no object matches
    """
    if uuid is None:
        return None
    if path is not None:
        # Resolve the datacenter from the supplied inventory path so the
        # uuid lookup is scoped correctly.
        data_center = self.find_item_in_path_by_type(si, path, vim.Datacenter)
    return si.content.searchIndex.FindByUuid(data_center, uuid, is_vm)
|
def function[find_by_uuid, parameter[self, si, uuid, is_vm, path, data_center]]:
constant[
Finds vm/host by his uuid in the vCenter or returns "None"
:param si: pyvmomi 'ServiceInstance'
:param uuid: the object uuid
:param path: the path to find the object ('dc' or 'dc/folder' or 'dc/folder/folder/etc...')
:param is_vm: if true, search for virtual machines, otherwise search for hosts
:param data_center:
]
if compare[name[uuid] is constant[None]] begin[:]
return[constant[None]]
if compare[name[path] is_not constant[None]] begin[:]
variable[data_center] assign[=] call[name[self].find_item_in_path_by_type, parameter[name[si], name[path], name[vim].Datacenter]]
variable[search_index] assign[=] name[si].content.searchIndex
return[call[name[search_index].FindByUuid, parameter[name[data_center], name[uuid], name[is_vm]]]]
|
keyword[def] identifier[find_by_uuid] ( identifier[self] , identifier[si] , identifier[uuid] , identifier[is_vm] = keyword[True] , identifier[path] = keyword[None] , identifier[data_center] = keyword[None] ):
literal[string]
keyword[if] identifier[uuid] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] :
identifier[data_center] = identifier[self] . identifier[find_item_in_path_by_type] ( identifier[si] , identifier[path] , identifier[vim] . identifier[Datacenter] )
identifier[search_index] = identifier[si] . identifier[content] . identifier[searchIndex]
keyword[return] identifier[search_index] . identifier[FindByUuid] ( identifier[data_center] , identifier[uuid] , identifier[is_vm] )
|
def find_by_uuid(self, si, uuid, is_vm=True, path=None, data_center=None):
"""
Finds vm/host by his uuid in the vCenter or returns "None"
:param si: pyvmomi 'ServiceInstance'
:param uuid: the object uuid
:param path: the path to find the object ('dc' or 'dc/folder' or 'dc/folder/folder/etc...')
:param is_vm: if true, search for virtual machines, otherwise search for hosts
:param data_center:
"""
if uuid is None:
return None # depends on [control=['if'], data=[]]
if path is not None:
data_center = self.find_item_in_path_by_type(si, path, vim.Datacenter) # depends on [control=['if'], data=['path']]
search_index = si.content.searchIndex
return search_index.FindByUuid(data_center, uuid, is_vm)
|
def WriteClientStats(self, client_id, stats):
    """Stores a ClientStats instance keyed by the current timestamp.

    :raises db.UnknownClientError: when *client_id* is not a known client.
    """
    known_clients = self.ReadAllClientIDs()
    if client_id not in known_clients:
        raise db.UnknownClientError(client_id)
    timestamp = rdfvalue.RDFDatetime.Now()
    self.client_stats[client_id][timestamp] = stats
|
def function[WriteClientStats, parameter[self, client_id, stats]]:
constant[Stores a ClientStats instance.]
if compare[name[client_id] <ast.NotIn object at 0x7da2590d7190> call[name[self].ReadAllClientIDs, parameter[]]] begin[:]
<ast.Raise object at 0x7da2054a58d0>
call[call[name[self].client_stats][name[client_id]]][call[name[rdfvalue].RDFDatetime.Now, parameter[]]] assign[=] name[stats]
|
keyword[def] identifier[WriteClientStats] ( identifier[self] , identifier[client_id] ,
identifier[stats] ):
literal[string]
keyword[if] identifier[client_id] keyword[not] keyword[in] identifier[self] . identifier[ReadAllClientIDs] ():
keyword[raise] identifier[db] . identifier[UnknownClientError] ( identifier[client_id] )
identifier[self] . identifier[client_stats] [ identifier[client_id] ][ identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] ()]= identifier[stats]
|
def WriteClientStats(self, client_id, stats):
"""Stores a ClientStats instance."""
if client_id not in self.ReadAllClientIDs():
raise db.UnknownClientError(client_id) # depends on [control=['if'], data=['client_id']]
self.client_stats[client_id][rdfvalue.RDFDatetime.Now()] = stats
|
def process(self, context, data):
    """Select the target backend from the requester identifier.

    Looks the requester up in ``self.requester_mapping``, stores the result
    on ``context.target_backend``, then delegates to the parent class.

    :param context: request context
    :param data: the internal request
    """
    backend = self.requester_mapping[data.requester]
    context.target_backend = backend
    return super().process(context, data)
|
def function[process, parameter[self, context, data]]:
constant[
Will modify the context.target_backend attribute based on the requester identifier.
:param context: request context
:param data: the internal request
]
name[context].target_backend assign[=] call[name[self].requester_mapping][name[data].requester]
return[call[call[name[super], parameter[]].process, parameter[name[context], name[data]]]]
|
keyword[def] identifier[process] ( identifier[self] , identifier[context] , identifier[data] ):
literal[string]
identifier[context] . identifier[target_backend] = identifier[self] . identifier[requester_mapping] [ identifier[data] . identifier[requester] ]
keyword[return] identifier[super] (). identifier[process] ( identifier[context] , identifier[data] )
|
def process(self, context, data):
"""
Will modify the context.target_backend attribute based on the requester identifier.
:param context: request context
:param data: the internal request
"""
context.target_backend = self.requester_mapping[data.requester]
return super().process(context, data)
|
def summary(self, raw):
    """
    Return one taxonomy summarizing the reported tags
    If there is only one tag, use it as the predicate
    If there are multiple tags, use "entries" as the predicate
    Use the total count as the value
    Use the most malicious level found
    Examples:
    Input
    {
    "name": SCANNER1,
    "intention": ""
    }
    Output
    GreyNoise:SCANNER1 = 1 (info)
    Input
    {
    "name": SCANNER1,
    "intention": "malicious"
    },
    {
    "name": SCANNER1,
    "intention": "benign"
    }
    Output
    GreyNoise:SCANNER1 = 2 (malicious)
    Input
    {
    "name": SCANNER1,
    "intention": ""
    },
    {
    "name": SCANNER1,
    "intention": "safe"
    },
    {
    "name": SCANNER2,
    "intention": ""
    }
    Output
    GreyNoise:entries = 3 (safe)
    """
    try:
        taxonomies = []
        if raw.get('records'):
            final_level = None
            # Count occurrences per tag name and track the worst level seen.
            taxonomy_data = defaultdict(int)
            for record in raw.get('records', []):
                name = record.get('name', 'unknown')
                intention = record.get('intention', 'unknown')
                taxonomy_data[name] += 1
                final_level = self._get_level(final_level, intention)
            if len(taxonomy_data) > 1:  # Multiple tags have been found
                taxonomies.append(self.build_taxonomy(final_level, 'GreyNoise', 'entries', len(taxonomy_data)))
            else:  # There is only one tag found, possibly multiple times
                # BUGFIX: dict.iteritems() does not exist on Python 3;
                # .items() works on both Python 2 and Python 3.
                for name, count in taxonomy_data.items():
                    taxonomies.append(self.build_taxonomy(final_level, 'GreyNoise', name, count))
        else:
            # No records at all: report an informational placeholder entry.
            taxonomies.append(self.build_taxonomy('info', 'GreyNoise', 'Records', 'None'))
        return {"taxonomies": taxonomies}
    except Exception as e:
        # BUGFIX: BaseException.message was removed in Python 3; format the
        # exception object itself instead.
        self.error('Summary failed\n{}'.format(e))
|
def function[summary, parameter[self, raw]]:
constant[
Return one taxonomy summarizing the reported tags
If there is only one tag, use it as the predicate
If there are multiple tags, use "entries" as the predicate
Use the total count as the value
Use the most malicious level found
Examples:
Input
{
"name": SCANNER1,
"intention": ""
}
Output
GreyNoise:SCANNER1 = 1 (info)
Input
{
"name": SCANNER1,
"intention": "malicious"
},
{
"name": SCANNER1,
"intention": "benign"
}
Output
GreyNoise:SCANNER1 = 2 (malicious)
Input
{
"name": SCANNER1,
"intention": ""
},
{
"name": SCANNER1,
"intention": "safe"
},
{
"name": SCANNER2,
"intention": ""
}
Output
GreyNoise:entries = 3 (safe)
]
<ast.Try object at 0x7da18f09fb80>
|
keyword[def] identifier[summary] ( identifier[self] , identifier[raw] ):
literal[string]
keyword[try] :
identifier[taxonomies] =[]
keyword[if] identifier[raw] . identifier[get] ( literal[string] ):
identifier[final_level] = keyword[None]
identifier[taxonomy_data] = identifier[defaultdict] ( identifier[int] )
keyword[for] identifier[record] keyword[in] identifier[raw] . identifier[get] ( literal[string] ,[]):
identifier[name] = identifier[record] . identifier[get] ( literal[string] , literal[string] )
identifier[intention] = identifier[record] . identifier[get] ( literal[string] , literal[string] )
identifier[taxonomy_data] [ identifier[name] ]+= literal[int]
identifier[final_level] = identifier[self] . identifier[_get_level] ( identifier[final_level] , identifier[intention] )
keyword[if] identifier[len] ( identifier[taxonomy_data] )> literal[int] :
identifier[taxonomies] . identifier[append] ( identifier[self] . identifier[build_taxonomy] ( identifier[final_level] , literal[string] , literal[string] , identifier[len] ( identifier[taxonomy_data] )))
keyword[else] :
keyword[for] identifier[name] , identifier[count] keyword[in] identifier[taxonomy_data] . identifier[iteritems] ():
identifier[taxonomies] . identifier[append] ( identifier[self] . identifier[build_taxonomy] ( identifier[final_level] , literal[string] , identifier[name] , identifier[count] ))
keyword[else] :
identifier[taxonomies] . identifier[append] ( identifier[self] . identifier[build_taxonomy] ( literal[string] , literal[string] , literal[string] , literal[string] ))
keyword[return] { literal[string] : identifier[taxonomies] }
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[error] ( literal[string] . identifier[format] ( identifier[e] . identifier[message] ))
|
def summary(self, raw):
"""
Return one taxonomy summarizing the reported tags
If there is only one tag, use it as the predicate
If there are multiple tags, use "entries" as the predicate
Use the total count as the value
Use the most malicious level found
Examples:
Input
{
"name": SCANNER1,
"intention": ""
}
Output
GreyNoise:SCANNER1 = 1 (info)
Input
{
"name": SCANNER1,
"intention": "malicious"
},
{
"name": SCANNER1,
"intention": "benign"
}
Output
GreyNoise:SCANNER1 = 2 (malicious)
Input
{
"name": SCANNER1,
"intention": ""
},
{
"name": SCANNER1,
"intention": "safe"
},
{
"name": SCANNER2,
"intention": ""
}
Output
GreyNoise:entries = 3 (safe)
"""
try:
taxonomies = []
if raw.get('records'):
final_level = None
taxonomy_data = defaultdict(int)
for record in raw.get('records', []):
name = record.get('name', 'unknown')
intention = record.get('intention', 'unknown')
taxonomy_data[name] += 1
final_level = self._get_level(final_level, intention) # depends on [control=['for'], data=['record']]
if len(taxonomy_data) > 1: # Multiple tags have been found
taxonomies.append(self.build_taxonomy(final_level, 'GreyNoise', 'entries', len(taxonomy_data))) # depends on [control=['if'], data=[]]
else: # There is only one tag found, possibly multiple times
for (name, count) in taxonomy_data.iteritems():
taxonomies.append(self.build_taxonomy(final_level, 'GreyNoise', name, count)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
taxonomies.append(self.build_taxonomy('info', 'GreyNoise', 'Records', 'None'))
return {'taxonomies': taxonomies} # depends on [control=['try'], data=[]]
except Exception as e:
self.error('Summary failed\n{}'.format(e.message)) # depends on [control=['except'], data=['e']]
|
def shv(command, capture=False, ignore_error=False, cwd=None):
    """Run *command* inside the virtual environment, when one is configured.

    When ``options.virtualenv.activate_cmd`` exists, the activation command
    is prefixed to *command*; otherwise the command runs as-is via bash.
    """
    _setVirtualEnv()
    # Sentinel distinguishes "attribute missing" from any real value,
    # mirroring the original try/except AttributeError behaviour.
    _missing = object()
    activate = getattr(getattr(options, "virtualenv", _missing),
                       "activate_cmd", _missing)
    if activate is not _missing:
        command = "%s; %s" % (activate, command)
    return bash(command, capture=capture, ignore_error=ignore_error, cwd=cwd)
|
def function[shv, parameter[command, capture, ignore_error, cwd]]:
constant[Run the given command inside the virtual environment, if available:
]
call[name[_setVirtualEnv], parameter[]]
<ast.Try object at 0x7da1b0037c40>
return[call[name[bash], parameter[name[command]]]]
|
keyword[def] identifier[shv] ( identifier[command] , identifier[capture] = keyword[False] , identifier[ignore_error] = keyword[False] , identifier[cwd] = keyword[None] ):
literal[string]
identifier[_setVirtualEnv] ()
keyword[try] :
identifier[command] = literal[string] %( identifier[options] . identifier[virtualenv] . identifier[activate_cmd] , identifier[command] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[return] identifier[bash] ( identifier[command] , identifier[capture] = identifier[capture] , identifier[ignore_error] = identifier[ignore_error] , identifier[cwd] = identifier[cwd] )
|
def shv(command, capture=False, ignore_error=False, cwd=None):
"""Run the given command inside the virtual environment, if available:
"""
_setVirtualEnv()
try:
command = '%s; %s' % (options.virtualenv.activate_cmd, command) # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
return bash(command, capture=capture, ignore_error=ignore_error, cwd=cwd)
|
def show_list(timeout_in_sec, out=sys.stdout, host=jps.env.get_master_host(), sub_port=jps.DEFAULT_SUB_PORT):
    """Print the sorted names of all topics observed within *timeout_in_sec* seconds."""
    seen_topics = set()

    def _remember(msg, topic):
        # Subscriber callback: only the topic name matters here.
        seen_topics.add(topic)

    sub = jps.Subscriber('*', _remember, host=host, sub_port=sub_port)
    sleep_sec = 0.01
    for _ in range(int(timeout_in_sec / sleep_sec)):
        sub.spin_once(sleep_sec)
        time.sleep(0.001)  # for context switch
    for name in sorted(seen_topics):
        out.write('{}\n'.format(name))
|
def function[show_list, parameter[timeout_in_sec, out, host, sub_port]]:
constant[get the name list of the topics, and print it
]
class class[TopicNameStore, parameter[]] begin[:]
def function[__init__, parameter[self]]:
name[self]._topic_names assign[=] call[name[set], parameter[]]
def function[callback, parameter[self, msg, topic]]:
call[name[self]._topic_names.add, parameter[name[topic]]]
def function[get_topic_names, parameter[self]]:
variable[names] assign[=] call[name[list], parameter[name[self]._topic_names]]
call[name[names].sort, parameter[]]
return[name[names]]
variable[store] assign[=] call[name[TopicNameStore], parameter[]]
variable[sub] assign[=] call[name[jps].Subscriber, parameter[constant[*], name[store].callback]]
variable[sleep_sec] assign[=] constant[0.01]
for taget[name[i]] in starred[call[name[range], parameter[call[name[int], parameter[binary_operation[name[timeout_in_sec] / name[sleep_sec]]]]]]] begin[:]
call[name[sub].spin_once, parameter[name[sleep_sec]]]
call[name[time].sleep, parameter[constant[0.001]]]
for taget[name[name]] in starred[call[name[store].get_topic_names, parameter[]]] begin[:]
call[name[out].write, parameter[call[constant[{}
].format, parameter[name[name]]]]]
|
keyword[def] identifier[show_list] ( identifier[timeout_in_sec] , identifier[out] = identifier[sys] . identifier[stdout] , identifier[host] = identifier[jps] . identifier[env] . identifier[get_master_host] (), identifier[sub_port] = identifier[jps] . identifier[DEFAULT_SUB_PORT] ):
literal[string]
keyword[class] identifier[TopicNameStore] ( identifier[object] ):
keyword[def] identifier[__init__] ( identifier[self] ):
identifier[self] . identifier[_topic_names] = identifier[set] ()
keyword[def] identifier[callback] ( identifier[self] , identifier[msg] , identifier[topic] ):
identifier[self] . identifier[_topic_names] . identifier[add] ( identifier[topic] )
keyword[def] identifier[get_topic_names] ( identifier[self] ):
identifier[names] = identifier[list] ( identifier[self] . identifier[_topic_names] )
identifier[names] . identifier[sort] ()
keyword[return] identifier[names]
identifier[store] = identifier[TopicNameStore] ()
identifier[sub] = identifier[jps] . identifier[Subscriber] ( literal[string] , identifier[store] . identifier[callback] , identifier[host] = identifier[host] , identifier[sub_port] = identifier[sub_port] )
identifier[sleep_sec] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[int] ( identifier[timeout_in_sec] / identifier[sleep_sec] )):
identifier[sub] . identifier[spin_once] ( identifier[sleep_sec] )
identifier[time] . identifier[sleep] ( literal[int] )
keyword[for] identifier[name] keyword[in] identifier[store] . identifier[get_topic_names] ():
identifier[out] . identifier[write] ( literal[string] . identifier[format] ( identifier[name] ))
|
def show_list(timeout_in_sec, out=sys.stdout, host=jps.env.get_master_host(), sub_port=jps.DEFAULT_SUB_PORT):
"""get the name list of the topics, and print it
"""
class TopicNameStore(object):
def __init__(self):
self._topic_names = set()
def callback(self, msg, topic):
self._topic_names.add(topic)
def get_topic_names(self):
names = list(self._topic_names)
names.sort()
return names
store = TopicNameStore()
sub = jps.Subscriber('*', store.callback, host=host, sub_port=sub_port)
sleep_sec = 0.01
for i in range(int(timeout_in_sec / sleep_sec)):
sub.spin_once(sleep_sec)
time.sleep(0.001) # for context switch # depends on [control=['for'], data=[]]
for name in store.get_topic_names():
out.write('{}\n'.format(name)) # depends on [control=['for'], data=['name']]
|
def run(tests=(), reporter=None, stop_after=None):
    """
    Run the tests that are loaded by each of the strings provided.
    Arguments:
    tests (iterable):
    the collection of tests (specified as `str` s) to run
    reporter (Reporter):
    a `Reporter` to use for the run. If unprovided, the default
    is to return a `virtue.reporters.Counter` (which produces no
    output).
    stop_after (int):
    a number of non-successful tests to allow before stopping the run.
    """
    if reporter is None:
        reporter = Counter()
    if stop_after is not None:
        reporter = _StopAfterWrapper(reporter=reporter, limit=stop_after)
    locator = ObjectLocator()

    def _iter_cases():
        # Lazily resolve each test name to loaders, and each loader to cases.
        for test in tests:
            for loader in locator.locate_by_name(name=test):
                for case in loader.load():
                    yield case

    suite = unittest.TestSuite(_iter_cases())
    # start/stop hooks are optional on the reporter; call them only if present.
    start = getattr(reporter, "startTestRun", None)
    if start is not None:
        start()
    suite.run(reporter)
    stop = getattr(reporter, "stopTestRun", None)
    if stop is not None:
        stop()
    return reporter
|
def function[run, parameter[tests, reporter, stop_after]]:
constant[
Run the tests that are loaded by each of the strings provided.
Arguments:
tests (iterable):
the collection of tests (specified as `str` s) to run
reporter (Reporter):
a `Reporter` to use for the run. If unprovided, the default
is to return a `virtue.reporters.Counter` (which produces no
output).
stop_after (int):
a number of non-successful tests to allow before stopping the run.
]
if compare[name[reporter] is constant[None]] begin[:]
variable[reporter] assign[=] call[name[Counter], parameter[]]
if compare[name[stop_after] is_not constant[None]] begin[:]
variable[reporter] assign[=] call[name[_StopAfterWrapper], parameter[]]
variable[locator] assign[=] call[name[ObjectLocator], parameter[]]
variable[cases] assign[=] <ast.GeneratorExp object at 0x7da1b0146860>
variable[suite] assign[=] call[name[unittest].TestSuite, parameter[name[cases]]]
call[call[name[getattr], parameter[name[reporter], constant[startTestRun], <ast.Lambda object at 0x7da1b0147340>]], parameter[]]
call[name[suite].run, parameter[name[reporter]]]
call[call[name[getattr], parameter[name[reporter], constant[stopTestRun], <ast.Lambda object at 0x7da1b0147190>]], parameter[]]
return[name[reporter]]
|
keyword[def] identifier[run] ( identifier[tests] =(), identifier[reporter] = keyword[None] , identifier[stop_after] = keyword[None] ):
literal[string]
keyword[if] identifier[reporter] keyword[is] keyword[None] :
identifier[reporter] = identifier[Counter] ()
keyword[if] identifier[stop_after] keyword[is] keyword[not] keyword[None] :
identifier[reporter] = identifier[_StopAfterWrapper] ( identifier[reporter] = identifier[reporter] , identifier[limit] = identifier[stop_after] )
identifier[locator] = identifier[ObjectLocator] ()
identifier[cases] =(
identifier[case]
keyword[for] identifier[test] keyword[in] identifier[tests]
keyword[for] identifier[loader] keyword[in] identifier[locator] . identifier[locate_by_name] ( identifier[name] = identifier[test] )
keyword[for] identifier[case] keyword[in] identifier[loader] . identifier[load] ()
)
identifier[suite] = identifier[unittest] . identifier[TestSuite] ( identifier[cases] )
identifier[getattr] ( identifier[reporter] , literal[string] , keyword[lambda] : keyword[None] )()
identifier[suite] . identifier[run] ( identifier[reporter] )
identifier[getattr] ( identifier[reporter] , literal[string] , keyword[lambda] : keyword[None] )()
keyword[return] identifier[reporter]
|
def run(tests=(), reporter=None, stop_after=None):
"""
Run the tests that are loaded by each of the strings provided.
Arguments:
tests (iterable):
the collection of tests (specified as `str` s) to run
reporter (Reporter):
a `Reporter` to use for the run. If unprovided, the default
is to return a `virtue.reporters.Counter` (which produces no
output).
stop_after (int):
a number of non-successful tests to allow before stopping the run.
"""
if reporter is None:
reporter = Counter() # depends on [control=['if'], data=['reporter']]
if stop_after is not None:
reporter = _StopAfterWrapper(reporter=reporter, limit=stop_after) # depends on [control=['if'], data=['stop_after']]
locator = ObjectLocator()
cases = (case for test in tests for loader in locator.locate_by_name(name=test) for case in loader.load())
suite = unittest.TestSuite(cases)
getattr(reporter, 'startTestRun', lambda : None)()
suite.run(reporter)
getattr(reporter, 'stopTestRun', lambda : None)()
return reporter
|
def calibration_stimulus(self, mode):
    """Gets the stimulus model for calibration.

    :param mode: Type of stimulus to get: 'tone' or 'noise'
    :type mode: str
    :returns: :class:`StimulusModel<sparkle.stim.stimulus_model.StimulusModel>`,
        or None for any other mode
    """
    if mode == 'tone':
        calibrator = self.tone_calibrator
    elif mode == 'noise':
        calibrator = self.bs_calibrator
    else:
        return None
    return calibrator.stimulus
|
def function[calibration_stimulus, parameter[self, mode]]:
constant[Gets the stimulus model for calibration
:param mode: Type of stimulus to get: tone or noise
:type mode: str
:returns: :class:`StimulusModel<sparkle.stim.stimulus_model.StimulusModel>`
]
if compare[name[mode] equal[==] constant[tone]] begin[:]
return[name[self].tone_calibrator.stimulus]
|
keyword[def] identifier[calibration_stimulus] ( identifier[self] , identifier[mode] ):
literal[string]
keyword[if] identifier[mode] == literal[string] :
keyword[return] identifier[self] . identifier[tone_calibrator] . identifier[stimulus]
keyword[elif] identifier[mode] == literal[string] :
keyword[return] identifier[self] . identifier[bs_calibrator] . identifier[stimulus]
|
def calibration_stimulus(self, mode):
"""Gets the stimulus model for calibration
:param mode: Type of stimulus to get: tone or noise
:type mode: str
:returns: :class:`StimulusModel<sparkle.stim.stimulus_model.StimulusModel>`
"""
if mode == 'tone':
return self.tone_calibrator.stimulus # depends on [control=['if'], data=[]]
elif mode == 'noise':
return self.bs_calibrator.stimulus # depends on [control=['if'], data=[]]
|
def get_option(self, key):
    """Returns current value of specified option.

    :param key: key of the option
    """
    # Legacy option names, remapped for backwards compatibility.
    legacy_names = {
        "rtmpdump": "rtmp-rtmpdump",
        "rtmpdump-proxy": "rtmp-proxy",
        "errorlog": "subprocess-errorlog",
    }
    key = legacy_names.get(key, key)

    if key == "http-proxy":
        return self.http.proxies.get("http")
    if key == "https-proxy":
        return self.http.proxies.get("https")

    # Options that mirror an attribute of the HTTP session directly.
    http_attrs = {
        "http-cookies": "cookies",
        "http-headers": "headers",
        "http-query-params": "params",
        "http-trust-env": "trust_env",
        "http-ssl-verify": "verify",
        "http-ssl-cert": "cert",
        "http-timeout": "timeout",
    }
    attr = http_attrs.get(key)
    if attr is not None:
        return getattr(self.http, attr)
    # Everything else lives in the generic options mapping.
    return self.options.get(key)
|
def function[get_option, parameter[self, key]]:
constant[Returns current value of specified option.
:param key: key of the option
]
if compare[name[key] equal[==] constant[rtmpdump]] begin[:]
variable[key] assign[=] constant[rtmp-rtmpdump]
if compare[name[key] equal[==] constant[http-proxy]] begin[:]
return[call[name[self].http.proxies.get, parameter[constant[http]]]]
|
keyword[def] identifier[get_option] ( identifier[self] , identifier[key] ):
literal[string]
keyword[if] identifier[key] == literal[string] :
identifier[key] = literal[string]
keyword[elif] identifier[key] == literal[string] :
identifier[key] = literal[string]
keyword[elif] identifier[key] == literal[string] :
identifier[key] = literal[string]
keyword[if] identifier[key] == literal[string] :
keyword[return] identifier[self] . identifier[http] . identifier[proxies] . identifier[get] ( literal[string] )
keyword[elif] identifier[key] == literal[string] :
keyword[return] identifier[self] . identifier[http] . identifier[proxies] . identifier[get] ( literal[string] )
keyword[elif] identifier[key] == literal[string] :
keyword[return] identifier[self] . identifier[http] . identifier[cookies]
keyword[elif] identifier[key] == literal[string] :
keyword[return] identifier[self] . identifier[http] . identifier[headers]
keyword[elif] identifier[key] == literal[string] :
keyword[return] identifier[self] . identifier[http] . identifier[params]
keyword[elif] identifier[key] == literal[string] :
keyword[return] identifier[self] . identifier[http] . identifier[trust_env]
keyword[elif] identifier[key] == literal[string] :
keyword[return] identifier[self] . identifier[http] . identifier[verify]
keyword[elif] identifier[key] == literal[string] :
keyword[return] identifier[self] . identifier[http] . identifier[cert]
keyword[elif] identifier[key] == literal[string] :
keyword[return] identifier[self] . identifier[http] . identifier[timeout]
keyword[else] :
keyword[return] identifier[self] . identifier[options] . identifier[get] ( identifier[key] )
|
def get_option(self, key):
"""Returns current value of specified option.
:param key: key of the option
"""
# Backwards compatibility
if key == 'rtmpdump':
key = 'rtmp-rtmpdump' # depends on [control=['if'], data=['key']]
elif key == 'rtmpdump-proxy':
key = 'rtmp-proxy' # depends on [control=['if'], data=['key']]
elif key == 'errorlog':
key = 'subprocess-errorlog' # depends on [control=['if'], data=['key']]
if key == 'http-proxy':
return self.http.proxies.get('http') # depends on [control=['if'], data=[]]
elif key == 'https-proxy':
return self.http.proxies.get('https') # depends on [control=['if'], data=[]]
elif key == 'http-cookies':
return self.http.cookies # depends on [control=['if'], data=[]]
elif key == 'http-headers':
return self.http.headers # depends on [control=['if'], data=[]]
elif key == 'http-query-params':
return self.http.params # depends on [control=['if'], data=[]]
elif key == 'http-trust-env':
return self.http.trust_env # depends on [control=['if'], data=[]]
elif key == 'http-ssl-verify':
return self.http.verify # depends on [control=['if'], data=[]]
elif key == 'http-ssl-cert':
return self.http.cert # depends on [control=['if'], data=[]]
elif key == 'http-timeout':
return self.http.timeout # depends on [control=['if'], data=[]]
else:
return self.options.get(key)
|
def telegram(self, client_key=None, tgid=None, key=None):
    """Build a Telegram template bound to this object for sending telegrams.

    :param client_key: Client Key Nationstates Gave you
    :param tgid: TGID from api template
    :param key: Key from api Template
    :returns: a Telegram template instance
    """
    template = Telegram(self, client_key, tgid, key)
    return template
|
def function[telegram, parameter[self, client_key, tgid, key]]:
constant[Create Telegram Templates which can be used to send telegrams
:param client_key: Client Key Nationstates Gave you
:param tgid: TGID from api template
:param key: Key from api Template
]
return[call[name[Telegram], parameter[name[self], name[client_key], name[tgid], name[key]]]]
|
keyword[def] identifier[telegram] ( identifier[self] , identifier[client_key] = keyword[None] , identifier[tgid] = keyword[None] , identifier[key] = keyword[None] ):
literal[string]
keyword[return] identifier[Telegram] ( identifier[self] , identifier[client_key] , identifier[tgid] , identifier[key] )
|
def telegram(self, client_key=None, tgid=None, key=None):
"""Create Telegram Templates which can be used to send telegrams
:param client_key: Client Key Nationstates Gave you
:param tgid: TGID from api template
:param key: Key from api Template
"""
return Telegram(self, client_key, tgid, key)
|
def flavor_access_list(name, projects, **kwargs):
    '''
    Grants access of the flavor to a project. Flavor must be private.
    :param name: non-public flavor name
    :param projects: list of projects which should have the access to the flavor
    .. code-block:: yaml
    nova-flavor-share:
    nova.flavor_project_access:
    - name: myflavor
    - project:
    - project1
    - project2
    To remove all project from access list:
    .. code-block:: yaml
    - project: []
    '''
    # Salt test mode: report intended changes without applying them.
    dry_run = __opts__['test']
    ret = {'name': name, 'result': False, 'comment': '', 'changes': {}}
    # Restrict the flavor listing to private flavors only; access lists
    # exist solely for non-public flavors.
    kwargs.update({'filter': {'is_public': False}})
    try:
        flavor_list = __salt__['nova.flavor_list'](**kwargs)
        flavor_id = flavor_list[name]['id']
    except KeyError:
        # Flavor not found among private flavors — propagate to the caller.
        raise
    project_list = __salt__['keystone.project_list'](**kwargs)
    access_list = __salt__['nova.flavor_access_list'](flavor_id, **kwargs)
    # Project names (as text) whose ids currently appear on the flavor's
    # access list.
    existing_list = [six.text_type(pname) for pname in project_list
                     if project_list[pname]['id'] in access_list[flavor_id]]
    # Desired project names from the state definition, normalized to text.
    defined_list = [six.text_type(project) for project in projects]
    add_list = set(defined_list) - set(existing_list)
    remove_list = set(existing_list) - set(defined_list)
    if not add_list and not remove_list:
        # Already converged: nothing to add or remove.
        ret['result'] = True
        ret['comment'] = 'Flavor "{0}" access list corresponds to defined one.'.format(name)
    else:
        if dry_run:
            # result=None signals "would change" in Salt test mode.
            ret['result'] = None
            ret['comment'] = 'Flavor "{0}" access list would be corrected.'.format(name)
            ret['changes'] = {name: {'new': defined_list, 'old': existing_list}}
        else:
            added = []
            removed = []
            if add_list:
                for project in add_list:
                    added.append(__salt__['nova.flavor_access_add'](flavor_id, project_list[project]['id'], **kwargs))
            if remove_list:
                for project in remove_list:
                    removed.append(__salt__['nova.flavor_access_remove'](flavor_id,
                                                                        project_list[project]['id'], **kwargs))
            # At least one non-empty change set was processed: report success.
            if any(add_list) or any(remove_list):
                ret['result'] = True
                ret['comment'] = 'Flavor "{0}" access list corrected.'.format(name)
                ret['changes'] = {name: {'new': defined_list, 'old': existing_list}}
    return ret
|
def function[flavor_access_list, parameter[name, projects]]:
constant[
Grants access of the flavor to a project. Flavor must be private.
:param name: non-public flavor name
:param projects: list of projects which should have the access to the flavor
.. code-block:: yaml
nova-flavor-share:
nova.flavor_project_access:
- name: myflavor
- project:
- project1
- project2
To remove all project from access list:
.. code-block:: yaml
- project: []
]
variable[dry_run] assign[=] call[name[__opts__]][constant[test]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da20c7ca860>, <ast.Constant object at 0x7da20c7cb910>, <ast.Constant object at 0x7da20c7cbe20>, <ast.Constant object at 0x7da20c7cb400>], [<ast.Name object at 0x7da20c7cbac0>, <ast.Constant object at 0x7da20c7c8fa0>, <ast.Constant object at 0x7da20c7c8400>, <ast.Dict object at 0x7da20c7c8c40>]]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da20c7c9f30>], [<ast.Dict object at 0x7da20c7c9360>]]]]
<ast.Try object at 0x7da20c7cbeb0>
variable[project_list] assign[=] call[call[name[__salt__]][constant[keystone.project_list]], parameter[]]
variable[access_list] assign[=] call[call[name[__salt__]][constant[nova.flavor_access_list]], parameter[name[flavor_id]]]
variable[existing_list] assign[=] <ast.ListComp object at 0x7da204623640>
variable[defined_list] assign[=] <ast.ListComp object at 0x7da204621600>
variable[add_list] assign[=] binary_operation[call[name[set], parameter[name[defined_list]]] - call[name[set], parameter[name[existing_list]]]]
variable[remove_list] assign[=] binary_operation[call[name[set], parameter[name[existing_list]]] - call[name[set], parameter[name[defined_list]]]]
if <ast.BoolOp object at 0x7da20c7cb9d0> begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
call[name[ret]][constant[comment]] assign[=] call[constant[Flavor "{0}" access list corresponds to defined one.].format, parameter[name[name]]]
return[name[ret]]
|
keyword[def] identifier[flavor_access_list] ( identifier[name] , identifier[projects] ,** identifier[kwargs] ):
literal[string]
identifier[dry_run] = identifier[__opts__] [ literal[string] ]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[False] , literal[string] : literal[string] , literal[string] :{}}
identifier[kwargs] . identifier[update] ({ literal[string] :{ literal[string] : keyword[False] }})
keyword[try] :
identifier[flavor_list] = identifier[__salt__] [ literal[string] ](** identifier[kwargs] )
identifier[flavor_id] = identifier[flavor_list] [ identifier[name] ][ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise]
identifier[project_list] = identifier[__salt__] [ literal[string] ](** identifier[kwargs] )
identifier[access_list] = identifier[__salt__] [ literal[string] ]( identifier[flavor_id] ,** identifier[kwargs] )
identifier[existing_list] =[ identifier[six] . identifier[text_type] ( identifier[pname] ) keyword[for] identifier[pname] keyword[in] identifier[project_list]
keyword[if] identifier[project_list] [ identifier[pname] ][ literal[string] ] keyword[in] identifier[access_list] [ identifier[flavor_id] ]]
identifier[defined_list] =[ identifier[six] . identifier[text_type] ( identifier[project] ) keyword[for] identifier[project] keyword[in] identifier[projects] ]
identifier[add_list] = identifier[set] ( identifier[defined_list] )- identifier[set] ( identifier[existing_list] )
identifier[remove_list] = identifier[set] ( identifier[existing_list] )- identifier[set] ( identifier[defined_list] )
keyword[if] keyword[not] identifier[add_list] keyword[and] keyword[not] identifier[remove_list] :
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
keyword[if] identifier[dry_run] :
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ identifier[name] :{ literal[string] : identifier[defined_list] , literal[string] : identifier[existing_list] }}
keyword[else] :
identifier[added] =[]
identifier[removed] =[]
keyword[if] identifier[add_list] :
keyword[for] identifier[project] keyword[in] identifier[add_list] :
identifier[added] . identifier[append] ( identifier[__salt__] [ literal[string] ]( identifier[flavor_id] , identifier[project_list] [ identifier[project] ][ literal[string] ],** identifier[kwargs] ))
keyword[if] identifier[remove_list] :
keyword[for] identifier[project] keyword[in] identifier[remove_list] :
identifier[removed] . identifier[append] ( identifier[__salt__] [ literal[string] ]( identifier[flavor_id] ,
identifier[project_list] [ identifier[project] ][ literal[string] ],** identifier[kwargs] ))
keyword[if] identifier[any] ( identifier[add_list] ) keyword[or] identifier[any] ( identifier[remove_list] ):
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ identifier[name] :{ literal[string] : identifier[defined_list] , literal[string] : identifier[existing_list] }}
keyword[return] identifier[ret]
|
def flavor_access_list(name, projects, **kwargs):
"""
Grants access of the flavor to a project. Flavor must be private.
:param name: non-public flavor name
:param projects: list of projects which should have the access to the flavor
.. code-block:: yaml
nova-flavor-share:
nova.flavor_project_access:
- name: myflavor
- project:
- project1
- project2
To remove all project from access list:
.. code-block:: yaml
- project: []
"""
dry_run = __opts__['test']
ret = {'name': name, 'result': False, 'comment': '', 'changes': {}}
kwargs.update({'filter': {'is_public': False}})
try:
flavor_list = __salt__['nova.flavor_list'](**kwargs)
flavor_id = flavor_list[name]['id'] # depends on [control=['try'], data=[]]
except KeyError:
raise # depends on [control=['except'], data=[]]
project_list = __salt__['keystone.project_list'](**kwargs)
access_list = __salt__['nova.flavor_access_list'](flavor_id, **kwargs)
existing_list = [six.text_type(pname) for pname in project_list if project_list[pname]['id'] in access_list[flavor_id]]
defined_list = [six.text_type(project) for project in projects]
add_list = set(defined_list) - set(existing_list)
remove_list = set(existing_list) - set(defined_list)
if not add_list and (not remove_list):
ret['result'] = True
ret['comment'] = 'Flavor "{0}" access list corresponds to defined one.'.format(name) # depends on [control=['if'], data=[]]
elif dry_run:
ret['result'] = None
ret['comment'] = 'Flavor "{0}" access list would be corrected.'.format(name)
ret['changes'] = {name: {'new': defined_list, 'old': existing_list}} # depends on [control=['if'], data=[]]
else:
added = []
removed = []
if add_list:
for project in add_list:
added.append(__salt__['nova.flavor_access_add'](flavor_id, project_list[project]['id'], **kwargs)) # depends on [control=['for'], data=['project']] # depends on [control=['if'], data=[]]
if remove_list:
for project in remove_list:
removed.append(__salt__['nova.flavor_access_remove'](flavor_id, project_list[project]['id'], **kwargs)) # depends on [control=['for'], data=['project']] # depends on [control=['if'], data=[]]
if any(add_list) or any(remove_list):
ret['result'] = True
ret['comment'] = 'Flavor "{0}" access list corrected.'.format(name)
ret['changes'] = {name: {'new': defined_list, 'old': existing_list}} # depends on [control=['if'], data=[]]
return ret
|
def to_joint_gaussian(self):
    """
    The linear Gaussian Bayesian Networks are an alternative
    representation for the class of multivariate Gaussian distributions.
    This method returns an equivalent joint Gaussian distribution.

    Returns
    -------
    GaussianDistribution: An equivalent joint Gaussian
        distribution for the network.

    Reference
    ---------
    Section 7.2, Example 7.3,
    Probabilistic Graphical Models, Principles and Techniques

    Examples
    --------
    >>> from pgmpy.models import LinearGaussianBayesianNetwork
    >>> from pgmpy.factors.continuous import LinearGaussianCPD
    >>> model = LinearGaussianBayesianNetwork([('x1', 'x2'), ('x2', 'x3')])
    >>> cpd1 = LinearGaussianCPD('x1', [1], 4)
    >>> cpd2 = LinearGaussianCPD('x2', [-5, 0.5], 4, ['x1'])
    >>> cpd3 = LinearGaussianCPD('x3', [4, -1], 3, ['x2'])
    >>> model.add_cpds(cpd1, cpd2, cpd3)
    >>> jgd = model.to_joint_gaussian()
    >>> jgd.variables
    ['x1', 'x2', 'x3']
    >>> jgd.mean
    array([[ 1. ],
           [-4.5],
           [ 8.5]])
    >>> jgd.covariance
    array([[ 4.,  2., -2.],
           [ 2.,  5., -5.],
           [-2., -5.,  8.]])
    """
    # ``nx.topological_sort`` returns a generator in networkx >= 2.0;
    # materialise it so ``len()`` and positional indexing below are valid
    # (harmless no-op copy on older networkx which returned a list).
    variables = list(nx.topological_sort(self))
    # Variable name -> position, replacing repeated O(n) ``list.index`` calls.
    position = {var: idx for idx, var in enumerate(variables)}
    mean = np.zeros(len(variables))
    covariance = np.zeros((len(variables), len(variables)))
    # Forward pass in topological order: each node's mean and variance depend
    # only on its parents, which are already computed.
    for node_idx in range(len(variables)):
        cpd = self.get_cpds(variables[node_idx])
        mean[node_idx] = sum(
            coeff * mean[position[parent]]
            for coeff, parent in zip(cpd.beta_vector, cpd.evidence)) + cpd.beta_0
        covariance[node_idx, node_idx] = sum(
            coeff * coeff * covariance[position[parent], position[parent]]
            for coeff, parent in zip(cpd.beta_vector, cpd.evidence)) + cpd.variance
    # Fill in the off-diagonal covariances, mirroring already-computed
    # symmetric entries instead of recomputing them.
    for node_i_idx in range(len(variables)):
        for node_j_idx in range(len(variables)):
            if covariance[node_j_idx, node_i_idx] != 0:
                covariance[node_i_idx, node_j_idx] = covariance[node_j_idx, node_i_idx]
            else:
                cpd_j = self.get_cpds(variables[node_j_idx])
                covariance[node_i_idx, node_j_idx] = sum(
                    coeff * covariance[node_i_idx, position[parent]]
                    for coeff, parent in zip(cpd_j.beta_vector, cpd_j.evidence))
    return GaussianDistribution(variables, mean, covariance)
|
def function[to_joint_gaussian, parameter[self]]:
constant[
The linear Gaussian Bayesian Networks are an alternative
representation for the class of multivariate Gaussian distributions.
This method returns an equivalent joint Gaussian distribution.
Returns
-------
GaussianDistribution: An equivalent joint Gaussian
distribution for the network.
Reference
---------
Section 7.2, Example 7.3,
Probabilistic Graphical Models, Principles and Techniques
Examples
--------
>>> from pgmpy.models import LinearGaussianBayesianNetwork
>>> from pgmpy.factors.continuous import LinearGaussianCPD
>>> model = LinearGaussianBayesianNetwork([('x1', 'x2'), ('x2', 'x3')])
>>> cpd1 = LinearGaussianCPD('x1', [1], 4)
>>> cpd2 = LinearGaussianCPD('x2', [-5, 0.5], 4, ['x1'])
>>> cpd3 = LinearGaussianCPD('x3', [4, -1], 3, ['x2'])
>>> model.add_cpds(cpd1, cpd2, cpd3)
>>> jgd = model.to_joint_gaussian()
>>> jgd.variables
['x1', 'x2', 'x3']
>>> jgd.mean
array([[ 1. ],
[-4.5],
[ 8.5]])
>>> jgd.covariance
array([[ 4., 2., -2.],
[ 2., 5., -5.],
[-2., -5., 8.]])
]
variable[variables] assign[=] call[name[nx].topological_sort, parameter[name[self]]]
variable[mean] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[variables]]]]]
variable[covariance] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Call object at 0x7da18f00e0b0>, <ast.Call object at 0x7da18f00d660>]]]]
for taget[name[node_idx]] in starred[call[name[range], parameter[call[name[len], parameter[name[variables]]]]]] begin[:]
variable[cpd] assign[=] call[name[self].get_cpds, parameter[call[name[variables]][name[node_idx]]]]
call[name[mean]][name[node_idx]] assign[=] binary_operation[call[name[sum], parameter[<ast.ListComp object at 0x7da18f00f1c0>]] + name[cpd].beta_0]
call[name[covariance]][tuple[[<ast.Name object at 0x7da18f00eb00>, <ast.Name object at 0x7da18f00e860>]]] assign[=] binary_operation[call[name[sum], parameter[<ast.ListComp object at 0x7da18f00e110>]] + name[cpd].variance]
for taget[name[node_i_idx]] in starred[call[name[range], parameter[call[name[len], parameter[name[variables]]]]]] begin[:]
for taget[name[node_j_idx]] in starred[call[name[range], parameter[call[name[len], parameter[name[variables]]]]]] begin[:]
if compare[call[name[covariance]][tuple[[<ast.Name object at 0x7da18f810370>, <ast.Name object at 0x7da18f810190>]]] not_equal[!=] constant[0]] begin[:]
call[name[covariance]][tuple[[<ast.Name object at 0x7da18f812a70>, <ast.Name object at 0x7da18f812830>]]] assign[=] call[name[covariance]][tuple[[<ast.Name object at 0x7da18f812f20>, <ast.Name object at 0x7da18f810dc0>]]]
return[call[name[GaussianDistribution], parameter[name[variables], name[mean], name[covariance]]]]
|
keyword[def] identifier[to_joint_gaussian] ( identifier[self] ):
literal[string]
identifier[variables] = identifier[nx] . identifier[topological_sort] ( identifier[self] )
identifier[mean] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[variables] ))
identifier[covariance] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[variables] ), identifier[len] ( identifier[variables] )))
keyword[for] identifier[node_idx] keyword[in] identifier[range] ( identifier[len] ( identifier[variables] )):
identifier[cpd] = identifier[self] . identifier[get_cpds] ( identifier[variables] [ identifier[node_idx] ])
identifier[mean] [ identifier[node_idx] ]= identifier[sum] ([ identifier[coeff] * identifier[mean] [ identifier[variables] . identifier[index] ( identifier[parent] )] keyword[for]
identifier[coeff] , identifier[parent] keyword[in] identifier[zip] ( identifier[cpd] . identifier[beta_vector] , identifier[cpd] . identifier[evidence] )])+ identifier[cpd] . identifier[beta_0]
identifier[covariance] [ identifier[node_idx] , identifier[node_idx] ]= identifier[sum] (
[ identifier[coeff] * identifier[coeff] * identifier[covariance] [ identifier[variables] . identifier[index] ( identifier[parent] ), identifier[variables] . identifier[index] ( identifier[parent] )]
keyword[for] identifier[coeff] , identifier[parent] keyword[in] identifier[zip] ( identifier[cpd] . identifier[beta_vector] , identifier[cpd] . identifier[evidence] )])+ identifier[cpd] . identifier[variance]
keyword[for] identifier[node_i_idx] keyword[in] identifier[range] ( identifier[len] ( identifier[variables] )):
keyword[for] identifier[node_j_idx] keyword[in] identifier[range] ( identifier[len] ( identifier[variables] )):
keyword[if] identifier[covariance] [ identifier[node_j_idx] , identifier[node_i_idx] ]!= literal[int] :
identifier[covariance] [ identifier[node_i_idx] , identifier[node_j_idx] ]= identifier[covariance] [ identifier[node_j_idx] , identifier[node_i_idx] ]
keyword[else] :
identifier[cpd_j] = identifier[self] . identifier[get_cpds] ( identifier[variables] [ identifier[node_j_idx] ])
identifier[covariance] [ identifier[node_i_idx] , identifier[node_j_idx] ]= identifier[sum] (
[ identifier[coeff] * identifier[covariance] [ identifier[node_i_idx] , identifier[variables] . identifier[index] ( identifier[parent] )]
keyword[for] identifier[coeff] , identifier[parent] keyword[in] identifier[zip] ( identifier[cpd_j] . identifier[beta_vector] , identifier[cpd_j] . identifier[evidence] )])
keyword[return] identifier[GaussianDistribution] ( identifier[variables] , identifier[mean] , identifier[covariance] )
|
def to_joint_gaussian(self):
"""
The linear Gaussian Bayesian Networks are an alternative
representation for the class of multivariate Gaussian distributions.
This method returns an equivalent joint Gaussian distribution.
Returns
-------
GaussianDistribution: An equivalent joint Gaussian
distribution for the network.
Reference
---------
Section 7.2, Example 7.3,
Probabilistic Graphical Models, Principles and Techniques
Examples
--------
>>> from pgmpy.models import LinearGaussianBayesianNetwork
>>> from pgmpy.factors.continuous import LinearGaussianCPD
>>> model = LinearGaussianBayesianNetwork([('x1', 'x2'), ('x2', 'x3')])
>>> cpd1 = LinearGaussianCPD('x1', [1], 4)
>>> cpd2 = LinearGaussianCPD('x2', [-5, 0.5], 4, ['x1'])
>>> cpd3 = LinearGaussianCPD('x3', [4, -1], 3, ['x2'])
>>> model.add_cpds(cpd1, cpd2, cpd3)
>>> jgd = model.to_joint_gaussian()
>>> jgd.variables
['x1', 'x2', 'x3']
>>> jgd.mean
array([[ 1. ],
[-4.5],
[ 8.5]])
>>> jgd.covariance
array([[ 4., 2., -2.],
[ 2., 5., -5.],
[-2., -5., 8.]])
"""
variables = nx.topological_sort(self)
mean = np.zeros(len(variables))
covariance = np.zeros((len(variables), len(variables)))
for node_idx in range(len(variables)):
cpd = self.get_cpds(variables[node_idx])
mean[node_idx] = sum([coeff * mean[variables.index(parent)] for (coeff, parent) in zip(cpd.beta_vector, cpd.evidence)]) + cpd.beta_0
covariance[node_idx, node_idx] = sum([coeff * coeff * covariance[variables.index(parent), variables.index(parent)] for (coeff, parent) in zip(cpd.beta_vector, cpd.evidence)]) + cpd.variance # depends on [control=['for'], data=['node_idx']]
for node_i_idx in range(len(variables)):
for node_j_idx in range(len(variables)):
if covariance[node_j_idx, node_i_idx] != 0:
covariance[node_i_idx, node_j_idx] = covariance[node_j_idx, node_i_idx] # depends on [control=['if'], data=[]]
else:
cpd_j = self.get_cpds(variables[node_j_idx])
covariance[node_i_idx, node_j_idx] = sum([coeff * covariance[node_i_idx, variables.index(parent)] for (coeff, parent) in zip(cpd_j.beta_vector, cpd_j.evidence)]) # depends on [control=['for'], data=['node_j_idx']] # depends on [control=['for'], data=['node_i_idx']]
return GaussianDistribution(variables, mean, covariance)
|
def constraints_since(self, other):
    """
    Collect the constraints accumulated between `other` and this history.

    Walks the parent chain starting at ``self``, gathering each node's
    ``recent_constraints``, and stops when ``other`` (or the root) is hit.

    :param other: a prior PathHistory object
    :returns: a list of constraints
    """
    gathered = []
    node = self
    while node is not None and node is not other:
        gathered += node.recent_constraints
        node = node.parent
    return gathered
|
def function[constraints_since, parameter[self, other]]:
constant[
Returns the constraints that have been accumulated since `other`.
:param other: a prior PathHistory object
:returns: a list of constraints
]
variable[constraints] assign[=] list[[]]
variable[cur] assign[=] name[self]
while <ast.BoolOp object at 0x7da204623d30> begin[:]
call[name[constraints].extend, parameter[name[cur].recent_constraints]]
variable[cur] assign[=] name[cur].parent
return[name[constraints]]
|
keyword[def] identifier[constraints_since] ( identifier[self] , identifier[other] ):
literal[string]
identifier[constraints] =[]
identifier[cur] = identifier[self]
keyword[while] identifier[cur] keyword[is] keyword[not] identifier[other] keyword[and] identifier[cur] keyword[is] keyword[not] keyword[None] :
identifier[constraints] . identifier[extend] ( identifier[cur] . identifier[recent_constraints] )
identifier[cur] = identifier[cur] . identifier[parent]
keyword[return] identifier[constraints]
|
def constraints_since(self, other):
"""
Returns the constraints that have been accumulated since `other`.
:param other: a prior PathHistory object
:returns: a list of constraints
"""
constraints = []
cur = self
while cur is not other and cur is not None:
constraints.extend(cur.recent_constraints)
cur = cur.parent # depends on [control=['while'], data=[]]
return constraints
|
def sampleByKey(self, withReplacement, fractions, seed=None):
    """
    Return a subset of this RDD sampled by key (via stratified sampling).

    Create a sample of this RDD using variable sampling rates for
    different keys as specified by fractions, a key to sampling rate map.

    >>> fractions = {"a": 0.2, "b": 0.1}
    >>> rdd = sc.parallelize(fractions.keys()).cartesian(sc.parallelize(range(0, 1000)))
    >>> sample = dict(rdd.sampleByKey(False, fractions, 2).groupByKey().collect())
    >>> 100 < len(sample["a"]) < 300 and 50 < len(sample["b"]) < 150
    True
    >>> max(sample["a"]) <= 999 and min(sample["a"]) >= 0
    True
    >>> max(sample["b"]) <= 999 and min(sample["b"]) >= 0
    True
    """
    # Every per-key sampling rate must be non-negative.
    for rate in fractions.values():
        assert rate >= 0.0, "Negative fraction value: %s" % rate
    sampler = RDDStratifiedSampler(withReplacement, fractions, seed)
    return self.mapPartitionsWithIndex(sampler.func, True)
|
def function[sampleByKey, parameter[self, withReplacement, fractions, seed]]:
constant[
Return a subset of this RDD sampled by key (via stratified sampling).
Create a sample of this RDD using variable sampling rates for
different keys as specified by fractions, a key to sampling rate map.
>>> fractions = {"a": 0.2, "b": 0.1}
>>> rdd = sc.parallelize(fractions.keys()).cartesian(sc.parallelize(range(0, 1000)))
>>> sample = dict(rdd.sampleByKey(False, fractions, 2).groupByKey().collect())
>>> 100 < len(sample["a"]) < 300 and 50 < len(sample["b"]) < 150
True
>>> max(sample["a"]) <= 999 and min(sample["a"]) >= 0
True
>>> max(sample["b"]) <= 999 and min(sample["b"]) >= 0
True
]
for taget[name[fraction]] in starred[call[name[fractions].values, parameter[]]] begin[:]
assert[compare[name[fraction] greater_or_equal[>=] constant[0.0]]]
return[call[name[self].mapPartitionsWithIndex, parameter[call[name[RDDStratifiedSampler], parameter[name[withReplacement], name[fractions], name[seed]]].func, constant[True]]]]
|
keyword[def] identifier[sampleByKey] ( identifier[self] , identifier[withReplacement] , identifier[fractions] , identifier[seed] = keyword[None] ):
literal[string]
keyword[for] identifier[fraction] keyword[in] identifier[fractions] . identifier[values] ():
keyword[assert] identifier[fraction] >= literal[int] , literal[string] % identifier[fraction]
keyword[return] identifier[self] . identifier[mapPartitionsWithIndex] (
identifier[RDDStratifiedSampler] ( identifier[withReplacement] , identifier[fractions] , identifier[seed] ). identifier[func] , keyword[True] )
|
def sampleByKey(self, withReplacement, fractions, seed=None):
"""
Return a subset of this RDD sampled by key (via stratified sampling).
Create a sample of this RDD using variable sampling rates for
different keys as specified by fractions, a key to sampling rate map.
>>> fractions = {"a": 0.2, "b": 0.1}
>>> rdd = sc.parallelize(fractions.keys()).cartesian(sc.parallelize(range(0, 1000)))
>>> sample = dict(rdd.sampleByKey(False, fractions, 2).groupByKey().collect())
>>> 100 < len(sample["a"]) < 300 and 50 < len(sample["b"]) < 150
True
>>> max(sample["a"]) <= 999 and min(sample["a"]) >= 0
True
>>> max(sample["b"]) <= 999 and min(sample["b"]) >= 0
True
"""
for fraction in fractions.values():
assert fraction >= 0.0, 'Negative fraction value: %s' % fraction # depends on [control=['for'], data=['fraction']]
return self.mapPartitionsWithIndex(RDDStratifiedSampler(withReplacement, fractions, seed).func, True)
|
def find(self, objects):
    """Find exactly one match in the list of objects.

    :param objects: objects to filter
    :type objects: :class:`list`
    :return: the one matching object
    :raises groupy.exceptions.NoMatchesError: if no objects match
    :raises groupy.exceptions.MultipleMatchesError: if multiple objects match
    """
    # Materialise the filtered results so they can be counted and indexed.
    hits = list(self.__call__(objects))
    if len(hits) == 0:
        raise exceptions.NoMatchesError(objects, self.tests)
    if len(hits) > 1:
        raise exceptions.MultipleMatchesError(objects, self.tests,
                                              matches=hits)
    return hits[0]
|
def function[find, parameter[self, objects]]:
constant[Find exactly one match in the list of objects.
:param objects: objects to filter
:type objects: :class:`list`
:return: the one matching object
:raises groupy.exceptions.NoMatchesError: if no objects match
:raises groupy.exceptions.MultipleMatchesError: if multiple objects match
]
variable[matches] assign[=] call[name[list], parameter[call[name[self].__call__, parameter[name[objects]]]]]
if <ast.UnaryOp object at 0x7da1b1104640> begin[:]
<ast.Raise object at 0x7da1b11064a0>
return[call[name[matches]][constant[0]]]
|
keyword[def] identifier[find] ( identifier[self] , identifier[objects] ):
literal[string]
identifier[matches] = identifier[list] ( identifier[self] . identifier[__call__] ( identifier[objects] ))
keyword[if] keyword[not] identifier[matches] :
keyword[raise] identifier[exceptions] . identifier[NoMatchesError] ( identifier[objects] , identifier[self] . identifier[tests] )
keyword[elif] identifier[len] ( identifier[matches] )> literal[int] :
keyword[raise] identifier[exceptions] . identifier[MultipleMatchesError] ( identifier[objects] , identifier[self] . identifier[tests] ,
identifier[matches] = identifier[matches] )
keyword[return] identifier[matches] [ literal[int] ]
|
def find(self, objects):
"""Find exactly one match in the list of objects.
:param objects: objects to filter
:type objects: :class:`list`
:return: the one matching object
:raises groupy.exceptions.NoMatchesError: if no objects match
:raises groupy.exceptions.MultipleMatchesError: if multiple objects match
"""
matches = list(self.__call__(objects))
if not matches:
raise exceptions.NoMatchesError(objects, self.tests) # depends on [control=['if'], data=[]]
elif len(matches) > 1:
raise exceptions.MultipleMatchesError(objects, self.tests, matches=matches) # depends on [control=['if'], data=[]]
return matches[0]
|
def delete(self, **kwds):
    """
    Endpoint: /photo/<id>/delete.json

    Deletes this photo.
    Returns True if successful.
    Raises a TroveboxError if not.
    """
    # Perform the remote delete first; local fields are cleared only if the
    # API call returns (a failure raises before we get there).
    outcome = self._client.photo.delete(self, **kwds)
    self._delete_fields()
    return outcome
|
def function[delete, parameter[self]]:
constant[
Endpoint: /photo/<id>/delete.json
Deletes this photo.
Returns True if successful.
Raises a TroveboxError if not.
]
variable[result] assign[=] call[name[self]._client.photo.delete, parameter[name[self]]]
call[name[self]._delete_fields, parameter[]]
return[name[result]]
|
keyword[def] identifier[delete] ( identifier[self] ,** identifier[kwds] ):
literal[string]
identifier[result] = identifier[self] . identifier[_client] . identifier[photo] . identifier[delete] ( identifier[self] ,** identifier[kwds] )
identifier[self] . identifier[_delete_fields] ()
keyword[return] identifier[result]
|
def delete(self, **kwds):
"""
Endpoint: /photo/<id>/delete.json
Deletes this photo.
Returns True if successful.
Raises a TroveboxError if not.
"""
result = self._client.photo.delete(self, **kwds)
self._delete_fields()
return result
|
def cross_entropy_reward_loss(logits, actions, rewards, name=None):
    """Calculate the loss for Policy Gradient Network.

    Parameters
    ----------
    logits : tensor
        The network outputs without softmax. This function implements softmax inside.
    actions : tensor or placeholder
        The agent actions.
    rewards : tensor or placeholder
        The rewards.

    Returns
    --------
    Tensor
        The TensorFlow loss function.

    Examples
    ----------
    >>> states_batch_pl = tf.placeholder(tf.float32, shape=[None, D])
    >>> network = InputLayer(states_batch_pl, name='input')
    >>> network = DenseLayer(network, n_units=H, act=tf.nn.relu, name='relu1')
    >>> network = DenseLayer(network, n_units=3, name='out')
    >>> probs = network.outputs
    >>> sampling_prob = tf.nn.softmax(probs)
    >>> actions_batch_pl = tf.placeholder(tf.int32, shape=[None])
    >>> discount_rewards_batch_pl = tf.placeholder(tf.float32, shape=[None])
    >>> loss = tl.rein.cross_entropy_reward_loss(probs, actions_batch_pl, discount_rewards_batch_pl)
    >>> train_op = tf.train.RMSPropOptimizer(learning_rate, decay_rate).minimize(loss)
    """
    # Per-step cross-entropy between the sampled actions and the policy
    # logits; the TF op applies softmax internally.
    ce_per_step = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=actions, logits=logits, name=name)
    # Weight each step's loss by its (discounted) reward, then sum over steps.
    return tf.reduce_sum(tf.multiply(ce_per_step, rewards))
|
def function[cross_entropy_reward_loss, parameter[logits, actions, rewards, name]]:
constant[Calculate the loss for Policy Gradient Network.
Parameters
----------
logits : tensor
The network outputs without softmax. This function implements softmax inside.
actions : tensor or placeholder
The agent actions.
rewards : tensor or placeholder
The rewards.
Returns
--------
Tensor
The TensorFlow loss function.
Examples
----------
>>> states_batch_pl = tf.placeholder(tf.float32, shape=[None, D])
>>> network = InputLayer(states_batch_pl, name='input')
>>> network = DenseLayer(network, n_units=H, act=tf.nn.relu, name='relu1')
>>> network = DenseLayer(network, n_units=3, name='out')
>>> probs = network.outputs
>>> sampling_prob = tf.nn.softmax(probs)
>>> actions_batch_pl = tf.placeholder(tf.int32, shape=[None])
>>> discount_rewards_batch_pl = tf.placeholder(tf.float32, shape=[None])
>>> loss = tl.rein.cross_entropy_reward_loss(probs, actions_batch_pl, discount_rewards_batch_pl)
>>> train_op = tf.train.RMSPropOptimizer(learning_rate, decay_rate).minimize(loss)
]
variable[cross_entropy] assign[=] call[name[tf].nn.sparse_softmax_cross_entropy_with_logits, parameter[]]
return[call[name[tf].reduce_sum, parameter[call[name[tf].multiply, parameter[name[cross_entropy], name[rewards]]]]]]
|
keyword[def] identifier[cross_entropy_reward_loss] ( identifier[logits] , identifier[actions] , identifier[rewards] , identifier[name] = keyword[None] ):
literal[string]
identifier[cross_entropy] = identifier[tf] . identifier[nn] . identifier[sparse_softmax_cross_entropy_with_logits] ( identifier[labels] = identifier[actions] , identifier[logits] = identifier[logits] , identifier[name] = identifier[name] )
keyword[return] identifier[tf] . identifier[reduce_sum] ( identifier[tf] . identifier[multiply] ( identifier[cross_entropy] , identifier[rewards] ))
|
def cross_entropy_reward_loss(logits, actions, rewards, name=None):
"""Calculate the loss for Policy Gradient Network.
Parameters
----------
logits : tensor
The network outputs without softmax. This function implements softmax inside.
actions : tensor or placeholder
The agent actions.
rewards : tensor or placeholder
The rewards.
Returns
--------
Tensor
The TensorFlow loss function.
Examples
----------
>>> states_batch_pl = tf.placeholder(tf.float32, shape=[None, D])
>>> network = InputLayer(states_batch_pl, name='input')
>>> network = DenseLayer(network, n_units=H, act=tf.nn.relu, name='relu1')
>>> network = DenseLayer(network, n_units=3, name='out')
>>> probs = network.outputs
>>> sampling_prob = tf.nn.softmax(probs)
>>> actions_batch_pl = tf.placeholder(tf.int32, shape=[None])
>>> discount_rewards_batch_pl = tf.placeholder(tf.float32, shape=[None])
>>> loss = tl.rein.cross_entropy_reward_loss(probs, actions_batch_pl, discount_rewards_batch_pl)
>>> train_op = tf.train.RMSPropOptimizer(learning_rate, decay_rate).minimize(loss)
"""
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=actions, logits=logits, name=name)
return tf.reduce_sum(tf.multiply(cross_entropy, rewards))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.