code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def chmod(cls, path, permission_text):
"""
:param str permission_text: "ls -l" style permission string. e.g. -rw-r--r--
"""
try:
check_file_existence(path)
except FileNotFoundError:
_, e, _ = sys.exc_info() # for python 2.5 compatibility
logger.debug(e)
return False
logger.debug("chmod %s %s" % (path, permission_text))
os.chmod(path, parseLsPermissionText(permission_text)) | def function[chmod, parameter[cls, path, permission_text]]:
constant[
:param str permission_text: "ls -l" style permission string. e.g. -rw-r--r--
]
<ast.Try object at 0x7da18f58f4c0>
call[name[logger].debug, parameter[binary_operation[constant[chmod %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f58cdf0>, <ast.Name object at 0x7da18f58c820>]]]]]
call[name[os].chmod, parameter[name[path], call[name[parseLsPermissionText], parameter[name[permission_text]]]]] | keyword[def] identifier[chmod] ( identifier[cls] , identifier[path] , identifier[permission_text] ):
literal[string]
keyword[try] :
identifier[check_file_existence] ( identifier[path] )
keyword[except] identifier[FileNotFoundError] :
identifier[_] , identifier[e] , identifier[_] = identifier[sys] . identifier[exc_info] ()
identifier[logger] . identifier[debug] ( identifier[e] )
keyword[return] keyword[False]
identifier[logger] . identifier[debug] ( literal[string] %( identifier[path] , identifier[permission_text] ))
identifier[os] . identifier[chmod] ( identifier[path] , identifier[parseLsPermissionText] ( identifier[permission_text] )) | def chmod(cls, path, permission_text):
"""
:param str permission_text: "ls -l" style permission string. e.g. -rw-r--r--
"""
try:
check_file_existence(path) # depends on [control=['try'], data=[]]
except FileNotFoundError:
(_, e, _) = sys.exc_info() # for python 2.5 compatibility
logger.debug(e)
return False # depends on [control=['except'], data=[]]
logger.debug('chmod %s %s' % (path, permission_text))
os.chmod(path, parseLsPermissionText(permission_text)) |
def malloc(self, dwSize, lpAddress = None):
"""
Allocates memory into the address space of the process.
@see: L{free}
@type dwSize: int
@param dwSize: Number of bytes to allocate.
@type lpAddress: int
@param lpAddress: (Optional)
Desired address for the newly allocated memory.
This is only a hint, the memory could still be allocated somewhere
else.
@rtype: int
@return: Address of the newly allocated memory.
@raise WindowsError: On error an exception is raised.
"""
hProcess = self.get_handle(win32.PROCESS_VM_OPERATION)
return win32.VirtualAllocEx(hProcess, lpAddress, dwSize) | def function[malloc, parameter[self, dwSize, lpAddress]]:
constant[
Allocates memory into the address space of the process.
@see: L{free}
@type dwSize: int
@param dwSize: Number of bytes to allocate.
@type lpAddress: int
@param lpAddress: (Optional)
Desired address for the newly allocated memory.
This is only a hint, the memory could still be allocated somewhere
else.
@rtype: int
@return: Address of the newly allocated memory.
@raise WindowsError: On error an exception is raised.
]
variable[hProcess] assign[=] call[name[self].get_handle, parameter[name[win32].PROCESS_VM_OPERATION]]
return[call[name[win32].VirtualAllocEx, parameter[name[hProcess], name[lpAddress], name[dwSize]]]] | keyword[def] identifier[malloc] ( identifier[self] , identifier[dwSize] , identifier[lpAddress] = keyword[None] ):
literal[string]
identifier[hProcess] = identifier[self] . identifier[get_handle] ( identifier[win32] . identifier[PROCESS_VM_OPERATION] )
keyword[return] identifier[win32] . identifier[VirtualAllocEx] ( identifier[hProcess] , identifier[lpAddress] , identifier[dwSize] ) | def malloc(self, dwSize, lpAddress=None):
"""
Allocates memory into the address space of the process.
@see: L{free}
@type dwSize: int
@param dwSize: Number of bytes to allocate.
@type lpAddress: int
@param lpAddress: (Optional)
Desired address for the newly allocated memory.
This is only a hint, the memory could still be allocated somewhere
else.
@rtype: int
@return: Address of the newly allocated memory.
@raise WindowsError: On error an exception is raised.
"""
hProcess = self.get_handle(win32.PROCESS_VM_OPERATION)
return win32.VirtualAllocEx(hProcess, lpAddress, dwSize) |
def generate_multiline_list(
self,
items, # type: typing.List[typing.Text]
before='', # type: typing.Text
after='', # type: typing.Text
delim=('(', ')'), # type: DelimTuple
compact=True, # type: bool
sep=',', # type: typing.Text
skip_last_sep=False # type: bool
):
# type: (...) -> None
"""
Given a list of items, emits one item per line.
This is convenient for function prototypes and invocations, as well as
for instantiating arrays, sets, and maps in some languages.
TODO(kelkabany): A backend that uses tabs cannot be used with this
if compact is false.
Args:
items (list[str]): Should contain the items to generate a list of.
before (str): The string to come before the list of items.
after (str): The string to follow the list of items.
delim (str, str): The first element is added immediately following
`before`. The second element is added prior to `after`.
compact (bool): In compact mode, the enclosing parentheses are on
the same lines as the first and last list item.
sep (str): The string that follows each list item when compact is
true. If compact is false, the separator is omitted for the
last item.
skip_last_sep (bool): When compact is false, whether the last line
should have a trailing separator. Ignored when compact is true.
"""
assert len(delim) == 2 and isinstance(delim[0], six.text_type) and \
isinstance(delim[1], six.text_type), 'delim must be a tuple of two unicode strings.'
if len(items) == 0:
self.emit(before + delim[0] + delim[1] + after)
return
if len(items) == 1:
self.emit(before + delim[0] + items[0] + delim[1] + after)
return
if compact:
self.emit(before + delim[0] + items[0] + sep)
def emit_list(items):
items = items[1:]
for (i, item) in enumerate(items):
if i == len(items) - 1:
self.emit(item + delim[1] + after)
else:
self.emit(item + sep)
if before or delim[0]:
with self.indent(len(before) + len(delim[0])):
emit_list(items)
else:
emit_list(items)
else:
if before or delim[0]:
self.emit(before + delim[0])
with self.indent():
for (i, item) in enumerate(items):
if i == len(items) - 1 and skip_last_sep:
self.emit(item)
else:
self.emit(item + sep)
if delim[1] or after:
self.emit(delim[1] + after)
elif delim[1]:
self.emit(delim[1]) | def function[generate_multiline_list, parameter[self, items, before, after, delim, compact, sep, skip_last_sep]]:
constant[
Given a list of items, emits one item per line.
This is convenient for function prototypes and invocations, as well as
for instantiating arrays, sets, and maps in some languages.
TODO(kelkabany): A backend that uses tabs cannot be used with this
if compact is false.
Args:
items (list[str]): Should contain the items to generate a list of.
before (str): The string to come before the list of items.
after (str): The string to follow the list of items.
delim (str, str): The first element is added immediately following
`before`. The second element is added prior to `after`.
compact (bool): In compact mode, the enclosing parentheses are on
the same lines as the first and last list item.
sep (str): The string that follows each list item when compact is
true. If compact is false, the separator is omitted for the
last item.
skip_last_sep (bool): When compact is false, whether the last line
should have a trailing separator. Ignored when compact is true.
]
assert[<ast.BoolOp object at 0x7da18dc9a350>]
if compare[call[name[len], parameter[name[items]]] equal[==] constant[0]] begin[:]
call[name[self].emit, parameter[binary_operation[binary_operation[binary_operation[name[before] + call[name[delim]][constant[0]]] + call[name[delim]][constant[1]]] + name[after]]]]
return[None]
if compare[call[name[len], parameter[name[items]]] equal[==] constant[1]] begin[:]
call[name[self].emit, parameter[binary_operation[binary_operation[binary_operation[binary_operation[name[before] + call[name[delim]][constant[0]]] + call[name[items]][constant[0]]] + call[name[delim]][constant[1]]] + name[after]]]]
return[None]
if name[compact] begin[:]
call[name[self].emit, parameter[binary_operation[binary_operation[binary_operation[name[before] + call[name[delim]][constant[0]]] + call[name[items]][constant[0]]] + name[sep]]]]
def function[emit_list, parameter[items]]:
variable[items] assign[=] call[name[items]][<ast.Slice object at 0x7da18dc9bcd0>]
for taget[tuple[[<ast.Name object at 0x7da18dc9abf0>, <ast.Name object at 0x7da18dc9add0>]]] in starred[call[name[enumerate], parameter[name[items]]]] begin[:]
if compare[name[i] equal[==] binary_operation[call[name[len], parameter[name[items]]] - constant[1]]] begin[:]
call[name[self].emit, parameter[binary_operation[binary_operation[name[item] + call[name[delim]][constant[1]]] + name[after]]]]
if <ast.BoolOp object at 0x7da18dc98f40> begin[:]
with call[name[self].indent, parameter[binary_operation[call[name[len], parameter[name[before]]] + call[name[len], parameter[call[name[delim]][constant[0]]]]]]] begin[:]
call[name[emit_list], parameter[name[items]]] | keyword[def] identifier[generate_multiline_list] (
identifier[self] ,
identifier[items] ,
identifier[before] = literal[string] ,
identifier[after] = literal[string] ,
identifier[delim] =( literal[string] , literal[string] ),
identifier[compact] = keyword[True] ,
identifier[sep] = literal[string] ,
identifier[skip_last_sep] = keyword[False]
):
literal[string]
keyword[assert] identifier[len] ( identifier[delim] )== literal[int] keyword[and] identifier[isinstance] ( identifier[delim] [ literal[int] ], identifier[six] . identifier[text_type] ) keyword[and] identifier[isinstance] ( identifier[delim] [ literal[int] ], identifier[six] . identifier[text_type] ), literal[string]
keyword[if] identifier[len] ( identifier[items] )== literal[int] :
identifier[self] . identifier[emit] ( identifier[before] + identifier[delim] [ literal[int] ]+ identifier[delim] [ literal[int] ]+ identifier[after] )
keyword[return]
keyword[if] identifier[len] ( identifier[items] )== literal[int] :
identifier[self] . identifier[emit] ( identifier[before] + identifier[delim] [ literal[int] ]+ identifier[items] [ literal[int] ]+ identifier[delim] [ literal[int] ]+ identifier[after] )
keyword[return]
keyword[if] identifier[compact] :
identifier[self] . identifier[emit] ( identifier[before] + identifier[delim] [ literal[int] ]+ identifier[items] [ literal[int] ]+ identifier[sep] )
keyword[def] identifier[emit_list] ( identifier[items] ):
identifier[items] = identifier[items] [ literal[int] :]
keyword[for] ( identifier[i] , identifier[item] ) keyword[in] identifier[enumerate] ( identifier[items] ):
keyword[if] identifier[i] == identifier[len] ( identifier[items] )- literal[int] :
identifier[self] . identifier[emit] ( identifier[item] + identifier[delim] [ literal[int] ]+ identifier[after] )
keyword[else] :
identifier[self] . identifier[emit] ( identifier[item] + identifier[sep] )
keyword[if] identifier[before] keyword[or] identifier[delim] [ literal[int] ]:
keyword[with] identifier[self] . identifier[indent] ( identifier[len] ( identifier[before] )+ identifier[len] ( identifier[delim] [ literal[int] ])):
identifier[emit_list] ( identifier[items] )
keyword[else] :
identifier[emit_list] ( identifier[items] )
keyword[else] :
keyword[if] identifier[before] keyword[or] identifier[delim] [ literal[int] ]:
identifier[self] . identifier[emit] ( identifier[before] + identifier[delim] [ literal[int] ])
keyword[with] identifier[self] . identifier[indent] ():
keyword[for] ( identifier[i] , identifier[item] ) keyword[in] identifier[enumerate] ( identifier[items] ):
keyword[if] identifier[i] == identifier[len] ( identifier[items] )- literal[int] keyword[and] identifier[skip_last_sep] :
identifier[self] . identifier[emit] ( identifier[item] )
keyword[else] :
identifier[self] . identifier[emit] ( identifier[item] + identifier[sep] )
keyword[if] identifier[delim] [ literal[int] ] keyword[or] identifier[after] :
identifier[self] . identifier[emit] ( identifier[delim] [ literal[int] ]+ identifier[after] )
keyword[elif] identifier[delim] [ literal[int] ]:
identifier[self] . identifier[emit] ( identifier[delim] [ literal[int] ]) | def generate_multiline_list(self, items, before='', after='', delim=('(', ')'), compact=True, sep=',', skip_last_sep=False): # type: typing.List[typing.Text]
# type: typing.Text
# type: typing.Text
# type: DelimTuple
# type: bool
# type: typing.Text
# type: bool
# type: (...) -> None
'\n Given a list of items, emits one item per line.\n\n This is convenient for function prototypes and invocations, as well as\n for instantiating arrays, sets, and maps in some languages.\n\n TODO(kelkabany): A backend that uses tabs cannot be used with this\n if compact is false.\n\n Args:\n items (list[str]): Should contain the items to generate a list of.\n before (str): The string to come before the list of items.\n after (str): The string to follow the list of items.\n delim (str, str): The first element is added immediately following\n `before`. The second element is added prior to `after`.\n compact (bool): In compact mode, the enclosing parentheses are on\n the same lines as the first and last list item.\n sep (str): The string that follows each list item when compact is\n true. If compact is false, the separator is omitted for the\n last item.\n skip_last_sep (bool): When compact is false, whether the last line\n should have a trailing separator. Ignored when compact is true.\n '
assert len(delim) == 2 and isinstance(delim[0], six.text_type) and isinstance(delim[1], six.text_type), 'delim must be a tuple of two unicode strings.'
if len(items) == 0:
self.emit(before + delim[0] + delim[1] + after)
return # depends on [control=['if'], data=[]]
if len(items) == 1:
self.emit(before + delim[0] + items[0] + delim[1] + after)
return # depends on [control=['if'], data=[]]
if compact:
self.emit(before + delim[0] + items[0] + sep)
def emit_list(items):
items = items[1:]
for (i, item) in enumerate(items):
if i == len(items) - 1:
self.emit(item + delim[1] + after) # depends on [control=['if'], data=[]]
else:
self.emit(item + sep) # depends on [control=['for'], data=[]]
if before or delim[0]:
with self.indent(len(before) + len(delim[0])):
emit_list(items) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
else:
emit_list(items) # depends on [control=['if'], data=[]]
else:
if before or delim[0]:
self.emit(before + delim[0]) # depends on [control=['if'], data=[]]
with self.indent():
for (i, item) in enumerate(items):
if i == len(items) - 1 and skip_last_sep:
self.emit(item) # depends on [control=['if'], data=[]]
else:
self.emit(item + sep) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=[]]
if delim[1] or after:
self.emit(delim[1] + after) # depends on [control=['if'], data=[]]
elif delim[1]:
self.emit(delim[1]) # depends on [control=['if'], data=[]] |
def get_config(self, budget):
"""
Function to sample a new configuration
This function is called inside Hyperband to query a new configuration
Parameters:
-----------
budget: float
the budget for which this configuration is scheduled
returns: config
should return a valid configuration
"""
sample = None
info_dict = {}
# If no model is available, sample from prior
# also mix in a fraction of random configs
if len(self.kde_models.keys()) == 0 or np.random.rand() < self.random_fraction:
sample = self.configspace.sample_configuration()
info_dict['model_based_pick'] = False
if sample is None:
try:
#import pdb; pdb.set_trace()
samples = self.kde_models[budget]['good'].sample(self.num_samples)
ei = self.kde_models[budget]['good'].pdf(samples)/self.kde_models[budget]['bad'].pdf(samples)
best_idx = np.argmax(ei)
best_vector = samples[best_idx]
sample = ConfigSpace.Configuration(self.configspace, vector=best_vector)
sample = ConfigSpace.util.deactivate_inactive_hyperparameters(
configuration_space=self.configspace,
configuration=sample.get_dictionary()
)
info_dict['model_based_pick'] = True
except Exception as e:
self.logger.warning(("="*50 + "\n")*3 +\
"Error sampling a configuration!\n"+\
"Models for budgets: %s"%(self.kde_models.keys()) +\
"\n here is a traceback:" +\
traceback.format_exc())
for b,l in self.losses.items():
self.logger.debug("budget: {}\nlosses:{}".format(b,l))
sample = self.configspace.sample_configuration()
info_dict['model_based_pick'] = False
return sample.get_dictionary(), info_dict | def function[get_config, parameter[self, budget]]:
constant[
Function to sample a new configuration
This function is called inside Hyperband to query a new configuration
Parameters:
-----------
budget: float
the budget for which this configuration is scheduled
returns: config
should return a valid configuration
]
variable[sample] assign[=] constant[None]
variable[info_dict] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da1b176bf40> begin[:]
variable[sample] assign[=] call[name[self].configspace.sample_configuration, parameter[]]
call[name[info_dict]][constant[model_based_pick]] assign[=] constant[False]
if compare[name[sample] is constant[None]] begin[:]
<ast.Try object at 0x7da1b17696f0>
return[tuple[[<ast.Call object at 0x7da1b19db820>, <ast.Name object at 0x7da1b19db550>]]] | keyword[def] identifier[get_config] ( identifier[self] , identifier[budget] ):
literal[string]
identifier[sample] = keyword[None]
identifier[info_dict] ={}
keyword[if] identifier[len] ( identifier[self] . identifier[kde_models] . identifier[keys] ())== literal[int] keyword[or] identifier[np] . identifier[random] . identifier[rand] ()< identifier[self] . identifier[random_fraction] :
identifier[sample] = identifier[self] . identifier[configspace] . identifier[sample_configuration] ()
identifier[info_dict] [ literal[string] ]= keyword[False]
keyword[if] identifier[sample] keyword[is] keyword[None] :
keyword[try] :
identifier[samples] = identifier[self] . identifier[kde_models] [ identifier[budget] ][ literal[string] ]. identifier[sample] ( identifier[self] . identifier[num_samples] )
identifier[ei] = identifier[self] . identifier[kde_models] [ identifier[budget] ][ literal[string] ]. identifier[pdf] ( identifier[samples] )/ identifier[self] . identifier[kde_models] [ identifier[budget] ][ literal[string] ]. identifier[pdf] ( identifier[samples] )
identifier[best_idx] = identifier[np] . identifier[argmax] ( identifier[ei] )
identifier[best_vector] = identifier[samples] [ identifier[best_idx] ]
identifier[sample] = identifier[ConfigSpace] . identifier[Configuration] ( identifier[self] . identifier[configspace] , identifier[vector] = identifier[best_vector] )
identifier[sample] = identifier[ConfigSpace] . identifier[util] . identifier[deactivate_inactive_hyperparameters] (
identifier[configuration_space] = identifier[self] . identifier[configspace] ,
identifier[configuration] = identifier[sample] . identifier[get_dictionary] ()
)
identifier[info_dict] [ literal[string] ]= keyword[True]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[logger] . identifier[warning] (( literal[string] * literal[int] + literal[string] )* literal[int] + literal[string] + literal[string] %( identifier[self] . identifier[kde_models] . identifier[keys] ())+ literal[string] + identifier[traceback] . identifier[format_exc] ())
keyword[for] identifier[b] , identifier[l] keyword[in] identifier[self] . identifier[losses] . identifier[items] ():
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[b] , identifier[l] ))
identifier[sample] = identifier[self] . identifier[configspace] . identifier[sample_configuration] ()
identifier[info_dict] [ literal[string] ]= keyword[False]
keyword[return] identifier[sample] . identifier[get_dictionary] (), identifier[info_dict] | def get_config(self, budget):
"""
Function to sample a new configuration
This function is called inside Hyperband to query a new configuration
Parameters:
-----------
budget: float
the budget for which this configuration is scheduled
returns: config
should return a valid configuration
"""
sample = None
info_dict = {} # If no model is available, sample from prior
# also mix in a fraction of random configs
if len(self.kde_models.keys()) == 0 or np.random.rand() < self.random_fraction:
sample = self.configspace.sample_configuration()
info_dict['model_based_pick'] = False # depends on [control=['if'], data=[]]
if sample is None:
try: #import pdb; pdb.set_trace()
samples = self.kde_models[budget]['good'].sample(self.num_samples)
ei = self.kde_models[budget]['good'].pdf(samples) / self.kde_models[budget]['bad'].pdf(samples)
best_idx = np.argmax(ei)
best_vector = samples[best_idx]
sample = ConfigSpace.Configuration(self.configspace, vector=best_vector)
sample = ConfigSpace.util.deactivate_inactive_hyperparameters(configuration_space=self.configspace, configuration=sample.get_dictionary())
info_dict['model_based_pick'] = True # depends on [control=['try'], data=[]]
except Exception as e:
self.logger.warning(('=' * 50 + '\n') * 3 + 'Error sampling a configuration!\n' + 'Models for budgets: %s' % self.kde_models.keys() + '\n here is a traceback:' + traceback.format_exc())
for (b, l) in self.losses.items():
self.logger.debug('budget: {}\nlosses:{}'.format(b, l)) # depends on [control=['for'], data=[]]
sample = self.configspace.sample_configuration()
info_dict['model_based_pick'] = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['sample']]
return (sample.get_dictionary(), info_dict) |
def body(self):
'''
Returns the axis instance where the light curves will be shown
'''
res = self._body[self.bcount]()
self.bcount += 1
return res | def function[body, parameter[self]]:
constant[
Returns the axis instance where the light curves will be shown
]
variable[res] assign[=] call[call[name[self]._body][name[self].bcount], parameter[]]
<ast.AugAssign object at 0x7da1b0eac9d0>
return[name[res]] | keyword[def] identifier[body] ( identifier[self] ):
literal[string]
identifier[res] = identifier[self] . identifier[_body] [ identifier[self] . identifier[bcount] ]()
identifier[self] . identifier[bcount] += literal[int]
keyword[return] identifier[res] | def body(self):
"""
Returns the axis instance where the light curves will be shown
"""
res = self._body[self.bcount]()
self.bcount += 1
return res |
def log(x, base=None):
""" log(x, base=e)
Logarithmic function.
"""
_math = infer_math(x)
if base is None:
return _math.log(x)
elif _math == math:
return _math.log(x, base)
else:
# numpy has no option to set a base
return _math.log(x) / _math.log(base) | def function[log, parameter[x, base]]:
constant[ log(x, base=e)
Logarithmic function.
]
variable[_math] assign[=] call[name[infer_math], parameter[name[x]]]
if compare[name[base] is constant[None]] begin[:]
return[call[name[_math].log, parameter[name[x]]]] | keyword[def] identifier[log] ( identifier[x] , identifier[base] = keyword[None] ):
literal[string]
identifier[_math] = identifier[infer_math] ( identifier[x] )
keyword[if] identifier[base] keyword[is] keyword[None] :
keyword[return] identifier[_math] . identifier[log] ( identifier[x] )
keyword[elif] identifier[_math] == identifier[math] :
keyword[return] identifier[_math] . identifier[log] ( identifier[x] , identifier[base] )
keyword[else] :
keyword[return] identifier[_math] . identifier[log] ( identifier[x] )/ identifier[_math] . identifier[log] ( identifier[base] ) | def log(x, base=None):
""" log(x, base=e)
Logarithmic function.
"""
_math = infer_math(x)
if base is None:
return _math.log(x) # depends on [control=['if'], data=[]]
elif _math == math:
return _math.log(x, base) # depends on [control=['if'], data=['_math']]
else:
# numpy has no option to set a base
return _math.log(x) / _math.log(base) |
def cancel():
"""Returns a threading.Event() that will get set when SIGTERM, or
SIGINT are triggered. This can be used to cancel execution of threads.
"""
cancel = threading.Event()
def cancel_execution(signum, frame):
signame = SIGNAL_NAMES.get(signum, signum)
logger.info("Signal %s received, quitting "
"(this can take some time)...", signame)
cancel.set()
signal.signal(signal.SIGINT, cancel_execution)
signal.signal(signal.SIGTERM, cancel_execution)
return cancel | def function[cancel, parameter[]]:
constant[Returns a threading.Event() that will get set when SIGTERM, or
SIGINT are triggered. This can be used to cancel execution of threads.
]
variable[cancel] assign[=] call[name[threading].Event, parameter[]]
def function[cancel_execution, parameter[signum, frame]]:
variable[signame] assign[=] call[name[SIGNAL_NAMES].get, parameter[name[signum], name[signum]]]
call[name[logger].info, parameter[constant[Signal %s received, quitting (this can take some time)...], name[signame]]]
call[name[cancel].set, parameter[]]
call[name[signal].signal, parameter[name[signal].SIGINT, name[cancel_execution]]]
call[name[signal].signal, parameter[name[signal].SIGTERM, name[cancel_execution]]]
return[name[cancel]] | keyword[def] identifier[cancel] ():
literal[string]
identifier[cancel] = identifier[threading] . identifier[Event] ()
keyword[def] identifier[cancel_execution] ( identifier[signum] , identifier[frame] ):
identifier[signame] = identifier[SIGNAL_NAMES] . identifier[get] ( identifier[signum] , identifier[signum] )
identifier[logger] . identifier[info] ( literal[string]
literal[string] , identifier[signame] )
identifier[cancel] . identifier[set] ()
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[cancel_execution] )
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGTERM] , identifier[cancel_execution] )
keyword[return] identifier[cancel] | def cancel():
"""Returns a threading.Event() that will get set when SIGTERM, or
SIGINT are triggered. This can be used to cancel execution of threads.
"""
cancel = threading.Event()
def cancel_execution(signum, frame):
signame = SIGNAL_NAMES.get(signum, signum)
logger.info('Signal %s received, quitting (this can take some time)...', signame)
cancel.set()
signal.signal(signal.SIGINT, cancel_execution)
signal.signal(signal.SIGTERM, cancel_execution)
return cancel |
def roundrect(surface, rect, color, rounding=5, unit=PIXEL):
"""
Draw an antialiased round rectangle on the surface.
surface : destination
rect : rectangle
color : rgb or rgba
radius : 0 <= radius <= 1
:source: http://pygame.org/project-AAfilledRoundedRect-2349-.html
"""
if unit == PERCENT:
rounding = int(min(rect.size) / 2 * rounding / 100)
rect = pygame.Rect(rect)
color = pygame.Color(*color)
alpha = color.a
color.a = 0
pos = rect.topleft
rect.topleft = 0, 0
rectangle = pygame.Surface(rect.size, SRCALPHA)
circle = pygame.Surface([min(rect.size) * 3] * 2, SRCALPHA)
pygame.draw.ellipse(circle, (0, 0, 0), circle.get_rect(), 0)
circle = pygame.transform.smoothscale(circle, (rounding, rounding))
rounding = rectangle.blit(circle, (0, 0))
rounding.bottomright = rect.bottomright
rectangle.blit(circle, rounding)
rounding.topright = rect.topright
rectangle.blit(circle, rounding)
rounding.bottomleft = rect.bottomleft
rectangle.blit(circle, rounding)
rectangle.fill((0, 0, 0), rect.inflate(-rounding.w, 0))
rectangle.fill((0, 0, 0), rect.inflate(0, -rounding.h))
rectangle.fill(color, special_flags=BLEND_RGBA_MAX)
rectangle.fill((255, 255, 255, alpha), special_flags=BLEND_RGBA_MIN)
return surface.blit(rectangle, pos) | def function[roundrect, parameter[surface, rect, color, rounding, unit]]:
constant[
Draw an antialiased round rectangle on the surface.
surface : destination
rect : rectangle
color : rgb or rgba
radius : 0 <= radius <= 1
:source: http://pygame.org/project-AAfilledRoundedRect-2349-.html
]
if compare[name[unit] equal[==] name[PERCENT]] begin[:]
variable[rounding] assign[=] call[name[int], parameter[binary_operation[binary_operation[binary_operation[call[name[min], parameter[name[rect].size]] / constant[2]] * name[rounding]] / constant[100]]]]
variable[rect] assign[=] call[name[pygame].Rect, parameter[name[rect]]]
variable[color] assign[=] call[name[pygame].Color, parameter[<ast.Starred object at 0x7da20c991840>]]
variable[alpha] assign[=] name[color].a
name[color].a assign[=] constant[0]
variable[pos] assign[=] name[rect].topleft
name[rect].topleft assign[=] tuple[[<ast.Constant object at 0x7da20c992fe0>, <ast.Constant object at 0x7da20c9935b0>]]
variable[rectangle] assign[=] call[name[pygame].Surface, parameter[name[rect].size, name[SRCALPHA]]]
variable[circle] assign[=] call[name[pygame].Surface, parameter[binary_operation[list[[<ast.BinOp object at 0x7da20c992470>]] * constant[2]], name[SRCALPHA]]]
call[name[pygame].draw.ellipse, parameter[name[circle], tuple[[<ast.Constant object at 0x7da20c992560>, <ast.Constant object at 0x7da20c993d60>, <ast.Constant object at 0x7da20c991ae0>]], call[name[circle].get_rect, parameter[]], constant[0]]]
variable[circle] assign[=] call[name[pygame].transform.smoothscale, parameter[name[circle], tuple[[<ast.Name object at 0x7da20c9939a0>, <ast.Name object at 0x7da20c992410>]]]]
variable[rounding] assign[=] call[name[rectangle].blit, parameter[name[circle], tuple[[<ast.Constant object at 0x7da20c993ca0>, <ast.Constant object at 0x7da18bccb9a0>]]]]
name[rounding].bottomright assign[=] name[rect].bottomright
call[name[rectangle].blit, parameter[name[circle], name[rounding]]]
name[rounding].topright assign[=] name[rect].topright
call[name[rectangle].blit, parameter[name[circle], name[rounding]]]
name[rounding].bottomleft assign[=] name[rect].bottomleft
call[name[rectangle].blit, parameter[name[circle], name[rounding]]]
call[name[rectangle].fill, parameter[tuple[[<ast.Constant object at 0x7da18bccafb0>, <ast.Constant object at 0x7da18bccb6a0>, <ast.Constant object at 0x7da18bccb940>]], call[name[rect].inflate, parameter[<ast.UnaryOp object at 0x7da18bccb190>, constant[0]]]]]
call[name[rectangle].fill, parameter[tuple[[<ast.Constant object at 0x7da18bcc9ab0>, <ast.Constant object at 0x7da18bcc8e20>, <ast.Constant object at 0x7da18bcc9cf0>]], call[name[rect].inflate, parameter[constant[0], <ast.UnaryOp object at 0x7da18bccb130>]]]]
call[name[rectangle].fill, parameter[name[color]]]
call[name[rectangle].fill, parameter[tuple[[<ast.Constant object at 0x7da18bcc8820>, <ast.Constant object at 0x7da18bcc9600>, <ast.Constant object at 0x7da18bcc90f0>, <ast.Name object at 0x7da18bcc9480>]]]]
return[call[name[surface].blit, parameter[name[rectangle], name[pos]]]] | keyword[def] identifier[roundrect] ( identifier[surface] , identifier[rect] , identifier[color] , identifier[rounding] = literal[int] , identifier[unit] = identifier[PIXEL] ):
literal[string]
keyword[if] identifier[unit] == identifier[PERCENT] :
identifier[rounding] = identifier[int] ( identifier[min] ( identifier[rect] . identifier[size] )/ literal[int] * identifier[rounding] / literal[int] )
identifier[rect] = identifier[pygame] . identifier[Rect] ( identifier[rect] )
identifier[color] = identifier[pygame] . identifier[Color] (* identifier[color] )
identifier[alpha] = identifier[color] . identifier[a]
identifier[color] . identifier[a] = literal[int]
identifier[pos] = identifier[rect] . identifier[topleft]
identifier[rect] . identifier[topleft] = literal[int] , literal[int]
identifier[rectangle] = identifier[pygame] . identifier[Surface] ( identifier[rect] . identifier[size] , identifier[SRCALPHA] )
identifier[circle] = identifier[pygame] . identifier[Surface] ([ identifier[min] ( identifier[rect] . identifier[size] )* literal[int] ]* literal[int] , identifier[SRCALPHA] )
identifier[pygame] . identifier[draw] . identifier[ellipse] ( identifier[circle] ,( literal[int] , literal[int] , literal[int] ), identifier[circle] . identifier[get_rect] (), literal[int] )
identifier[circle] = identifier[pygame] . identifier[transform] . identifier[smoothscale] ( identifier[circle] ,( identifier[rounding] , identifier[rounding] ))
identifier[rounding] = identifier[rectangle] . identifier[blit] ( identifier[circle] ,( literal[int] , literal[int] ))
identifier[rounding] . identifier[bottomright] = identifier[rect] . identifier[bottomright]
identifier[rectangle] . identifier[blit] ( identifier[circle] , identifier[rounding] )
identifier[rounding] . identifier[topright] = identifier[rect] . identifier[topright]
identifier[rectangle] . identifier[blit] ( identifier[circle] , identifier[rounding] )
identifier[rounding] . identifier[bottomleft] = identifier[rect] . identifier[bottomleft]
identifier[rectangle] . identifier[blit] ( identifier[circle] , identifier[rounding] )
identifier[rectangle] . identifier[fill] (( literal[int] , literal[int] , literal[int] ), identifier[rect] . identifier[inflate] (- identifier[rounding] . identifier[w] , literal[int] ))
identifier[rectangle] . identifier[fill] (( literal[int] , literal[int] , literal[int] ), identifier[rect] . identifier[inflate] ( literal[int] ,- identifier[rounding] . identifier[h] ))
identifier[rectangle] . identifier[fill] ( identifier[color] , identifier[special_flags] = identifier[BLEND_RGBA_MAX] )
identifier[rectangle] . identifier[fill] (( literal[int] , literal[int] , literal[int] , identifier[alpha] ), identifier[special_flags] = identifier[BLEND_RGBA_MIN] )
keyword[return] identifier[surface] . identifier[blit] ( identifier[rectangle] , identifier[pos] ) | def roundrect(surface, rect, color, rounding=5, unit=PIXEL):
"""
Draw an antialiased round rectangle on the surface.
surface : destination
rect : rectangle
color : rgb or rgba
radius : 0 <= radius <= 1
:source: http://pygame.org/project-AAfilledRoundedRect-2349-.html
"""
if unit == PERCENT:
rounding = int(min(rect.size) / 2 * rounding / 100) # depends on [control=['if'], data=[]]
rect = pygame.Rect(rect)
color = pygame.Color(*color)
alpha = color.a
color.a = 0
pos = rect.topleft
rect.topleft = (0, 0)
rectangle = pygame.Surface(rect.size, SRCALPHA)
circle = pygame.Surface([min(rect.size) * 3] * 2, SRCALPHA)
pygame.draw.ellipse(circle, (0, 0, 0), circle.get_rect(), 0)
circle = pygame.transform.smoothscale(circle, (rounding, rounding))
rounding = rectangle.blit(circle, (0, 0))
rounding.bottomright = rect.bottomright
rectangle.blit(circle, rounding)
rounding.topright = rect.topright
rectangle.blit(circle, rounding)
rounding.bottomleft = rect.bottomleft
rectangle.blit(circle, rounding)
rectangle.fill((0, 0, 0), rect.inflate(-rounding.w, 0))
rectangle.fill((0, 0, 0), rect.inflate(0, -rounding.h))
rectangle.fill(color, special_flags=BLEND_RGBA_MAX)
rectangle.fill((255, 255, 255, alpha), special_flags=BLEND_RGBA_MIN)
return surface.blit(rectangle, pos) |
def prefix(transformers, default=None):
    """Return a transform function that picks a transformer by URL prefix.

    ``transformers`` maps URL prefixes to transformation actions::

        {'[prefix]': transformation_action,
         ...
        }

    The first prefix (in dict insertion order) that matches the start of the
    request path wins. When nothing matches, ``default`` is used; if no
    default is given the data passes through untouched.
    """
    wrapped = {}
    for url_prefix, action in transformers.items():
        # auto_kwargs lets actions declare only the kwargs they care about;
        # falsy entries are kept as-is so they disable transformation.
        wrapped[url_prefix] = auto_kwargs(action) if action else action
    fallback = auto_kwargs(default) if default else default

    def transform(data, request=None, response=None):
        chosen = fallback
        for url_prefix, action in wrapped.items():
            if request.path.startswith(url_prefix):
                chosen = action
                break
        if chosen:
            return chosen(data)
        return data

    return transform
constant[Returns a different transformer depending on the prefix at the end of the requested URL.
If none match and no default is given no transformation takes place.
should pass in a dict with the following format:
{'[prefix]': transformation_action,
...
}
]
variable[transformers] assign[=] <ast.DictComp object at 0x7da1b1b44f10>
variable[default] assign[=] <ast.BoolOp object at 0x7da1b1b44760>
def function[transform, parameter[data, request, response]]:
variable[path] assign[=] name[request].path
variable[transformer] assign[=] name[default]
for taget[tuple[[<ast.Name object at 0x7da18fe91ff0>, <ast.Name object at 0x7da18fe91d20>]]] in starred[call[name[transformers].items, parameter[]]] begin[:]
if call[name[path].startswith, parameter[name[prefix_test]]] begin[:]
variable[transformer] assign[=] name[prefix_transformer]
break
return[<ast.IfExp object at 0x7da1b1b45720>]
return[name[transform]] | keyword[def] identifier[prefix] ( identifier[transformers] , identifier[default] = keyword[None] ):
literal[string]
identifier[transformers] ={ identifier[prefix] : identifier[auto_kwargs] ( identifier[transformer] ) keyword[if] identifier[transformer] keyword[else] identifier[transformer]
keyword[for] identifier[prefix] , identifier[transformer] keyword[in] identifier[transformers] . identifier[items] ()}
identifier[default] = identifier[default] keyword[and] identifier[auto_kwargs] ( identifier[default] )
keyword[def] identifier[transform] ( identifier[data] , identifier[request] = keyword[None] , identifier[response] = keyword[None] ):
identifier[path] = identifier[request] . identifier[path]
identifier[transformer] = identifier[default]
keyword[for] identifier[prefix_test] , identifier[prefix_transformer] keyword[in] identifier[transformers] . identifier[items] ():
keyword[if] identifier[path] . identifier[startswith] ( identifier[prefix_test] ):
identifier[transformer] = identifier[prefix_transformer]
keyword[break]
keyword[return] identifier[transformer] ( identifier[data] ) keyword[if] identifier[transformer] keyword[else] identifier[data]
keyword[return] identifier[transform] | def prefix(transformers, default=None):
"""Returns a different transformer depending on the prefix at the end of the requested URL.
If none match and no default is given no transformation takes place.
should pass in a dict with the following format:
{'[prefix]': transformation_action,
...
}
"""
transformers = {prefix: auto_kwargs(transformer) if transformer else transformer for (prefix, transformer) in transformers.items()}
default = default and auto_kwargs(default)
def transform(data, request=None, response=None):
path = request.path
transformer = default
for (prefix_test, prefix_transformer) in transformers.items():
if path.startswith(prefix_test):
transformer = prefix_transformer
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return transformer(data) if transformer else data
return transform |
def config(self, name, suffix):
    """Return the config value for ``name``/``suffix``, defaulting to the
    environment.

    The lookup key is ``NAME_SUFFIX`` upper-cased with dashes replaced by
    underscores. Raises ``KeyError`` when the key is in neither the local
    config nor ``os.environ``.
    """
    key = "{}_{}".format(name, suffix).upper().replace("-", "_")
    try:
        return self._config[key]
    except KeyError:
        # Fall back to the process environment.
        return os.environ[key]
constant[Return config variable value, defaulting to environment]
variable[var] assign[=] binary_operation[constant[%s_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b198fbb0>, <ast.Name object at 0x7da1b1800160>]]]
variable[var] assign[=] call[call[name[var].upper, parameter[]].replace, parameter[constant[-], constant[_]]]
if compare[name[var] in name[self]._config] begin[:]
return[call[name[self]._config][name[var]]]
return[call[name[os].environ][name[var]]] | keyword[def] identifier[config] ( identifier[self] , identifier[name] , identifier[suffix] ):
literal[string]
identifier[var] = literal[string] %( identifier[name] , identifier[suffix] )
identifier[var] = identifier[var] . identifier[upper] (). identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[var] keyword[in] identifier[self] . identifier[_config] :
keyword[return] identifier[self] . identifier[_config] [ identifier[var] ]
keyword[return] identifier[os] . identifier[environ] [ identifier[var] ] | def config(self, name, suffix):
"""Return config variable value, defaulting to environment"""
var = '%s_%s' % (name, suffix)
var = var.upper().replace('-', '_')
if var in self._config:
return self._config[var] # depends on [control=['if'], data=['var']]
return os.environ[var] |
def cli_reload(self, event):
    """Experimental call to reload the component tree.

    Forces a reload of all registered components, re-runs initialization,
    and fires a component-graph event so the debugger view refreshes.
    """
    self.log('Reloading all components.')
    # Force a fresh re-import/re-registration of every component.
    self.update_components(forcereload=True)
    initialize()
    # NOTE(review): local import — presumably avoids a circular import
    # between this module and hfos.debugger; confirm before moving it.
    from hfos.debugger import cli_compgraph
    self.fireEvent(cli_compgraph())
constant[Experimental call to reload the component tree]
call[name[self].log, parameter[constant[Reloading all components.]]]
call[name[self].update_components, parameter[]]
call[name[initialize], parameter[]]
from relative_module[hfos.debugger] import module[cli_compgraph]
call[name[self].fireEvent, parameter[call[name[cli_compgraph], parameter[]]]] | keyword[def] identifier[cli_reload] ( identifier[self] , identifier[event] ):
literal[string]
identifier[self] . identifier[log] ( literal[string] )
identifier[self] . identifier[update_components] ( identifier[forcereload] = keyword[True] )
identifier[initialize] ()
keyword[from] identifier[hfos] . identifier[debugger] keyword[import] identifier[cli_compgraph]
identifier[self] . identifier[fireEvent] ( identifier[cli_compgraph] ()) | def cli_reload(self, event):
"""Experimental call to reload the component tree"""
self.log('Reloading all components.')
self.update_components(forcereload=True)
initialize()
from hfos.debugger import cli_compgraph
self.fireEvent(cli_compgraph()) |
def items(self):
    """Yield ``(key, value)`` attribute pairs in ascending key order."""
    attrs = self.attrs
    for name in sorted(attrs):
        yield name, attrs[name]
constant[
A generator yielding ``(key, value)`` attribute pairs, sorted by key name.
]
for taget[name[key]] in starred[call[name[sorted], parameter[name[self].attrs]]] begin[:]
<ast.Yield object at 0x7da1affd6a70> | keyword[def] identifier[items] ( identifier[self] ):
literal[string]
keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[self] . identifier[attrs] ):
keyword[yield] identifier[key] , identifier[self] . identifier[attrs] [ identifier[key] ] | def items(self):
"""
A generator yielding ``(key, value)`` attribute pairs, sorted by key name.
"""
for key in sorted(self.attrs):
yield (key, self.attrs[key]) # depends on [control=['for'], data=['key']] |
def fix_local_scheme(home_dir, symlink=True):
    """
    Platforms that use the "posix_local" install scheme (like Ubuntu with
    Python 2.7) need to be given an additional "local" location, sigh.

    :param home_dir: root of the virtualenv being created
    :param symlink: passed through to ``copyfile`` — link instead of copy
    """
    try:
        import sysconfig
    except ImportError:
        # No sysconfig (very old Python): nothing to fix.
        return
    # sysconfig._get_default_scheme() was renamed to the public
    # get_default_scheme() in Python 3.10; support both spellings so this
    # does not raise AttributeError on modern interpreters.
    get_scheme = getattr(sysconfig, "get_default_scheme", None) or getattr(
        sysconfig, "_get_default_scheme", None
    )
    if get_scheme is None or get_scheme() != "posix_local":
        return
    local_path = os.path.join(home_dir, "local")
    if not os.path.exists(local_path):
        os.mkdir(local_path)
        # Mirror every top-level entry of the env into local/ so the
        # "posix_local" scheme finds bin/, lib/, include/, ... there.
        for subdir_name in os.listdir(home_dir):
            if subdir_name == "local":
                continue
            copyfile(os.path.abspath(os.path.join(home_dir, subdir_name)), \
                os.path.join(local_path, subdir_name), symlink)
constant[
Platforms that use the "posix_local" install scheme (like Ubuntu with
Python 2.7) need to be given an additional "local" location, sigh.
]
<ast.Try object at 0x7da207f017e0> | keyword[def] identifier[fix_local_scheme] ( identifier[home_dir] , identifier[symlink] = keyword[True] ):
literal[string]
keyword[try] :
keyword[import] identifier[sysconfig]
keyword[except] identifier[ImportError] :
keyword[pass]
keyword[else] :
keyword[if] identifier[sysconfig] . identifier[_get_default_scheme] ()== literal[string] :
identifier[local_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[home_dir] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[local_path] ):
identifier[os] . identifier[mkdir] ( identifier[local_path] )
keyword[for] identifier[subdir_name] keyword[in] identifier[os] . identifier[listdir] ( identifier[home_dir] ):
keyword[if] identifier[subdir_name] == literal[string] :
keyword[continue]
identifier[copyfile] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[home_dir] , identifier[subdir_name] )), identifier[os] . identifier[path] . identifier[join] ( identifier[local_path] , identifier[subdir_name] ), identifier[symlink] ) | def fix_local_scheme(home_dir, symlink=True):
"""
Platforms that use the "posix_local" install scheme (like Ubuntu with
Python 2.7) need to be given an additional "local" location, sigh.
"""
try:
import sysconfig # depends on [control=['try'], data=[]]
except ImportError:
pass # depends on [control=['except'], data=[]]
else:
if sysconfig._get_default_scheme() == 'posix_local':
local_path = os.path.join(home_dir, 'local')
if not os.path.exists(local_path):
os.mkdir(local_path)
for subdir_name in os.listdir(home_dir):
if subdir_name == 'local':
continue # depends on [control=['if'], data=[]]
copyfile(os.path.abspath(os.path.join(home_dir, subdir_name)), os.path.join(local_path, subdir_name), symlink) # depends on [control=['for'], data=['subdir_name']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def read(self, n):
    """
    Read and return the next ``n`` bytes from the mapped view, advancing
    the internal position by ``n``.
    """
    # Windows-only: copies raw memory out of the file mapping via
    # kernel32.RtlMoveMemory. ``self.view`` is presumably the base address
    # returned by MapViewOfFile and ``self.pos`` the current byte offset —
    # TODO confirm against the class constructor.
    out = ctypes.create_string_buffer(n)
    ctypes.windll.kernel32.RtlMoveMemory(out, self.view + self.pos, n)
    self.pos += n
    # .raw returns exactly n bytes (no NUL-termination trimming).
    return out.raw
constant[
Read n bytes from mapped view.
]
variable[out] assign[=] call[name[ctypes].create_string_buffer, parameter[name[n]]]
call[name[ctypes].windll.kernel32.RtlMoveMemory, parameter[name[out], binary_operation[name[self].view + name[self].pos], name[n]]]
<ast.AugAssign object at 0x7da1b24ac790>
return[name[out].raw] | keyword[def] identifier[read] ( identifier[self] , identifier[n] ):
literal[string]
identifier[out] = identifier[ctypes] . identifier[create_string_buffer] ( identifier[n] )
identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[RtlMoveMemory] ( identifier[out] , identifier[self] . identifier[view] + identifier[self] . identifier[pos] , identifier[n] )
identifier[self] . identifier[pos] += identifier[n]
keyword[return] identifier[out] . identifier[raw] | def read(self, n):
"""
Read n bytes from mapped view.
"""
out = ctypes.create_string_buffer(n)
ctypes.windll.kernel32.RtlMoveMemory(out, self.view + self.pos, n)
self.pos += n
return out.raw |
def addMenuLabel(menu, text):
    """Add a QLabel containing ``text`` to ``menu`` and return the label."""
    label = QLabel(text, menu)
    label.setAlignment(Qt.AlignCenter)
    label.setFrameShape(QFrame.StyledPanel)
    label.setFrameShadow(QFrame.Sunken)
    # Wrap the label in a QWidgetAction so it can live inside the menu.
    action = QWidgetAction(menu)
    action.setDefaultWidget(label)
    menu.addAction(action)
    return label
constant[Adds a QLabel contaning text to the given menu]
variable[qaw] assign[=] call[name[QWidgetAction], parameter[name[menu]]]
variable[lab] assign[=] call[name[QLabel], parameter[name[text], name[menu]]]
call[name[qaw].setDefaultWidget, parameter[name[lab]]]
call[name[lab].setAlignment, parameter[name[Qt].AlignCenter]]
call[name[lab].setFrameShape, parameter[name[QFrame].StyledPanel]]
call[name[lab].setFrameShadow, parameter[name[QFrame].Sunken]]
call[name[menu].addAction, parameter[name[qaw]]]
return[name[lab]] | keyword[def] identifier[addMenuLabel] ( identifier[menu] , identifier[text] ):
literal[string]
identifier[qaw] = identifier[QWidgetAction] ( identifier[menu] )
identifier[lab] = identifier[QLabel] ( identifier[text] , identifier[menu] )
identifier[qaw] . identifier[setDefaultWidget] ( identifier[lab] )
identifier[lab] . identifier[setAlignment] ( identifier[Qt] . identifier[AlignCenter] )
identifier[lab] . identifier[setFrameShape] ( identifier[QFrame] . identifier[StyledPanel] )
identifier[lab] . identifier[setFrameShadow] ( identifier[QFrame] . identifier[Sunken] )
identifier[menu] . identifier[addAction] ( identifier[qaw] )
keyword[return] identifier[lab] | def addMenuLabel(menu, text):
"""Adds a QLabel contaning text to the given menu"""
qaw = QWidgetAction(menu)
lab = QLabel(text, menu)
qaw.setDefaultWidget(lab)
lab.setAlignment(Qt.AlignCenter)
lab.setFrameShape(QFrame.StyledPanel)
lab.setFrameShadow(QFrame.Sunken)
menu.addAction(qaw)
return lab |
def refresh(self, force: bool = False) -> bool:
    """
    Loads the cauldron.json definition file for the project and populates
    the project with the loaded data. Any existing data will be overwritten,
    if the new definition file differs from the previous one.
    If the project has already loaded with the most recent version of the
    cauldron.json file, this method will return without making any changes
    to the project.
    :param force:
        If true the project will be refreshed even if the project file
        modified timestamp doesn't indicate that it needs to be refreshed.
    :return:
        Whether or not a refresh was needed and carried out
    """
    lm = self.last_modified
    is_newer = lm is not None and lm >= os.path.getmtime(self.source_path)
    if not force and is_newer:
        # Already up to date with the file on disk.
        return False
    old_definition = self.settings.fetch(None)
    new_definition = definitions.load_project_definition(
        self.source_directory
    )
    if not force and old_definition == new_definition:
        return False
    self.settings.clear().put(**new_definition)
    old_step_definitions = old_definition.get('steps', [])
    new_step_definitions = new_definition.get('steps', [])
    if not force and old_step_definitions == new_step_definitions:
        # Settings changed but the step list did not; keep existing steps.
        return True
    old_steps = self.steps
    self.steps = []
    for step_data in new_step_definitions:
        # Re-use the existing step object when an identical definition was
        # already loaded; otherwise build a fresh step. A single
        # list.index call replaces the previous comprehension + index
        # double scan and has the same "first match" semantics.
        try:
            index = old_step_definitions.index(step_data)
        except ValueError:
            self.add_step(step_data)
        else:
            self.steps.append(old_steps[index])
    self.last_modified = time.time()
    return True
constant[
Loads the cauldron.json definition file for the project and populates
the project with the loaded data. Any existing data will be overwritten,
if the new definition file differs from the previous one.
If the project has already loaded with the most recent version of the
cauldron.json file, this method will return without making any changes
to the project.
:param force:
If true the project will be refreshed even if the project file
modified timestamp doesn't indicate that it needs to be refreshed.
:return:
Whether or not a refresh was needed and carried out
]
variable[lm] assign[=] name[self].last_modified
variable[is_newer] assign[=] <ast.BoolOp object at 0x7da18c4ce6b0>
if <ast.BoolOp object at 0x7da18c4cffa0> begin[:]
return[constant[False]]
variable[old_definition] assign[=] call[name[self].settings.fetch, parameter[constant[None]]]
variable[new_definition] assign[=] call[name[definitions].load_project_definition, parameter[name[self].source_directory]]
if <ast.BoolOp object at 0x7da18c4cc940> begin[:]
return[constant[False]]
call[call[name[self].settings.clear, parameter[]].put, parameter[]]
variable[old_step_definitions] assign[=] call[name[old_definition].get, parameter[constant[steps], list[[]]]]
variable[new_step_definitions] assign[=] call[name[new_definition].get, parameter[constant[steps], list[[]]]]
if <ast.BoolOp object at 0x7da18c4ce9b0> begin[:]
return[constant[True]]
variable[old_steps] assign[=] name[self].steps
name[self].steps assign[=] list[[]]
for taget[name[step_data]] in starred[name[new_step_definitions]] begin[:]
variable[matches] assign[=] <ast.ListComp object at 0x7da1b1b6b3d0>
if compare[call[name[len], parameter[name[matches]]] greater[>] constant[0]] begin[:]
variable[index] assign[=] call[name[old_step_definitions].index, parameter[call[name[matches]][constant[0]]]]
call[name[self].steps.append, parameter[call[name[old_steps]][name[index]]]]
name[self].last_modified assign[=] call[name[time].time, parameter[]]
return[constant[True]] | keyword[def] identifier[refresh] ( identifier[self] , identifier[force] : identifier[bool] = keyword[False] )-> identifier[bool] :
literal[string]
identifier[lm] = identifier[self] . identifier[last_modified]
identifier[is_newer] = identifier[lm] keyword[is] keyword[not] keyword[None] keyword[and] identifier[lm] >= identifier[os] . identifier[path] . identifier[getmtime] ( identifier[self] . identifier[source_path] )
keyword[if] keyword[not] identifier[force] keyword[and] identifier[is_newer] :
keyword[return] keyword[False]
identifier[old_definition] = identifier[self] . identifier[settings] . identifier[fetch] ( keyword[None] )
identifier[new_definition] = identifier[definitions] . identifier[load_project_definition] (
identifier[self] . identifier[source_directory]
)
keyword[if] keyword[not] identifier[force] keyword[and] identifier[old_definition] == identifier[new_definition] :
keyword[return] keyword[False]
identifier[self] . identifier[settings] . identifier[clear] (). identifier[put] (** identifier[new_definition] )
identifier[old_step_definitions] = identifier[old_definition] . identifier[get] ( literal[string] ,[])
identifier[new_step_definitions] = identifier[new_definition] . identifier[get] ( literal[string] ,[])
keyword[if] keyword[not] identifier[force] keyword[and] identifier[old_step_definitions] == identifier[new_step_definitions] :
keyword[return] keyword[True]
identifier[old_steps] = identifier[self] . identifier[steps]
identifier[self] . identifier[steps] =[]
keyword[for] identifier[step_data] keyword[in] identifier[new_step_definitions] :
identifier[matches] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[old_step_definitions] keyword[if] identifier[s] == identifier[step_data] ]
keyword[if] identifier[len] ( identifier[matches] )> literal[int] :
identifier[index] = identifier[old_step_definitions] . identifier[index] ( identifier[matches] [ literal[int] ])
identifier[self] . identifier[steps] . identifier[append] ( identifier[old_steps] [ identifier[index] ])
keyword[else] :
identifier[self] . identifier[add_step] ( identifier[step_data] )
identifier[self] . identifier[last_modified] = identifier[time] . identifier[time] ()
keyword[return] keyword[True] | def refresh(self, force: bool=False) -> bool:
"""
Loads the cauldron.json definition file for the project and populates
the project with the loaded data. Any existing data will be overwritten,
if the new definition file differs from the previous one.
If the project has already loaded with the most recent version of the
cauldron.json file, this method will return without making any changes
to the project.
:param force:
If true the project will be refreshed even if the project file
modified timestamp doesn't indicate that it needs to be refreshed.
:return:
Whether or not a refresh was needed and carried out
"""
lm = self.last_modified
is_newer = lm is not None and lm >= os.path.getmtime(self.source_path)
if not force and is_newer:
return False # depends on [control=['if'], data=[]]
old_definition = self.settings.fetch(None)
new_definition = definitions.load_project_definition(self.source_directory)
if not force and old_definition == new_definition:
return False # depends on [control=['if'], data=[]]
self.settings.clear().put(**new_definition)
old_step_definitions = old_definition.get('steps', [])
new_step_definitions = new_definition.get('steps', [])
if not force and old_step_definitions == new_step_definitions:
return True # depends on [control=['if'], data=[]]
old_steps = self.steps
self.steps = []
for step_data in new_step_definitions:
matches = [s for s in old_step_definitions if s == step_data]
if len(matches) > 0:
index = old_step_definitions.index(matches[0])
self.steps.append(old_steps[index]) # depends on [control=['if'], data=[]]
else:
self.add_step(step_data) # depends on [control=['for'], data=['step_data']]
self.last_modified = time.time()
return True |
def _read_embeddings_from_hdf5(embeddings_filename: str,
                               embedding_dim: int,
                               vocab: Vocabulary,
                               namespace: str = "tokens") -> torch.FloatTensor:
    """
    Reads from a hdf5 formatted file. The embedding matrix is assumed to
    be keyed by 'embedding' and of size ``(num_tokens, embedding_dim)``.
    """
    with h5py.File(embeddings_filename, 'r') as fin:
        embeddings = fin['embedding'][...]
    actual_shape = list(embeddings.shape)
    expected_shape = [vocab.get_vocab_size(namespace), embedding_dim]
    if actual_shape != expected_shape:
        raise ConfigurationError(
            "Read shape {0} embeddings from the file, but expected {1}".format(
                actual_shape, expected_shape))
    return torch.FloatTensor(embeddings)
constant[
Reads from a hdf5 formatted file. The embedding matrix is assumed to
be keyed by 'embedding' and of size ``(num_tokens, embedding_dim)``.
]
with call[name[h5py].File, parameter[name[embeddings_filename], constant[r]]] begin[:]
variable[embeddings] assign[=] call[call[name[fin]][constant[embedding]]][constant[Ellipsis]]
if compare[call[name[list], parameter[name[embeddings].shape]] not_equal[!=] list[[<ast.Call object at 0x7da18fe92410>, <ast.Name object at 0x7da20c993a90>]]] begin[:]
<ast.Raise object at 0x7da20c990970>
return[call[name[torch].FloatTensor, parameter[name[embeddings]]]] | keyword[def] identifier[_read_embeddings_from_hdf5] ( identifier[embeddings_filename] : identifier[str] ,
identifier[embedding_dim] : identifier[int] ,
identifier[vocab] : identifier[Vocabulary] ,
identifier[namespace] : identifier[str] = literal[string] )-> identifier[torch] . identifier[FloatTensor] :
literal[string]
keyword[with] identifier[h5py] . identifier[File] ( identifier[embeddings_filename] , literal[string] ) keyword[as] identifier[fin] :
identifier[embeddings] = identifier[fin] [ literal[string] ][...]
keyword[if] identifier[list] ( identifier[embeddings] . identifier[shape] )!=[ identifier[vocab] . identifier[get_vocab_size] ( identifier[namespace] ), identifier[embedding_dim] ]:
keyword[raise] identifier[ConfigurationError] (
literal[string] . identifier[format] (
identifier[list] ( identifier[embeddings] . identifier[shape] ),[ identifier[vocab] . identifier[get_vocab_size] ( identifier[namespace] ), identifier[embedding_dim] ]))
keyword[return] identifier[torch] . identifier[FloatTensor] ( identifier[embeddings] ) | def _read_embeddings_from_hdf5(embeddings_filename: str, embedding_dim: int, vocab: Vocabulary, namespace: str='tokens') -> torch.FloatTensor:
"""
Reads from a hdf5 formatted file. The embedding matrix is assumed to
be keyed by 'embedding' and of size ``(num_tokens, embedding_dim)``.
"""
with h5py.File(embeddings_filename, 'r') as fin:
embeddings = fin['embedding'][...] # depends on [control=['with'], data=['fin']]
if list(embeddings.shape) != [vocab.get_vocab_size(namespace), embedding_dim]:
raise ConfigurationError('Read shape {0} embeddings from the file, but expected {1}'.format(list(embeddings.shape), [vocab.get_vocab_size(namespace), embedding_dim])) # depends on [control=['if'], data=[]]
return torch.FloatTensor(embeddings) |
def google_register(username:str, email:str, full_name:str, google_id:int, bio:str, token:str=None):
    """
    Register a new user from google.
    This can raise `exc.IntegrityError` exceptions in
    case of conflicts found.
    :param token: optional membership-invitation token; when given, the
        matching membership is bound to the resolved user.
    :returns: User
    """
    auth_data_model = apps.get_model("users", "AuthData")
    user_model = apps.get_model("users", "User")
    try:
        # Google user association exist?
        auth_data = auth_data_model.objects.get(key="google", value=google_id)
        user = auth_data.user
    except auth_data_model.DoesNotExist:
        try:
            # Is a user with the same email as the google user?
            # If so, just link the google id to that existing account.
            user = user_model.objects.get(email=email)
            auth_data_model.objects.create(user=user, key="google", value=google_id, extra={})
        except user_model.DoesNotExist:
            # Create a new user
            username_unique = slugify_uniquely(username, user_model, slugfield="username")
            user = user_model.objects.create(email=email,
                                             username=username_unique,
                                             full_name=full_name,
                                             bio=bio)
            auth_data_model.objects.create(user=user, key="google", value=google_id, extra={})
            # Welcome email + registration signal fire only for brand-new
            # accounts, not when linking to an existing one.
            send_register_email(user)
            user_registered_signal.send(sender=user.__class__, user=user)
    if token:
        membership = get_membership_by_token(token)
        membership.user = user
        membership.save(update_fields=["user"])
    return user
constant[
Register a new user from google.
This can raise `exc.IntegrityError` exceptions in
case of conflics found.
:returns: User
]
variable[auth_data_model] assign[=] call[name[apps].get_model, parameter[constant[users], constant[AuthData]]]
variable[user_model] assign[=] call[name[apps].get_model, parameter[constant[users], constant[User]]]
<ast.Try object at 0x7da18f722710>
if name[token] begin[:]
variable[membership] assign[=] call[name[get_membership_by_token], parameter[name[token]]]
name[membership].user assign[=] name[user]
call[name[membership].save, parameter[]]
return[name[user]] | keyword[def] identifier[google_register] ( identifier[username] : identifier[str] , identifier[email] : identifier[str] , identifier[full_name] : identifier[str] , identifier[google_id] : identifier[int] , identifier[bio] : identifier[str] , identifier[token] : identifier[str] = keyword[None] ):
literal[string]
identifier[auth_data_model] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] )
identifier[user_model] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] )
keyword[try] :
identifier[auth_data] = identifier[auth_data_model] . identifier[objects] . identifier[get] ( identifier[key] = literal[string] , identifier[value] = identifier[google_id] )
identifier[user] = identifier[auth_data] . identifier[user]
keyword[except] identifier[auth_data_model] . identifier[DoesNotExist] :
keyword[try] :
identifier[user] = identifier[user_model] . identifier[objects] . identifier[get] ( identifier[email] = identifier[email] )
identifier[auth_data_model] . identifier[objects] . identifier[create] ( identifier[user] = identifier[user] , identifier[key] = literal[string] , identifier[value] = identifier[google_id] , identifier[extra] ={})
keyword[except] identifier[user_model] . identifier[DoesNotExist] :
identifier[username_unique] = identifier[slugify_uniquely] ( identifier[username] , identifier[user_model] , identifier[slugfield] = literal[string] )
identifier[user] = identifier[user_model] . identifier[objects] . identifier[create] ( identifier[email] = identifier[email] ,
identifier[username] = identifier[username_unique] ,
identifier[full_name] = identifier[full_name] ,
identifier[bio] = identifier[bio] )
identifier[auth_data_model] . identifier[objects] . identifier[create] ( identifier[user] = identifier[user] , identifier[key] = literal[string] , identifier[value] = identifier[google_id] , identifier[extra] ={})
identifier[send_register_email] ( identifier[user] )
identifier[user_registered_signal] . identifier[send] ( identifier[sender] = identifier[user] . identifier[__class__] , identifier[user] = identifier[user] )
keyword[if] identifier[token] :
identifier[membership] = identifier[get_membership_by_token] ( identifier[token] )
identifier[membership] . identifier[user] = identifier[user]
identifier[membership] . identifier[save] ( identifier[update_fields] =[ literal[string] ])
keyword[return] identifier[user] | def google_register(username: str, email: str, full_name: str, google_id: int, bio: str, token: str=None):
"""
Register a new user from google.
This can raise `exc.IntegrityError` exceptions in
case of conflics found.
:returns: User
"""
auth_data_model = apps.get_model('users', 'AuthData')
user_model = apps.get_model('users', 'User')
try:
# Google user association exist?
auth_data = auth_data_model.objects.get(key='google', value=google_id)
user = auth_data.user # depends on [control=['try'], data=[]]
except auth_data_model.DoesNotExist:
try:
# Is a user with the same email as the google user?
user = user_model.objects.get(email=email)
auth_data_model.objects.create(user=user, key='google', value=google_id, extra={}) # depends on [control=['try'], data=[]]
except user_model.DoesNotExist:
# Create a new user
username_unique = slugify_uniquely(username, user_model, slugfield='username')
user = user_model.objects.create(email=email, username=username_unique, full_name=full_name, bio=bio)
auth_data_model.objects.create(user=user, key='google', value=google_id, extra={})
send_register_email(user)
user_registered_signal.send(sender=user.__class__, user=user) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
if token:
membership = get_membership_by_token(token)
membership.user = user
membership.save(update_fields=['user']) # depends on [control=['if'], data=[]]
return user |
def mc_sample_path(P, init=0, sample_size=1000, random_state=None):
    """
    Generates one sample path from the Markov chain represented by
    (n x n) transition matrix P on state space S = {{0,...,n-1}}.
    Parameters
    ----------
    P : array_like(float, ndim=2)
        A Markov transition matrix.
    init : array_like(float ndim=1) or scalar(int), optional(default=0)
        If init is an array_like, then it is treated as the initial
        distribution across states. If init is a scalar, then it
        treated as the deterministic initial state.
    sample_size : scalar(int), optional(default=1000)
        The length of the sample path.
    random_state : int or np.random.RandomState, optional
        Random seed (integer) or np.random.RandomState instance to set
        the initial state of the random number generator for
        reproducibility. If None, a randomly initialized RandomState is
        used.
    Returns
    -------
    X : array_like(int, ndim=1)
        The simulation of states.
    """
    random_state = check_random_state(random_state)
    if isinstance(init, numbers.Integral):
        # Deterministic initial state.
        initial_state = init
    else:
        # Draw the initial state from the given distribution via its CDF.
        cdf = np.cumsum(init)
        initial_state = searchsorted(cdf, random_state.random_sample())
    return MarkovChain(P).simulate(ts_length=sample_size,
                                   init=initial_state,
                                   random_state=random_state)
constant[
Generates one sample path from the Markov chain represented by
(n x n) transition matrix P on state space S = {{0,...,n-1}}.
Parameters
----------
P : array_like(float, ndim=2)
A Markov transition matrix.
init : array_like(float ndim=1) or scalar(int), optional(default=0)
If init is an array_like, then it is treated as the initial
distribution across states. If init is a scalar, then it
treated as the deterministic initial state.
sample_size : scalar(int), optional(default=1000)
The length of the sample path.
random_state : int or np.random.RandomState, optional
Random seed (integer) or np.random.RandomState instance to set
the initial state of the random number generator for
reproducibility. If None, a randomly initialized RandomState is
used.
Returns
-------
X : array_like(int, ndim=1)
The simulation of states.
]
variable[random_state] assign[=] call[name[check_random_state], parameter[name[random_state]]]
if call[name[isinstance], parameter[name[init], name[numbers].Integral]] begin[:]
variable[X_0] assign[=] name[init]
variable[mc] assign[=] call[name[MarkovChain], parameter[name[P]]]
return[call[name[mc].simulate, parameter[]]] | keyword[def] identifier[mc_sample_path] ( identifier[P] , identifier[init] = literal[int] , identifier[sample_size] = literal[int] , identifier[random_state] = keyword[None] ):
literal[string]
identifier[random_state] = identifier[check_random_state] ( identifier[random_state] )
keyword[if] identifier[isinstance] ( identifier[init] , identifier[numbers] . identifier[Integral] ):
identifier[X_0] = identifier[init]
keyword[else] :
identifier[cdf0] = identifier[np] . identifier[cumsum] ( identifier[init] )
identifier[u_0] = identifier[random_state] . identifier[random_sample] ()
identifier[X_0] = identifier[searchsorted] ( identifier[cdf0] , identifier[u_0] )
identifier[mc] = identifier[MarkovChain] ( identifier[P] )
keyword[return] identifier[mc] . identifier[simulate] ( identifier[ts_length] = identifier[sample_size] , identifier[init] = identifier[X_0] ,
identifier[random_state] = identifier[random_state] ) | def mc_sample_path(P, init=0, sample_size=1000, random_state=None):
"""
Generates one sample path from the Markov chain represented by
(n x n) transition matrix P on state space S = {{0,...,n-1}}.
Parameters
----------
P : array_like(float, ndim=2)
A Markov transition matrix.
init : array_like(float ndim=1) or scalar(int), optional(default=0)
If init is an array_like, then it is treated as the initial
distribution across states. If init is a scalar, then it
treated as the deterministic initial state.
sample_size : scalar(int), optional(default=1000)
The length of the sample path.
random_state : int or np.random.RandomState, optional
Random seed (integer) or np.random.RandomState instance to set
the initial state of the random number generator for
reproducibility. If None, a randomly initialized RandomState is
used.
Returns
-------
X : array_like(int, ndim=1)
The simulation of states.
"""
random_state = check_random_state(random_state)
if isinstance(init, numbers.Integral):
X_0 = init # depends on [control=['if'], data=[]]
else:
cdf0 = np.cumsum(init)
u_0 = random_state.random_sample()
X_0 = searchsorted(cdf0, u_0)
mc = MarkovChain(P)
return mc.simulate(ts_length=sample_size, init=X_0, random_state=random_state) |
def ping(self, params=None):
    """Check whether the cluster is reachable.

    Issues a ``HEAD /`` request; any transport-level failure is
    reported as ``False`` rather than raised.

    `<http://www.elastic.co/guide/>`_
    """
    try:
        response = self.transport.perform_request("HEAD", "/", params=params)
    except TransportError:
        return False
    return response
constant[
Returns True if the cluster is up, False otherwise.
`<http://www.elastic.co/guide/>`_
]
<ast.Try object at 0x7da1b2199c30> | keyword[def] identifier[ping] ( identifier[self] , identifier[params] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[transport] . identifier[perform_request] ( literal[string] , literal[string] , identifier[params] = identifier[params] )
keyword[except] identifier[TransportError] :
keyword[return] keyword[False] | def ping(self, params=None):
"""
Returns True if the cluster is up, False otherwise.
`<http://www.elastic.co/guide/>`_
"""
try:
return self.transport.perform_request('HEAD', '/', params=params) # depends on [control=['try'], data=[]]
except TransportError:
return False # depends on [control=['except'], data=[]] |
def _loc_to_file_path(self, path, environ=None):
    """Convert a resource path to a unicode absolute file path.

    :param path: resource path relative to ``self.root_folder_path``
        (forward-slash separated).
    :param environ: optional WSGI environ; may be useful e.g. in
        relation to per-user sub-folder chrooting inside
        root_folder_path.
    :raises RuntimeError: if the resolved path escapes the share root
        (e.g. via ``..`` segments).
    """
    root_path = self.root_folder_path
    assert root_path is not None
    assert compat.is_native(root_path)
    assert compat.is_native(path)
    path_parts = path.strip("/").split("/")
    file_path = os.path.abspath(os.path.join(root_path, *path_parts))
    # A plain prefix test would also accept sibling folders such as
    # "/root-evil" for root "/root"; require an exact match or a path
    # separator immediately after the root to block traversal.
    if file_path != root_path and not file_path.startswith(
        root_path.rstrip(os.sep) + os.sep
    ):
        raise RuntimeError(
            "Security exception: tried to access file outside root: {}".format(
                file_path
            )
        )
    # Convert to unicode
    file_path = util.to_unicode_safe(file_path)
    return file_path
constant[Convert resource path to a unicode absolute file path.
Optional environ argument may be useful e.g. in relation to per-user
sub-folder chrooting inside root_folder_path.
]
variable[root_path] assign[=] name[self].root_folder_path
assert[compare[name[root_path] is_not constant[None]]]
assert[call[name[compat].is_native, parameter[name[root_path]]]]
assert[call[name[compat].is_native, parameter[name[path]]]]
variable[path_parts] assign[=] call[call[name[path].strip, parameter[constant[/]]].split, parameter[constant[/]]]
variable[file_path] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[name[root_path], <ast.Starred object at 0x7da1b0003dc0>]]]]
if <ast.UnaryOp object at 0x7da1b0003a60> begin[:]
<ast.Raise object at 0x7da1b0003280>
variable[file_path] assign[=] call[name[util].to_unicode_safe, parameter[name[file_path]]]
return[name[file_path]] | keyword[def] identifier[_loc_to_file_path] ( identifier[self] , identifier[path] , identifier[environ] = keyword[None] ):
literal[string]
identifier[root_path] = identifier[self] . identifier[root_folder_path]
keyword[assert] identifier[root_path] keyword[is] keyword[not] keyword[None]
keyword[assert] identifier[compat] . identifier[is_native] ( identifier[root_path] )
keyword[assert] identifier[compat] . identifier[is_native] ( identifier[path] )
identifier[path_parts] = identifier[path] . identifier[strip] ( literal[string] ). identifier[split] ( literal[string] )
identifier[file_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root_path] ,* identifier[path_parts] ))
keyword[if] keyword[not] identifier[file_path] . identifier[startswith] ( identifier[root_path] ):
keyword[raise] identifier[RuntimeError] (
literal[string] . identifier[format] (
identifier[file_path]
)
)
identifier[file_path] = identifier[util] . identifier[to_unicode_safe] ( identifier[file_path] )
keyword[return] identifier[file_path] | def _loc_to_file_path(self, path, environ=None):
"""Convert resource path to a unicode absolute file path.
Optional environ argument may be useful e.g. in relation to per-user
sub-folder chrooting inside root_folder_path.
"""
root_path = self.root_folder_path
assert root_path is not None
assert compat.is_native(root_path)
assert compat.is_native(path)
path_parts = path.strip('/').split('/')
file_path = os.path.abspath(os.path.join(root_path, *path_parts))
if not file_path.startswith(root_path):
raise RuntimeError('Security exception: tried to access file outside root: {}'.format(file_path)) # depends on [control=['if'], data=[]]
# Convert to unicode
file_path = util.to_unicode_safe(file_path)
return file_path |
def do_finalize():
    """Runs finalize phase; run after all builds are complete and all modules
    have been stopped.
    """
    def _finalize(shutit):
        # Halt every module before finalizing anything.
        shutit.stop_all()
        # Finalize in reverse order
        shutit.log('PHASE: finalizing object ' + str(shutit), level=logging.DEBUG)
        # Login at least once to get the exports.
        for module_id in shutit.module_ids(rev=True):
            # Skip modules that are not believed to be installed.
            if not shutit.is_installed(shutit.shutit_map[module_id]):
                continue
            shutit.login(prompt_prefix=module_id, command=shutit_global.shutit_global_object.bash_startup_command, echo=False)
            finalized_ok = shutit.shutit_map[module_id].finalize(shutit)
            if not finalized_ok:
                shutit.fail(module_id + ' failed on finalize', shutit_pexpect_child=shutit.get_shutit_pexpect_session_from_id('target_child').pexpect_child) # pragma: no cover
            shutit.logout(echo=False)
    for fshutit in shutit_global.shutit_global_object.shutit_objects:
        _finalize(fshutit)
constant[Runs finalize phase; run after all builds are complete and all modules
have been stopped.
]
def function[_finalize, parameter[shutit]]:
call[name[shutit].stop_all, parameter[]]
call[name[shutit].log, parameter[binary_operation[constant[PHASE: finalizing object ] + call[name[str], parameter[name[shutit]]]]]]
for taget[name[module_id]] in starred[call[name[shutit].module_ids, parameter[]]] begin[:]
if call[name[shutit].is_installed, parameter[call[name[shutit].shutit_map][name[module_id]]]] begin[:]
call[name[shutit].login, parameter[]]
if <ast.UnaryOp object at 0x7da18f810d00> begin[:]
call[name[shutit].fail, parameter[binary_operation[name[module_id] + constant[ failed on finalize]]]]
call[name[shutit].logout, parameter[]]
for taget[name[fshutit]] in starred[name[shutit_global].shutit_global_object.shutit_objects] begin[:]
call[name[_finalize], parameter[name[fshutit]]] | keyword[def] identifier[do_finalize] ():
literal[string]
keyword[def] identifier[_finalize] ( identifier[shutit] ):
identifier[shutit] . identifier[stop_all] ()
identifier[shutit] . identifier[log] ( literal[string] + identifier[str] ( identifier[shutit] ), identifier[level] = identifier[logging] . identifier[DEBUG] )
keyword[for] identifier[module_id] keyword[in] identifier[shutit] . identifier[module_ids] ( identifier[rev] = keyword[True] ):
keyword[if] identifier[shutit] . identifier[is_installed] ( identifier[shutit] . identifier[shutit_map] [ identifier[module_id] ]):
identifier[shutit] . identifier[login] ( identifier[prompt_prefix] = identifier[module_id] , identifier[command] = identifier[shutit_global] . identifier[shutit_global_object] . identifier[bash_startup_command] , identifier[echo] = keyword[False] )
keyword[if] keyword[not] identifier[shutit] . identifier[shutit_map] [ identifier[module_id] ]. identifier[finalize] ( identifier[shutit] ):
identifier[shutit] . identifier[fail] ( identifier[module_id] + literal[string] , identifier[shutit_pexpect_child] = identifier[shutit] . identifier[get_shutit_pexpect_session_from_id] ( literal[string] ). identifier[pexpect_child] )
identifier[shutit] . identifier[logout] ( identifier[echo] = keyword[False] )
keyword[for] identifier[fshutit] keyword[in] identifier[shutit_global] . identifier[shutit_global_object] . identifier[shutit_objects] :
identifier[_finalize] ( identifier[fshutit] ) | def do_finalize():
"""Runs finalize phase; run after all builds are complete and all modules
have been stopped.
"""
def _finalize(shutit): # Stop all the modules
shutit.stop_all() # Finalize in reverse order
shutit.log('PHASE: finalizing object ' + str(shutit), level=logging.DEBUG) # Login at least once to get the exports.
for module_id in shutit.module_ids(rev=True): # Only finalize if it's thought to be installed.
if shutit.is_installed(shutit.shutit_map[module_id]):
shutit.login(prompt_prefix=module_id, command=shutit_global.shutit_global_object.bash_startup_command, echo=False)
if not shutit.shutit_map[module_id].finalize(shutit):
shutit.fail(module_id + ' failed on finalize', shutit_pexpect_child=shutit.get_shutit_pexpect_session_from_id('target_child').pexpect_child) # pragma: no cover # depends on [control=['if'], data=[]]
shutit.logout(echo=False) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['module_id']]
for fshutit in shutit_global.shutit_global_object.shutit_objects:
_finalize(fshutit) # depends on [control=['for'], data=['fshutit']] |
async def removeHook(self, *args, **kwargs):
    """
    Delete a hook

    This endpoint will remove a hook definition.

    This method is ``stable``
    """
    endpoint = self.funcinfo["removeHook"]
    return await self._makeApiCall(endpoint, *args, **kwargs)
literal[string]
keyword[return] keyword[await] identifier[self] . identifier[_makeApiCall] ( identifier[self] . identifier[funcinfo] [ literal[string] ],* identifier[args] ,** identifier[kwargs] ) | async def removeHook(self, *args, **kwargs):
"""
Delete a hook
This endpoint will remove a hook definition.
This method is ``stable``
"""
return await self._makeApiCall(self.funcinfo['removeHook'], *args, **kwargs) |
def _cleanup_temporary_files(self):
# type: (Downloader) -> None
"""Cleanup temporary files in case of an exception or interrupt.
This function is not thread-safe.
:param Downloader self: this
"""
# iterate through dd map and cleanup files
for key in self._dd_map:
dd = self._dd_map[key]
try:
dd.cleanup_all_temporary_files()
except Exception as e:
logger.exception(e) | def function[_cleanup_temporary_files, parameter[self]]:
constant[Cleanup temporary files in case of an exception or interrupt.
This function is not thread-safe.
:param Downloader self: this
]
for taget[name[key]] in starred[name[self]._dd_map] begin[:]
variable[dd] assign[=] call[name[self]._dd_map][name[key]]
<ast.Try object at 0x7da20c76e740> | keyword[def] identifier[_cleanup_temporary_files] ( identifier[self] ):
literal[string]
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_dd_map] :
identifier[dd] = identifier[self] . identifier[_dd_map] [ identifier[key] ]
keyword[try] :
identifier[dd] . identifier[cleanup_all_temporary_files] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[exception] ( identifier[e] ) | def _cleanup_temporary_files(self):
# type: (Downloader) -> None
'Cleanup temporary files in case of an exception or interrupt.\n This function is not thread-safe.\n :param Downloader self: this\n '
# iterate through dd map and cleanup files
for key in self._dd_map:
dd = self._dd_map[key]
try:
dd.cleanup_all_temporary_files() # depends on [control=['try'], data=[]]
except Exception as e:
logger.exception(e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['key']] |
def _domain_event_migration_iteration_cb(conn, domain, iteration, opaque):
    '''
    Domain migration iteration events handler
    '''
    event_data = {'iteration': iteration}
    _salt_send_domain_event(opaque, conn, domain, opaque['event'], event_data)
constant[
Domain migration iteration events handler
]
call[name[_salt_send_domain_event], parameter[name[opaque], name[conn], name[domain], call[name[opaque]][constant[event]], dictionary[[<ast.Constant object at 0x7da18c4cc6a0>], [<ast.Name object at 0x7da18c4cc340>]]]] | keyword[def] identifier[_domain_event_migration_iteration_cb] ( identifier[conn] , identifier[domain] , identifier[iteration] , identifier[opaque] ):
literal[string]
identifier[_salt_send_domain_event] ( identifier[opaque] , identifier[conn] , identifier[domain] , identifier[opaque] [ literal[string] ],{
literal[string] : identifier[iteration]
}) | def _domain_event_migration_iteration_cb(conn, domain, iteration, opaque):
"""
Domain migration iteration events handler
"""
_salt_send_domain_event(opaque, conn, domain, opaque['event'], {'iteration': iteration}) |
def add(self, template, resource, name=None):
    """Add a route to a resource.

    The optional `name` assigns a name to this route that can be used when
    building URLs. The name must be unique within this Mapper object.
    """
    # Standalone handler functions are wrapped in a Resource first.
    if hasattr(resource, '_rhino_meta'):
        target = Resource(resource)
    else:
        target = resource
    route = Route(template, target, name=name, ranges=self.ranges)
    # Multiple routes may share an object id; lookups always resolve to
    # the first route registered for that object.
    obj_id = id(resource)
    self._lookup.setdefault(obj_id, route)
    if name is not None:
        if name in self.named_routes:
            raise InvalidArgumentError("A route named '%s' already exists in this %s object."
                                       % (name, self.__class__.__name__))
        self.named_routes[name] = route
    self.routes.append(route)
constant[Add a route to a resource.
The optional `name` assigns a name to this route that can be used when
building URLs. The name must be unique within this Mapper object.
]
if call[name[hasattr], parameter[name[resource], constant[_rhino_meta]]] begin[:]
variable[route] assign[=] call[name[Route], parameter[name[template], call[name[Resource], parameter[name[resource]]]]]
variable[obj_id] assign[=] call[name[id], parameter[name[resource]]]
if compare[name[obj_id] <ast.NotIn object at 0x7da2590d7190> name[self]._lookup] begin[:]
call[name[self]._lookup][name[obj_id]] assign[=] name[route]
if compare[name[name] is_not constant[None]] begin[:]
if compare[name[name] in name[self].named_routes] begin[:]
<ast.Raise object at 0x7da20c76c5e0>
call[name[self].named_routes][name[name]] assign[=] name[route]
call[name[self].routes.append, parameter[name[route]]] | keyword[def] identifier[add] ( identifier[self] , identifier[template] , identifier[resource] , identifier[name] = keyword[None] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[resource] , literal[string] ):
identifier[route] = identifier[Route] (
identifier[template] , identifier[Resource] ( identifier[resource] ), identifier[name] = identifier[name] , identifier[ranges] = identifier[self] . identifier[ranges] )
keyword[else] :
identifier[route] = identifier[Route] (
identifier[template] , identifier[resource] , identifier[name] = identifier[name] , identifier[ranges] = identifier[self] . identifier[ranges] )
identifier[obj_id] = identifier[id] ( identifier[resource] )
keyword[if] identifier[obj_id] keyword[not] keyword[in] identifier[self] . identifier[_lookup] :
identifier[self] . identifier[_lookup] [ identifier[obj_id] ]= identifier[route]
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[named_routes] :
keyword[raise] identifier[InvalidArgumentError] ( literal[string]
%( identifier[name] , identifier[self] . identifier[__class__] . identifier[__name__] ))
identifier[self] . identifier[named_routes] [ identifier[name] ]= identifier[route]
identifier[self] . identifier[routes] . identifier[append] ( identifier[route] ) | def add(self, template, resource, name=None):
"""Add a route to a resource.
The optional `name` assigns a name to this route that can be used when
building URLs. The name must be unique within this Mapper object.
"""
# Special case for standalone handler functions
if hasattr(resource, '_rhino_meta'):
route = Route(template, Resource(resource), name=name, ranges=self.ranges) # depends on [control=['if'], data=[]]
else:
route = Route(template, resource, name=name, ranges=self.ranges)
obj_id = id(resource)
if obj_id not in self._lookup:
# It's ok to have multiple routes for the same object id, the
# lookup will return the first one.
self._lookup[obj_id] = route # depends on [control=['if'], data=['obj_id']]
if name is not None:
if name in self.named_routes:
raise InvalidArgumentError("A route named '%s' already exists in this %s object." % (name, self.__class__.__name__)) # depends on [control=['if'], data=['name']]
self.named_routes[name] = route # depends on [control=['if'], data=['name']]
self.routes.append(route) |
def send_messages(self, email_messages):
    """Sends one or more EmailMessage objects and returns the number of
    email messages sent.
    """
    if not email_messages:
        return
    # Count only the messages the backend actually accepted.
    return sum(1 for message in email_messages if self._send(message))
constant[Sends one or more EmailMessage objects and returns the number of
email messages sent.
]
if <ast.UnaryOp object at 0x7da1b1a2f880> begin[:]
return[None]
variable[num_sent] assign[=] constant[0]
for taget[name[message]] in starred[name[email_messages]] begin[:]
if call[name[self]._send, parameter[name[message]]] begin[:]
<ast.AugAssign object at 0x7da1b1a2cd90>
return[name[num_sent]] | keyword[def] identifier[send_messages] ( identifier[self] , identifier[email_messages] ):
literal[string]
keyword[if] keyword[not] identifier[email_messages] :
keyword[return]
identifier[num_sent] = literal[int]
keyword[for] identifier[message] keyword[in] identifier[email_messages] :
keyword[if] identifier[self] . identifier[_send] ( identifier[message] ):
identifier[num_sent] += literal[int]
keyword[return] identifier[num_sent] | def send_messages(self, email_messages):
"""Sends one or more EmailMessage objects and returns the number of
email messages sent.
"""
if not email_messages:
return # depends on [control=['if'], data=[]]
num_sent = 0
for message in email_messages:
if self._send(message):
num_sent += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['message']]
return num_sent |
def is_uploaded_container(self, msg_info):
    """
    Check whether a mail message corresponds to an uploaded files container.

    returns 0 if it doesn't correspond to an uploaded container
            -1 if it corresponds to an uploaded container but it is corrupted
            1 if it corresponds to an uploaded container and is OK
    """
    results = {
        'BAD': -1,
        'NOT_FCB': 0,
        'OK': 1
    }
    for part in msg_info.msg_body.walk():
        # Multipart containers carry no payload of their own.
        if part.is_multipart():
            continue
        if part.get_content_type() == 'text/plain':
            # The text body only hints that the mail came from FCB;
            # real verification happens on the attachment parts below.
            if self._is_content_from_fcb(part.get_payload()):
                self._log.debug("Body detected as FCB: %s", part.get_payload())
            else:
                self._log.debug("Body doesn't match FCB: %s", part.get_payload())
            continue
        attachment_name = self._get_attachment_name(part)
        if not attachment_name:
            self._log.debug("Couldn't get attachment name. Will ignore the part.")
            continue
        files_container = self._get_files_container_by_name(attachment_name)
        if files_container:
            sha1_in_db = files_container.sha1
            msg_info.files_containers_id = files_container.id
            # Dump the attachment to disk so FileInfo can hash it, then
            # compare against the checksum recorded at upload time.
            tmp_file = FileInfo(os.path.join(tempfile.gettempdir(), "downloaded.tmp"))
            # "with" guarantees the handle is closed even if the write
            # fails (the original open/close pair could leak it).
            with open(tmp_file.path, 'wb') as fp:
                fp.write(part.get_payload(decode=True))
                fp.flush()
            if tmp_file.sha1 == sha1_in_db:
                self._log.info("File container '%s' verified!", attachment_name)
                result = results['OK']
            else:
                self._log.error("File container '%s' doesn't match the sha1 sum. Expected '%s' but got '%s'",
                                attachment_name, sha1_in_db, tmp_file.sha1)
                result = results['BAD']
            os.remove(tmp_file.path)
            return result
        else:
            self._log.debug("Attached file '%s' not found in DB. Will ignore this mail.", attachment_name)
    return results['NOT_FCB']
constant[
returns 0 if it doesn't correspond to an uploaded container
-1 if it corresponds to an uploaded container but it is corrupted
1 if it corresponds to an uploaded container and is OK
]
variable[results] assign[=] dictionary[[<ast.Constant object at 0x7da1b26a5c90>, <ast.Constant object at 0x7da1b26a7d90>, <ast.Constant object at 0x7da1b26a62f0>], [<ast.UnaryOp object at 0x7da1b26a40d0>, <ast.Constant object at 0x7da1b26a6b00>, <ast.Constant object at 0x7da1b26a4910>]]
for taget[name[part]] in starred[call[name[msg_info].msg_body.walk, parameter[]]] begin[:]
if call[name[part].is_multipart, parameter[]] begin[:]
continue
constant[
if part.get('Content-Disposition') is None:
print("no content dispo")
continue
]
if compare[call[name[part].get_content_type, parameter[]] equal[==] constant[text/plain]] begin[:]
if call[name[self]._is_content_from_fcb, parameter[call[name[part].get_payload, parameter[]]]] begin[:]
call[name[self]._log.debug, parameter[constant[Body detected as FCB: %s], call[name[part].get_payload, parameter[]]]]
variable[attachment_name] assign[=] call[name[self]._get_attachment_name, parameter[name[part]]]
if <ast.UnaryOp object at 0x7da1b26a7250> begin[:]
call[name[self]._log.debug, parameter[constant[Couldn't get attachment name. Will ignore the part.]]]
continue
variable[files_container] assign[=] call[name[self]._get_files_container_by_name, parameter[name[attachment_name]]]
if name[files_container] begin[:]
variable[sha1_in_db] assign[=] name[files_container].sha1
name[msg_info].files_containers_id assign[=] name[files_container].id
variable[tmp_file] assign[=] call[name[FileInfo], parameter[call[name[os].path.join, parameter[call[name[tempfile].gettempdir, parameter[]], constant[downloaded.tmp]]]]]
variable[fp] assign[=] call[name[open], parameter[name[tmp_file].path, constant[wb]]]
call[name[fp].write, parameter[call[name[part].get_payload, parameter[]]]]
call[name[fp].flush, parameter[]]
call[name[fp].close, parameter[]]
if compare[name[tmp_file].sha1 equal[==] name[sha1_in_db]] begin[:]
call[name[self]._log.info, parameter[constant[File container '%s' verified!], name[attachment_name]]]
variable[result] assign[=] call[name[results]][constant[OK]]
call[name[os].remove, parameter[name[tmp_file].path]]
return[name[result]]
return[call[name[results]][constant[NOT_FCB]]] | keyword[def] identifier[is_uploaded_container] ( identifier[self] , identifier[msg_info] ):
literal[string]
identifier[results] ={
literal[string] :- literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int]
}
keyword[for] identifier[part] keyword[in] identifier[msg_info] . identifier[msg_body] . identifier[walk] ():
keyword[if] identifier[part] . identifier[is_multipart] ():
keyword[continue]
literal[string]
keyword[if] identifier[part] . identifier[get_content_type] ()== literal[string] :
keyword[if] identifier[self] . identifier[_is_content_from_fcb] ( identifier[part] . identifier[get_payload] ()):
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] , identifier[part] . identifier[get_payload] ())
keyword[else] :
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] , identifier[part] . identifier[get_payload] ())
keyword[continue]
identifier[attachment_name] = identifier[self] . identifier[_get_attachment_name] ( identifier[part] )
keyword[if] keyword[not] identifier[attachment_name] :
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
keyword[continue]
identifier[files_container] = identifier[self] . identifier[_get_files_container_by_name] ( identifier[attachment_name] )
keyword[if] identifier[files_container] :
identifier[sha1_in_db] = identifier[files_container] . identifier[sha1]
identifier[msg_info] . identifier[files_containers_id] = identifier[files_container] . identifier[id]
identifier[tmp_file] = identifier[FileInfo] ( identifier[os] . identifier[path] . identifier[join] ( identifier[tempfile] . identifier[gettempdir] (), literal[string] ))
identifier[fp] = identifier[open] ( identifier[tmp_file] . identifier[path] , literal[string] )
identifier[fp] . identifier[write] ( identifier[part] . identifier[get_payload] ( identifier[decode] = literal[int] ))
identifier[fp] . identifier[flush] ()
identifier[fp] . identifier[close] ()
keyword[if] identifier[tmp_file] . identifier[sha1] == identifier[sha1_in_db] :
identifier[self] . identifier[_log] . identifier[info] ( literal[string] , identifier[attachment_name] )
identifier[result] = identifier[results] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[_log] . identifier[error] ( literal[string] ,
identifier[attachment_name] , identifier[sha1_in_db] , identifier[tmp_file] . identifier[sha1] )
identifier[result] = identifier[results] [ literal[string] ]
identifier[os] . identifier[remove] ( identifier[tmp_file] . identifier[path] )
keyword[return] identifier[result]
keyword[else] :
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] , identifier[attachment_name] )
keyword[return] identifier[results] [ literal[string] ] | def is_uploaded_container(self, msg_info):
"""
returns 0 if it doesn't correspond to an uploaded container
-1 if it corresponds to an uploaded container but it is corrupted
1 if it corresponds to an uploaded container and is OK
"""
results = {'BAD': -1, 'NOT_FCB': 0, 'OK': 1}
for part in msg_info.msg_body.walk():
if part.is_multipart():
continue # depends on [control=['if'], data=[]]
'\n if part.get(\'Content-Disposition\') is None:\n print("no content dispo")\n continue\n '
if part.get_content_type() == 'text/plain':
if self._is_content_from_fcb(part.get_payload()):
self._log.debug('Body detected as FCB: %s', part.get_payload()) # depends on [control=['if'], data=[]]
else:
self._log.debug("Body doesn't match FCB: %s", part.get_payload())
continue # depends on [control=['if'], data=[]]
attachment_name = self._get_attachment_name(part)
if not attachment_name:
self._log.debug("Couldn't get attachment name. Will ignore the part.")
continue # depends on [control=['if'], data=[]]
files_container = self._get_files_container_by_name(attachment_name)
if files_container:
sha1_in_db = files_container.sha1
msg_info.files_containers_id = files_container.id
tmp_file = FileInfo(os.path.join(tempfile.gettempdir(), 'downloaded.tmp'))
fp = open(tmp_file.path, 'wb')
fp.write(part.get_payload(decode=1))
fp.flush()
fp.close()
if tmp_file.sha1 == sha1_in_db:
self._log.info("File container '%s' verified!", attachment_name)
result = results['OK'] # depends on [control=['if'], data=[]]
else:
self._log.error("File container '%s' doesn't match the sha1 sum. Expected '%s' but got '%s'", attachment_name, sha1_in_db, tmp_file.sha1)
result = results['BAD']
os.remove(tmp_file.path)
return result # depends on [control=['if'], data=[]]
else:
self._log.debug("Attached file '%s' not found in DB. Will ignore this mail.", attachment_name) # depends on [control=['for'], data=['part']]
return results['NOT_FCB'] |
def cleanup(self, output):
    """
    Generates consistent OpenWRT/LEDE UCI output
    """
    # correct indentation
    output = output.replace('    ', '')\
                   .replace('\noption', '\n\toption')\
                   .replace('\nlist', '\n\tlist')
    # convert True to 1 and False to 0
    output = output.replace('True', '1')\
                   .replace('False', '0')
    # max 2 consecutive \n delimiters; a single replace pass can leave
    # longer runs behind (e.g. 5 newlines collapse to 4, not 2), so
    # repeat until the output is stable
    while '\n\n\n' in output:
        output = output.replace('\n\n\n', '\n\n')
    # if output is present
    # ensure it always ends with 1 new line
    if output.endswith('\n\n'):
        return output[0:-1]
    return output
constant[
Generates consistent OpenWRT/LEDE UCI output
]
variable[output] assign[=] call[call[call[name[output].replace, parameter[constant[ ], constant[]]].replace, parameter[constant[
option], constant[
option]]].replace, parameter[constant[
list], constant[
list]]]
variable[output] assign[=] call[call[name[output].replace, parameter[constant[True], constant[1]]].replace, parameter[constant[False], constant[0]]]
variable[output] assign[=] call[name[output].replace, parameter[constant[
], constant[
]]]
if call[name[output].endswith, parameter[constant[
]]] begin[:]
return[call[name[output]][<ast.Slice object at 0x7da1b013c9d0>]]
return[name[output]] | keyword[def] identifier[cleanup] ( identifier[self] , identifier[output] ):
literal[string]
identifier[output] = identifier[output] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[output] = identifier[output] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[output] = identifier[output] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[output] . identifier[endswith] ( literal[string] ):
keyword[return] identifier[output] [ literal[int] :- literal[int] ]
keyword[return] identifier[output] | def cleanup(self, output):
"""
Generates consistent OpenWRT/LEDE UCI output
"""
# correct indentation
output = output.replace(' ', '').replace('\noption', '\n\toption').replace('\nlist', '\n\tlist')
# convert True to 1 and False to 0
output = output.replace('True', '1').replace('False', '0')
# max 2 consecutive \n delimiters
output = output.replace('\n\n\n', '\n\n')
# if output is present
# ensure it always ends with 1 new line
if output.endswith('\n\n'):
return output[0:-1] # depends on [control=['if'], data=[]]
return output |
def fuseoptref(cls):
    """
    Find out which options are recognized by the library.

    Forks a child process that runs the binding in "showhelp" mode with
    its stderr redirected into a pipe, then scans the help text in the
    parent for ``-o <option>`` entries.

    Result is a `FuseArgs` instance with the list of supported
    options, suitable for passing on to the `filter` method of
    another `FuseArgs` instance.
    """
    import os, re
    pr, pw = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: route the library's help output (stderr) into the pipe,
        # trigger help mode, then exit without returning to the caller.
        os.dup2(pw, 2)
        os.close(pr)
        fh = cls()
        fh.fuse_args = FuseArgs()
        fh.fuse_args.setmod('showhelp')
        fh.main()
        sys.exit()
    # Parent: parse the help text arriving on the read end of the pipe.
    os.close(pw)
    fa = FuseArgs()
    # Raw string: \s, \w and \[ are regex escapes, not string escapes
    # (the original non-raw literal relied on deprecated invalid
    # escape-sequence behavior).
    ore = re.compile(r"-o\s+([\w\[\]]+(?:=\w+)?)")
    fpr = os.fdopen(pr)
    for l in fpr:
        m = ore.search(l)
        if m:
            o = m.groups()[0]
            oa = [o]
            # try to catch two-in-one options (like "[no]foo")
            opa = o.split("[")
            if len(opa) == 2:
                o1, ox = opa
                oxpa = ox.split("]")
                if len(oxpa) == 2:
                    oo, o2 = oxpa
                    # expand "[no]foo" into both "foo" and "nofoo"
                    oa = [o1 + o2, o1 + oo + o2]
            for o in oa:
                fa.add(o)
    fpr.close()
    return fa
constant[
Find out which options are recognized by the library.
Result is a `FuseArgs` instance with the list of supported
options, suitable for passing on to the `filter` method of
another `FuseArgs` instance.
]
import module[os], module[re]
<ast.Tuple object at 0x7da20c6e6ad0> assign[=] call[name[os].pipe, parameter[]]
variable[pid] assign[=] call[name[os].fork, parameter[]]
if compare[name[pid] equal[==] constant[0]] begin[:]
call[name[os].dup2, parameter[name[pw], constant[2]]]
call[name[os].close, parameter[name[pr]]]
variable[fh] assign[=] call[name[cls], parameter[]]
name[fh].fuse_args assign[=] call[name[FuseArgs], parameter[]]
call[name[fh].fuse_args.setmod, parameter[constant[showhelp]]]
call[name[fh].main, parameter[]]
call[name[sys].exit, parameter[]]
call[name[os].close, parameter[name[pw]]]
variable[fa] assign[=] call[name[FuseArgs], parameter[]]
variable[ore] assign[=] call[name[re].compile, parameter[constant[-o\s+([\w\[\]]+(?:=\w+)?)]]]
variable[fpr] assign[=] call[name[os].fdopen, parameter[name[pr]]]
for taget[name[l]] in starred[name[fpr]] begin[:]
variable[m] assign[=] call[name[ore].search, parameter[name[l]]]
if name[m] begin[:]
variable[o] assign[=] call[call[name[m].groups, parameter[]]][constant[0]]
variable[oa] assign[=] list[[<ast.Name object at 0x7da18ede75b0>]]
variable[opa] assign[=] call[name[o].split, parameter[constant[[]]]
if compare[call[name[len], parameter[name[opa]]] equal[==] constant[2]] begin[:]
<ast.Tuple object at 0x7da1b2347910> assign[=] name[opa]
variable[oxpa] assign[=] call[name[ox].split, parameter[constant[]]]]
if compare[call[name[len], parameter[name[oxpa]]] equal[==] constant[2]] begin[:]
<ast.Tuple object at 0x7da1b2345750> assign[=] name[oxpa]
variable[oa] assign[=] list[[<ast.BinOp object at 0x7da1b2347160>, <ast.BinOp object at 0x7da1b2347c70>]]
for taget[name[o]] in starred[name[oa]] begin[:]
call[name[fa].add, parameter[name[o]]]
call[name[fpr].close, parameter[]]
return[name[fa]] | keyword[def] identifier[fuseoptref] ( identifier[cls] ):
literal[string]
keyword[import] identifier[os] , identifier[re]
identifier[pr] , identifier[pw] = identifier[os] . identifier[pipe] ()
identifier[pid] = identifier[os] . identifier[fork] ()
keyword[if] identifier[pid] == literal[int] :
identifier[os] . identifier[dup2] ( identifier[pw] , literal[int] )
identifier[os] . identifier[close] ( identifier[pr] )
identifier[fh] = identifier[cls] ()
identifier[fh] . identifier[fuse_args] = identifier[FuseArgs] ()
identifier[fh] . identifier[fuse_args] . identifier[setmod] ( literal[string] )
identifier[fh] . identifier[main] ()
identifier[sys] . identifier[exit] ()
identifier[os] . identifier[close] ( identifier[pw] )
identifier[fa] = identifier[FuseArgs] ()
identifier[ore] = identifier[re] . identifier[compile] ( literal[string] )
identifier[fpr] = identifier[os] . identifier[fdopen] ( identifier[pr] )
keyword[for] identifier[l] keyword[in] identifier[fpr] :
identifier[m] = identifier[ore] . identifier[search] ( identifier[l] )
keyword[if] identifier[m] :
identifier[o] = identifier[m] . identifier[groups] ()[ literal[int] ]
identifier[oa] =[ identifier[o] ]
identifier[opa] = identifier[o] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[opa] )== literal[int] :
identifier[o1] , identifier[ox] = identifier[opa]
identifier[oxpa] = identifier[ox] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[oxpa] )== literal[int] :
identifier[oo] , identifier[o2] = identifier[oxpa]
identifier[oa] =[ identifier[o1] + identifier[o2] , identifier[o1] + identifier[oo] + identifier[o2] ]
keyword[for] identifier[o] keyword[in] identifier[oa] :
identifier[fa] . identifier[add] ( identifier[o] )
identifier[fpr] . identifier[close] ()
keyword[return] identifier[fa] | def fuseoptref(cls):
"""
Find out which options are recognized by the library.
Result is a `FuseArgs` instance with the list of supported
options, suitable for passing on to the `filter` method of
another `FuseArgs` instance.
"""
import os, re
(pr, pw) = os.pipe()
pid = os.fork()
if pid == 0:
os.dup2(pw, 2)
os.close(pr)
fh = cls()
fh.fuse_args = FuseArgs()
fh.fuse_args.setmod('showhelp')
fh.main()
sys.exit() # depends on [control=['if'], data=[]]
os.close(pw)
fa = FuseArgs()
ore = re.compile('-o\\s+([\\w\\[\\]]+(?:=\\w+)?)')
fpr = os.fdopen(pr)
for l in fpr:
m = ore.search(l)
if m:
o = m.groups()[0]
oa = [o]
# try to catch two-in-one options (like "[no]foo")
opa = o.split('[')
if len(opa) == 2:
(o1, ox) = opa
oxpa = ox.split(']')
if len(oxpa) == 2:
(oo, o2) = oxpa
oa = [o1 + o2, o1 + oo + o2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
for o in oa:
fa.add(o) # depends on [control=['for'], data=['o']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['l']]
fpr.close()
return fa |
def AddTrainingOperators(model, softmax, label):
"""Adds training operators to the model."""
xent = model.LabelCrossEntropy([softmax, label], 'xent')
# compute the expected loss
loss = model.AveragedLoss(xent, "loss")
# track the accuracy of the model
AddAccuracy(model, softmax, label)
# use the average loss we just computed to add gradient operators to the
# model
model.AddGradientOperators([loss])
# do a simple stochastic gradient descent
ITER = brew.iter(model, "iter")
# set the learning rate schedule
LR = model.LearningRate(
ITER, "LR", base_lr=-0.1, policy="step", stepsize=1, gamma=0.999)
# ONE is a constant value that is used in the gradient update. We only need
# to create it once, so it is explicitly placed in param_init_net.
ONE = model.param_init_net.ConstantFill([], "ONE", shape=[1], value=1.0)
# Now, for each parameter, we do the gradient updates.
for param in model.params:
# Note how we get the gradient of each parameter - ModelHelper keeps
# track of that.
param_grad = model.param_to_grad[param]
# The update is a simple weighted sum: param = param + param_grad * LR
model.WeightedSum([param, ONE, param_grad, LR], param) | def function[AddTrainingOperators, parameter[model, softmax, label]]:
constant[Adds training operators to the model.]
variable[xent] assign[=] call[name[model].LabelCrossEntropy, parameter[list[[<ast.Name object at 0x7da1b1f185b0>, <ast.Name object at 0x7da1b1f1be50>]], constant[xent]]]
variable[loss] assign[=] call[name[model].AveragedLoss, parameter[name[xent], constant[loss]]]
call[name[AddAccuracy], parameter[name[model], name[softmax], name[label]]]
call[name[model].AddGradientOperators, parameter[list[[<ast.Name object at 0x7da1b1f18b20>]]]]
variable[ITER] assign[=] call[name[brew].iter, parameter[name[model], constant[iter]]]
variable[LR] assign[=] call[name[model].LearningRate, parameter[name[ITER], constant[LR]]]
variable[ONE] assign[=] call[name[model].param_init_net.ConstantFill, parameter[list[[]], constant[ONE]]]
for taget[name[param]] in starred[name[model].params] begin[:]
variable[param_grad] assign[=] call[name[model].param_to_grad][name[param]]
call[name[model].WeightedSum, parameter[list[[<ast.Name object at 0x7da1b1f184c0>, <ast.Name object at 0x7da1b1f1a830>, <ast.Name object at 0x7da1b1f192a0>, <ast.Name object at 0x7da1b1f1bb50>]], name[param]]] | keyword[def] identifier[AddTrainingOperators] ( identifier[model] , identifier[softmax] , identifier[label] ):
literal[string]
identifier[xent] = identifier[model] . identifier[LabelCrossEntropy] ([ identifier[softmax] , identifier[label] ], literal[string] )
identifier[loss] = identifier[model] . identifier[AveragedLoss] ( identifier[xent] , literal[string] )
identifier[AddAccuracy] ( identifier[model] , identifier[softmax] , identifier[label] )
identifier[model] . identifier[AddGradientOperators] ([ identifier[loss] ])
identifier[ITER] = identifier[brew] . identifier[iter] ( identifier[model] , literal[string] )
identifier[LR] = identifier[model] . identifier[LearningRate] (
identifier[ITER] , literal[string] , identifier[base_lr] =- literal[int] , identifier[policy] = literal[string] , identifier[stepsize] = literal[int] , identifier[gamma] = literal[int] )
identifier[ONE] = identifier[model] . identifier[param_init_net] . identifier[ConstantFill] ([], literal[string] , identifier[shape] =[ literal[int] ], identifier[value] = literal[int] )
keyword[for] identifier[param] keyword[in] identifier[model] . identifier[params] :
identifier[param_grad] = identifier[model] . identifier[param_to_grad] [ identifier[param] ]
identifier[model] . identifier[WeightedSum] ([ identifier[param] , identifier[ONE] , identifier[param_grad] , identifier[LR] ], identifier[param] ) | def AddTrainingOperators(model, softmax, label):
"""Adds training operators to the model."""
xent = model.LabelCrossEntropy([softmax, label], 'xent')
# compute the expected loss
loss = model.AveragedLoss(xent, 'loss')
# track the accuracy of the model
AddAccuracy(model, softmax, label)
# use the average loss we just computed to add gradient operators to the
# model
model.AddGradientOperators([loss])
# do a simple stochastic gradient descent
ITER = brew.iter(model, 'iter')
# set the learning rate schedule
LR = model.LearningRate(ITER, 'LR', base_lr=-0.1, policy='step', stepsize=1, gamma=0.999)
# ONE is a constant value that is used in the gradient update. We only need
# to create it once, so it is explicitly placed in param_init_net.
ONE = model.param_init_net.ConstantFill([], 'ONE', shape=[1], value=1.0)
# Now, for each parameter, we do the gradient updates.
for param in model.params:
# Note how we get the gradient of each parameter - ModelHelper keeps
# track of that.
param_grad = model.param_to_grad[param]
# The update is a simple weighted sum: param = param + param_grad * LR
model.WeightedSum([param, ONE, param_grad, LR], param) # depends on [control=['for'], data=['param']] |
def archive(self, repo_slug=None, format='zip', prefix=''):
""" Get one of your repositories and compress it as an archive.
Return the path of the archive.
format parameter is curently not supported.
"""
prefix = '%s'.lstrip('/') % prefix
self._get_files_in_dir(repo_slug=repo_slug, dir='/')
if self.bitbucket.repo_tree:
with NamedTemporaryFile(delete=False) as archive:
with ZipFile(archive, 'w') as zip_archive:
for name, file in self.bitbucket.repo_tree.items():
with NamedTemporaryFile(delete=False) as temp_file:
temp_file.write(file.encode('utf-8'))
zip_archive.write(temp_file.name, prefix + name)
return (True, archive.name)
return (False, 'Could not archive your project.') | def function[archive, parameter[self, repo_slug, format, prefix]]:
constant[ Get one of your repositories and compress it as an archive.
Return the path of the archive.
format parameter is curently not supported.
]
variable[prefix] assign[=] binary_operation[call[constant[%s].lstrip, parameter[constant[/]]] <ast.Mod object at 0x7da2590d6920> name[prefix]]
call[name[self]._get_files_in_dir, parameter[]]
if name[self].bitbucket.repo_tree begin[:]
with call[name[NamedTemporaryFile], parameter[]] begin[:]
with call[name[ZipFile], parameter[name[archive], constant[w]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1a780a0>, <ast.Name object at 0x7da1b1a79120>]]] in starred[call[name[self].bitbucket.repo_tree.items, parameter[]]] begin[:]
with call[name[NamedTemporaryFile], parameter[]] begin[:]
call[name[temp_file].write, parameter[call[name[file].encode, parameter[constant[utf-8]]]]]
call[name[zip_archive].write, parameter[name[temp_file].name, binary_operation[name[prefix] + name[name]]]]
return[tuple[[<ast.Constant object at 0x7da1b1a7bca0>, <ast.Attribute object at 0x7da1b1a7a710>]]]
return[tuple[[<ast.Constant object at 0x7da1b1a7beb0>, <ast.Constant object at 0x7da1b1a78e20>]]] | keyword[def] identifier[archive] ( identifier[self] , identifier[repo_slug] = keyword[None] , identifier[format] = literal[string] , identifier[prefix] = literal[string] ):
literal[string]
identifier[prefix] = literal[string] . identifier[lstrip] ( literal[string] )% identifier[prefix]
identifier[self] . identifier[_get_files_in_dir] ( identifier[repo_slug] = identifier[repo_slug] , identifier[dir] = literal[string] )
keyword[if] identifier[self] . identifier[bitbucket] . identifier[repo_tree] :
keyword[with] identifier[NamedTemporaryFile] ( identifier[delete] = keyword[False] ) keyword[as] identifier[archive] :
keyword[with] identifier[ZipFile] ( identifier[archive] , literal[string] ) keyword[as] identifier[zip_archive] :
keyword[for] identifier[name] , identifier[file] keyword[in] identifier[self] . identifier[bitbucket] . identifier[repo_tree] . identifier[items] ():
keyword[with] identifier[NamedTemporaryFile] ( identifier[delete] = keyword[False] ) keyword[as] identifier[temp_file] :
identifier[temp_file] . identifier[write] ( identifier[file] . identifier[encode] ( literal[string] ))
identifier[zip_archive] . identifier[write] ( identifier[temp_file] . identifier[name] , identifier[prefix] + identifier[name] )
keyword[return] ( keyword[True] , identifier[archive] . identifier[name] )
keyword[return] ( keyword[False] , literal[string] ) | def archive(self, repo_slug=None, format='zip', prefix=''):
""" Get one of your repositories and compress it as an archive.
Return the path of the archive.
format parameter is curently not supported.
"""
prefix = '%s'.lstrip('/') % prefix
self._get_files_in_dir(repo_slug=repo_slug, dir='/')
if self.bitbucket.repo_tree:
with NamedTemporaryFile(delete=False) as archive:
with ZipFile(archive, 'w') as zip_archive:
for (name, file) in self.bitbucket.repo_tree.items():
with NamedTemporaryFile(delete=False) as temp_file:
temp_file.write(file.encode('utf-8')) # depends on [control=['with'], data=['temp_file']]
zip_archive.write(temp_file.name, prefix + name) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['zip_archive']] # depends on [control=['with'], data=['NamedTemporaryFile', 'archive']]
return (True, archive.name) # depends on [control=['if'], data=[]]
return (False, 'Could not archive your project.') |
def process_config(config, config_data):
""" Populates config with data from the configuration data dict. It handles
components, data, log, management and session sections from the
configuration data.
:param config: The config reference of the object that will hold the
configuration data from the config_data.
:param config_data: The configuration data loaded from a configuration
file.
"""
if 'components' in config_data:
process_components_config_section(config, config_data['components'])
if 'data' in config_data:
process_data_config_section(config, config_data['data'])
if 'log' in config_data:
process_log_config_section(config, config_data['log'])
if 'management' in config_data:
process_management_config_section(config, config_data['management'])
if 'session' in config_data:
process_session_config_section(config, config_data['session']) | def function[process_config, parameter[config, config_data]]:
constant[ Populates config with data from the configuration data dict. It handles
components, data, log, management and session sections from the
configuration data.
:param config: The config reference of the object that will hold the
configuration data from the config_data.
:param config_data: The configuration data loaded from a configuration
file.
]
if compare[constant[components] in name[config_data]] begin[:]
call[name[process_components_config_section], parameter[name[config], call[name[config_data]][constant[components]]]]
if compare[constant[data] in name[config_data]] begin[:]
call[name[process_data_config_section], parameter[name[config], call[name[config_data]][constant[data]]]]
if compare[constant[log] in name[config_data]] begin[:]
call[name[process_log_config_section], parameter[name[config], call[name[config_data]][constant[log]]]]
if compare[constant[management] in name[config_data]] begin[:]
call[name[process_management_config_section], parameter[name[config], call[name[config_data]][constant[management]]]]
if compare[constant[session] in name[config_data]] begin[:]
call[name[process_session_config_section], parameter[name[config], call[name[config_data]][constant[session]]]] | keyword[def] identifier[process_config] ( identifier[config] , identifier[config_data] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[config_data] :
identifier[process_components_config_section] ( identifier[config] , identifier[config_data] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[config_data] :
identifier[process_data_config_section] ( identifier[config] , identifier[config_data] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[config_data] :
identifier[process_log_config_section] ( identifier[config] , identifier[config_data] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[config_data] :
identifier[process_management_config_section] ( identifier[config] , identifier[config_data] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[config_data] :
identifier[process_session_config_section] ( identifier[config] , identifier[config_data] [ literal[string] ]) | def process_config(config, config_data):
""" Populates config with data from the configuration data dict. It handles
components, data, log, management and session sections from the
configuration data.
:param config: The config reference of the object that will hold the
configuration data from the config_data.
:param config_data: The configuration data loaded from a configuration
file.
"""
if 'components' in config_data:
process_components_config_section(config, config_data['components']) # depends on [control=['if'], data=['config_data']]
if 'data' in config_data:
process_data_config_section(config, config_data['data']) # depends on [control=['if'], data=['config_data']]
if 'log' in config_data:
process_log_config_section(config, config_data['log']) # depends on [control=['if'], data=['config_data']]
if 'management' in config_data:
process_management_config_section(config, config_data['management']) # depends on [control=['if'], data=['config_data']]
if 'session' in config_data:
process_session_config_section(config, config_data['session']) # depends on [control=['if'], data=['config_data']] |
def dropout(a, p=0.5, inplace=False):
"""Randomly set elements from `a` equal to zero, with proportion `p`.
Similar in concept to the dropout technique employed within
neural networks.
Parameters
----------
a: numpy.ndarray
Array to be modified.
p: float in [0, 1]
Expected proportion of elements in the result that will equal 0.
inplace: bool
Example
-------
>>> x = np.arange(10, 20, 2, dtype=np.uint8)
>>> z = dropout(x, p=0.6)
>>> z
array([10, 12, 0, 0, 0], dtype=uint8)
>>> x.dtype == z.dtype
True
"""
dt = a.dtype
if p == 0.5:
# Can't pass float dtype to `randint` directly.
rand = np.random.randint(0, high=2, size=a.shape).astype(dtype=dt)
else:
rand = np.random.choice([0, 1], p=[p, 1 - p], size=a.shape).astype(dt)
if inplace:
a *= rand
else:
return a * rand | def function[dropout, parameter[a, p, inplace]]:
constant[Randomly set elements from `a` equal to zero, with proportion `p`.
Similar in concept to the dropout technique employed within
neural networks.
Parameters
----------
a: numpy.ndarray
Array to be modified.
p: float in [0, 1]
Expected proportion of elements in the result that will equal 0.
inplace: bool
Example
-------
>>> x = np.arange(10, 20, 2, dtype=np.uint8)
>>> z = dropout(x, p=0.6)
>>> z
array([10, 12, 0, 0, 0], dtype=uint8)
>>> x.dtype == z.dtype
True
]
variable[dt] assign[=] name[a].dtype
if compare[name[p] equal[==] constant[0.5]] begin[:]
variable[rand] assign[=] call[call[name[np].random.randint, parameter[constant[0]]].astype, parameter[]]
if name[inplace] begin[:]
<ast.AugAssign object at 0x7da1b07eb9d0> | keyword[def] identifier[dropout] ( identifier[a] , identifier[p] = literal[int] , identifier[inplace] = keyword[False] ):
literal[string]
identifier[dt] = identifier[a] . identifier[dtype]
keyword[if] identifier[p] == literal[int] :
identifier[rand] = identifier[np] . identifier[random] . identifier[randint] ( literal[int] , identifier[high] = literal[int] , identifier[size] = identifier[a] . identifier[shape] ). identifier[astype] ( identifier[dtype] = identifier[dt] )
keyword[else] :
identifier[rand] = identifier[np] . identifier[random] . identifier[choice] ([ literal[int] , literal[int] ], identifier[p] =[ identifier[p] , literal[int] - identifier[p] ], identifier[size] = identifier[a] . identifier[shape] ). identifier[astype] ( identifier[dt] )
keyword[if] identifier[inplace] :
identifier[a] *= identifier[rand]
keyword[else] :
keyword[return] identifier[a] * identifier[rand] | def dropout(a, p=0.5, inplace=False):
"""Randomly set elements from `a` equal to zero, with proportion `p`.
Similar in concept to the dropout technique employed within
neural networks.
Parameters
----------
a: numpy.ndarray
Array to be modified.
p: float in [0, 1]
Expected proportion of elements in the result that will equal 0.
inplace: bool
Example
-------
>>> x = np.arange(10, 20, 2, dtype=np.uint8)
>>> z = dropout(x, p=0.6)
>>> z
array([10, 12, 0, 0, 0], dtype=uint8)
>>> x.dtype == z.dtype
True
"""
dt = a.dtype
if p == 0.5: # Can't pass float dtype to `randint` directly.
rand = np.random.randint(0, high=2, size=a.shape).astype(dtype=dt) # depends on [control=['if'], data=[]]
else:
rand = np.random.choice([0, 1], p=[p, 1 - p], size=a.shape).astype(dt)
if inplace:
a *= rand # depends on [control=['if'], data=[]]
else:
return a * rand |
def pic_loggedrequiredremoterelease_v1(self):
"""Update the receiver link sequence."""
log = self.sequences.logs.fastaccess
rec = self.sequences.receivers.fastaccess
log.loggedrequiredremoterelease[0] = rec.d[0] | def function[pic_loggedrequiredremoterelease_v1, parameter[self]]:
constant[Update the receiver link sequence.]
variable[log] assign[=] name[self].sequences.logs.fastaccess
variable[rec] assign[=] name[self].sequences.receivers.fastaccess
call[name[log].loggedrequiredremoterelease][constant[0]] assign[=] call[name[rec].d][constant[0]] | keyword[def] identifier[pic_loggedrequiredremoterelease_v1] ( identifier[self] ):
literal[string]
identifier[log] = identifier[self] . identifier[sequences] . identifier[logs] . identifier[fastaccess]
identifier[rec] = identifier[self] . identifier[sequences] . identifier[receivers] . identifier[fastaccess]
identifier[log] . identifier[loggedrequiredremoterelease] [ literal[int] ]= identifier[rec] . identifier[d] [ literal[int] ] | def pic_loggedrequiredremoterelease_v1(self):
"""Update the receiver link sequence."""
log = self.sequences.logs.fastaccess
rec = self.sequences.receivers.fastaccess
log.loggedrequiredremoterelease[0] = rec.d[0] |
def _get_uniparc_sequences_through_uniprot_ACs(self, mapping_pdb_id, uniprot_ACs, cache_dir):
'''Get the UniParc sequences associated with the UniProt accession number.'''
# Map the UniProt ACs to the UniParc IDs
m = uniprot_map('ACC', 'UPARC', uniprot_ACs, cache_dir = cache_dir)
UniParcIDs = []
for _, v in m.iteritems():
UniParcIDs.extend(v)
# Create a mapping from the mapping_pdb_id to the UniParcEntry objects. This must match the return type from pdb_to_uniparc.
mapping = {mapping_pdb_id : []}
for UniParcID in UniParcIDs:
entry = UniParcEntry(UniParcID, cache_dir = cache_dir)
mapping[mapping_pdb_id].append(entry)
return mapping | def function[_get_uniparc_sequences_through_uniprot_ACs, parameter[self, mapping_pdb_id, uniprot_ACs, cache_dir]]:
constant[Get the UniParc sequences associated with the UniProt accession number.]
variable[m] assign[=] call[name[uniprot_map], parameter[constant[ACC], constant[UPARC], name[uniprot_ACs]]]
variable[UniParcIDs] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2044c0f10>, <ast.Name object at 0x7da2044c16f0>]]] in starred[call[name[m].iteritems, parameter[]]] begin[:]
call[name[UniParcIDs].extend, parameter[name[v]]]
variable[mapping] assign[=] dictionary[[<ast.Name object at 0x7da2044c39d0>], [<ast.List object at 0x7da2044c0dc0>]]
for taget[name[UniParcID]] in starred[name[UniParcIDs]] begin[:]
variable[entry] assign[=] call[name[UniParcEntry], parameter[name[UniParcID]]]
call[call[name[mapping]][name[mapping_pdb_id]].append, parameter[name[entry]]]
return[name[mapping]] | keyword[def] identifier[_get_uniparc_sequences_through_uniprot_ACs] ( identifier[self] , identifier[mapping_pdb_id] , identifier[uniprot_ACs] , identifier[cache_dir] ):
literal[string]
identifier[m] = identifier[uniprot_map] ( literal[string] , literal[string] , identifier[uniprot_ACs] , identifier[cache_dir] = identifier[cache_dir] )
identifier[UniParcIDs] =[]
keyword[for] identifier[_] , identifier[v] keyword[in] identifier[m] . identifier[iteritems] ():
identifier[UniParcIDs] . identifier[extend] ( identifier[v] )
identifier[mapping] ={ identifier[mapping_pdb_id] :[]}
keyword[for] identifier[UniParcID] keyword[in] identifier[UniParcIDs] :
identifier[entry] = identifier[UniParcEntry] ( identifier[UniParcID] , identifier[cache_dir] = identifier[cache_dir] )
identifier[mapping] [ identifier[mapping_pdb_id] ]. identifier[append] ( identifier[entry] )
keyword[return] identifier[mapping] | def _get_uniparc_sequences_through_uniprot_ACs(self, mapping_pdb_id, uniprot_ACs, cache_dir):
"""Get the UniParc sequences associated with the UniProt accession number."""
# Map the UniProt ACs to the UniParc IDs
m = uniprot_map('ACC', 'UPARC', uniprot_ACs, cache_dir=cache_dir)
UniParcIDs = []
for (_, v) in m.iteritems():
UniParcIDs.extend(v) # depends on [control=['for'], data=[]]
# Create a mapping from the mapping_pdb_id to the UniParcEntry objects. This must match the return type from pdb_to_uniparc.
mapping = {mapping_pdb_id: []}
for UniParcID in UniParcIDs:
entry = UniParcEntry(UniParcID, cache_dir=cache_dir)
mapping[mapping_pdb_id].append(entry) # depends on [control=['for'], data=['UniParcID']]
return mapping |
def update_assignment(self, assignment):
"""
Modify an existing assignment.
https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update
"""
url = ASSIGNMENTS_API.format(assignment.course_id) + "/{}".format(
assignment.assignment_id)
data = self._put_resource(url, assignment.json_data())
return Assignment(data=data) | def function[update_assignment, parameter[self, assignment]]:
constant[
Modify an existing assignment.
https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update
]
variable[url] assign[=] binary_operation[call[name[ASSIGNMENTS_API].format, parameter[name[assignment].course_id]] + call[constant[/{}].format, parameter[name[assignment].assignment_id]]]
variable[data] assign[=] call[name[self]._put_resource, parameter[name[url], call[name[assignment].json_data, parameter[]]]]
return[call[name[Assignment], parameter[]]] | keyword[def] identifier[update_assignment] ( identifier[self] , identifier[assignment] ):
literal[string]
identifier[url] = identifier[ASSIGNMENTS_API] . identifier[format] ( identifier[assignment] . identifier[course_id] )+ literal[string] . identifier[format] (
identifier[assignment] . identifier[assignment_id] )
identifier[data] = identifier[self] . identifier[_put_resource] ( identifier[url] , identifier[assignment] . identifier[json_data] ())
keyword[return] identifier[Assignment] ( identifier[data] = identifier[data] ) | def update_assignment(self, assignment):
"""
Modify an existing assignment.
https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update
"""
url = ASSIGNMENTS_API.format(assignment.course_id) + '/{}'.format(assignment.assignment_id)
data = self._put_resource(url, assignment.json_data())
return Assignment(data=data) |
def initialize_indices(self, **kwargs):
"""
creates all the indicies that are defined in the rdf definitions
kwargs:
action: which action is to be perfomed
initialize: (default) tests to see if the index exisits
if not creates it
reset: deletes all of the indexes and recreate them
update: starts a mapping update and reindexing process
"""
action = kwargs.get('action', 'initialize')
if action == 'update':
kwargs['reset_idx'] = False
elif action =='reset':
kwargs['reset_idx'] = True
idx_list = self.list_indexes()
for idx, values in idx_list.items():
if (action == 'initialize' and not self.es.indices.exists(idx)) \
or action != 'initialize':
self.send_es_mapping(self.get_rdf_es_idx_map({idx: values}),
**kwargs) | def function[initialize_indices, parameter[self]]:
constant[
creates all the indicies that are defined in the rdf definitions
kwargs:
action: which action is to be perfomed
initialize: (default) tests to see if the index exisits
if not creates it
reset: deletes all of the indexes and recreate them
update: starts a mapping update and reindexing process
]
variable[action] assign[=] call[name[kwargs].get, parameter[constant[action], constant[initialize]]]
if compare[name[action] equal[==] constant[update]] begin[:]
call[name[kwargs]][constant[reset_idx]] assign[=] constant[False]
variable[idx_list] assign[=] call[name[self].list_indexes, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b13043d0>, <ast.Name object at 0x7da1b1306410>]]] in starred[call[name[idx_list].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b1305540> begin[:]
call[name[self].send_es_mapping, parameter[call[name[self].get_rdf_es_idx_map, parameter[dictionary[[<ast.Name object at 0x7da1b13049d0>], [<ast.Name object at 0x7da1b1304a60>]]]]]] | keyword[def] identifier[initialize_indices] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[action] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[action] == literal[string] :
identifier[kwargs] [ literal[string] ]= keyword[False]
keyword[elif] identifier[action] == literal[string] :
identifier[kwargs] [ literal[string] ]= keyword[True]
identifier[idx_list] = identifier[self] . identifier[list_indexes] ()
keyword[for] identifier[idx] , identifier[values] keyword[in] identifier[idx_list] . identifier[items] ():
keyword[if] ( identifier[action] == literal[string] keyword[and] keyword[not] identifier[self] . identifier[es] . identifier[indices] . identifier[exists] ( identifier[idx] )) keyword[or] identifier[action] != literal[string] :
identifier[self] . identifier[send_es_mapping] ( identifier[self] . identifier[get_rdf_es_idx_map] ({ identifier[idx] : identifier[values] }),
** identifier[kwargs] ) | def initialize_indices(self, **kwargs):
"""
creates all the indicies that are defined in the rdf definitions
kwargs:
action: which action is to be perfomed
initialize: (default) tests to see if the index exisits
if not creates it
reset: deletes all of the indexes and recreate them
update: starts a mapping update and reindexing process
"""
action = kwargs.get('action', 'initialize')
if action == 'update':
kwargs['reset_idx'] = False # depends on [control=['if'], data=[]]
elif action == 'reset':
kwargs['reset_idx'] = True # depends on [control=['if'], data=[]]
idx_list = self.list_indexes()
for (idx, values) in idx_list.items():
if action == 'initialize' and (not self.es.indices.exists(idx)) or action != 'initialize':
self.send_es_mapping(self.get_rdf_es_idx_map({idx: values}), **kwargs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def check_mro(self, bases):
"""Check if C3 MRO is possible with given bases"""
try:
self.add_node("temp")
for base in bases:
nx.DiGraph.add_edge(self, base, "temp")
result = self.get_mro("temp")[1:]
finally:
self.remove_node("temp")
return result | def function[check_mro, parameter[self, bases]]:
constant[Check if C3 MRO is possible with given bases]
<ast.Try object at 0x7da1b00da290>
return[name[result]] | keyword[def] identifier[check_mro] ( identifier[self] , identifier[bases] ):
literal[string]
keyword[try] :
identifier[self] . identifier[add_node] ( literal[string] )
keyword[for] identifier[base] keyword[in] identifier[bases] :
identifier[nx] . identifier[DiGraph] . identifier[add_edge] ( identifier[self] , identifier[base] , literal[string] )
identifier[result] = identifier[self] . identifier[get_mro] ( literal[string] )[ literal[int] :]
keyword[finally] :
identifier[self] . identifier[remove_node] ( literal[string] )
keyword[return] identifier[result] | def check_mro(self, bases):
"""Check if C3 MRO is possible with given bases"""
try:
self.add_node('temp')
for base in bases:
nx.DiGraph.add_edge(self, base, 'temp') # depends on [control=['for'], data=['base']]
result = self.get_mro('temp')[1:] # depends on [control=['try'], data=[]]
finally:
self.remove_node('temp')
return result |
def click(self, x, y):
    '''click at arbitrary coordinates.'''
    # Delegate directly to the device-side jsonrpc endpoint.
    jsonrpc = self.server.jsonrpc
    return jsonrpc.click(x, y)
constant[click at arbitrary coordinates.]
return[call[name[self].server.jsonrpc.click, parameter[name[x], name[y]]]] | keyword[def] identifier[click] ( identifier[self] , identifier[x] , identifier[y] ):
literal[string]
keyword[return] identifier[self] . identifier[server] . identifier[jsonrpc] . identifier[click] ( identifier[x] , identifier[y] ) | def click(self, x, y):
"""click at arbitrary coordinates."""
return self.server.jsonrpc.click(x, y) |
def get(self, page, version=None):
    """Return the content of *page*.

    When *version* is omitted the latest revision is fetched; otherwise
    the requested historical revision is returned.
    """
    if version is None:
        return self._dokuwiki.send('wiki.getPage', page)
    return self._dokuwiki.send('wiki.getPageVersion', page, version)
constant[Returns the content of *page*. The content of the last version is
returned if *version* is not set.
]
return[<ast.IfExp object at 0x7da20c76dff0>] | keyword[def] identifier[get] ( identifier[self] , identifier[page] , identifier[version] = keyword[None] ):
literal[string]
keyword[return] ( identifier[self] . identifier[_dokuwiki] . identifier[send] ( literal[string] , identifier[page] , identifier[version] )
keyword[if] identifier[version] keyword[is] keyword[not] keyword[None]
keyword[else] identifier[self] . identifier[_dokuwiki] . identifier[send] ( literal[string] , identifier[page] )) | def get(self, page, version=None):
"""Returns the content of *page*. The content of the last version is
returned if *version* is not set.
"""
return self._dokuwiki.send('wiki.getPageVersion', page, version) if version is not None else self._dokuwiki.send('wiki.getPage', page) |
def discount_episode_rewards(rewards=None, gamma=0.99, mode=0):
    """Take 1D float array of rewards and compute discounted rewards for an
    episode. When encount a non-zero value, consider as the end a of an episode.
    Parameters
    ----------
    rewards : list
        List of rewards
    gamma : float
        Discounted factor
    mode : int
        Mode for computing the discount rewards.
            - If mode == 0, reset the discount process when encount a non-zero reward (Ping-pong game).
            - If mode == 1, would not reset the discount process.
    Returns
    --------
    list of float
        The discounted rewards.
    Examples
    ----------
    >>> rewards = np.asarray([0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1])
    >>> gamma = 0.9
    >>> discount_rewards = tl.rein.discount_episode_rewards(rewards, gamma)
    >>> print(discount_rewards)
    [ 0.72899997  0.81        0.89999998  1.          0.72899997  0.81
    0.89999998  1.          0.72899997  0.81        0.89999998  1.        ]
    >>> discount_rewards = tl.rein.discount_episode_rewards(rewards, gamma, mode=1)
    >>> print(discount_rewards)
    [ 1.52110755  1.69011939  1.87791049  2.08656716  1.20729685  1.34144104
    1.49048996  1.65610003  0.72899997  0.81        0.89999998  1.        ]
    """
    if rewards is None:
        raise Exception("rewards should be a list")
    discounted_r = np.zeros_like(rewards, dtype=np.float32)
    running_add = 0
    # Walk backwards so each step accumulates the discounted future return.
    # NOTE: builtin ``range`` replaces the Python-2-only ``xrange`` so the
    # function runs on Python 3 without a six.moves import.
    for t in reversed(range(rewards.size)):
        if mode == 0 and rewards[t] != 0:
            # A non-zero reward marks an episode boundary: restart the sum.
            running_add = 0
        running_add = running_add * gamma + rewards[t]
        discounted_r[t] = running_add
    return discounted_r
constant[Take 1D float array of rewards and compute discounted rewards for an
episode. When encount a non-zero value, consider as the end a of an episode.
Parameters
----------
rewards : list
List of rewards
gamma : float
Discounted factor
mode : int
Mode for computing the discount rewards.
- If mode == 0, reset the discount process when encount a non-zero reward (Ping-pong game).
- If mode == 1, would not reset the discount process.
Returns
--------
list of float
The discounted rewards.
Examples
----------
>>> rewards = np.asarray([0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1])
>>> gamma = 0.9
>>> discount_rewards = tl.rein.discount_episode_rewards(rewards, gamma)
>>> print(discount_rewards)
[ 0.72899997 0.81 0.89999998 1. 0.72899997 0.81
0.89999998 1. 0.72899997 0.81 0.89999998 1. ]
>>> discount_rewards = tl.rein.discount_episode_rewards(rewards, gamma, mode=1)
>>> print(discount_rewards)
[ 1.52110755 1.69011939 1.87791049 2.08656716 1.20729685 1.34144104
1.49048996 1.65610003 0.72899997 0.81 0.89999998 1. ]
]
if compare[name[rewards] is constant[None]] begin[:]
<ast.Raise object at 0x7da18bc728f0>
variable[discounted_r] assign[=] call[name[np].zeros_like, parameter[name[rewards]]]
variable[running_add] assign[=] constant[0]
for taget[name[t]] in starred[call[name[reversed], parameter[call[name[xrange], parameter[constant[0], name[rewards].size]]]]] begin[:]
if compare[name[mode] equal[==] constant[0]] begin[:]
if compare[call[name[rewards]][name[t]] not_equal[!=] constant[0]] begin[:]
variable[running_add] assign[=] constant[0]
variable[running_add] assign[=] binary_operation[binary_operation[name[running_add] * name[gamma]] + call[name[rewards]][name[t]]]
call[name[discounted_r]][name[t]] assign[=] name[running_add]
return[name[discounted_r]] | keyword[def] identifier[discount_episode_rewards] ( identifier[rewards] = keyword[None] , identifier[gamma] = literal[int] , identifier[mode] = literal[int] ):
literal[string]
keyword[if] identifier[rewards] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[discounted_r] = identifier[np] . identifier[zeros_like] ( identifier[rewards] , identifier[dtype] = identifier[np] . identifier[float32] )
identifier[running_add] = literal[int]
keyword[for] identifier[t] keyword[in] identifier[reversed] ( identifier[xrange] ( literal[int] , identifier[rewards] . identifier[size] )):
keyword[if] identifier[mode] == literal[int] :
keyword[if] identifier[rewards] [ identifier[t] ]!= literal[int] : identifier[running_add] = literal[int]
identifier[running_add] = identifier[running_add] * identifier[gamma] + identifier[rewards] [ identifier[t] ]
identifier[discounted_r] [ identifier[t] ]= identifier[running_add]
keyword[return] identifier[discounted_r] | def discount_episode_rewards(rewards=None, gamma=0.99, mode=0):
"""Take 1D float array of rewards and compute discounted rewards for an
episode. When encount a non-zero value, consider as the end a of an episode.
Parameters
----------
rewards : list
List of rewards
gamma : float
Discounted factor
mode : int
Mode for computing the discount rewards.
- If mode == 0, reset the discount process when encount a non-zero reward (Ping-pong game).
- If mode == 1, would not reset the discount process.
Returns
--------
list of float
The discounted rewards.
Examples
----------
>>> rewards = np.asarray([0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1])
>>> gamma = 0.9
>>> discount_rewards = tl.rein.discount_episode_rewards(rewards, gamma)
>>> print(discount_rewards)
[ 0.72899997 0.81 0.89999998 1. 0.72899997 0.81
0.89999998 1. 0.72899997 0.81 0.89999998 1. ]
>>> discount_rewards = tl.rein.discount_episode_rewards(rewards, gamma, mode=1)
>>> print(discount_rewards)
[ 1.52110755 1.69011939 1.87791049 2.08656716 1.20729685 1.34144104
1.49048996 1.65610003 0.72899997 0.81 0.89999998 1. ]
"""
if rewards is None:
raise Exception('rewards should be a list') # depends on [control=['if'], data=[]]
discounted_r = np.zeros_like(rewards, dtype=np.float32)
running_add = 0
for t in reversed(xrange(0, rewards.size)):
if mode == 0:
if rewards[t] != 0:
running_add = 0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
running_add = running_add * gamma + rewards[t]
discounted_r[t] = running_add # depends on [control=['for'], data=['t']]
return discounted_r |
def create_training_job(self, TrainingJobName, AlgorithmSpecification, OutputDataConfig,
                        ResourceConfig, InputDataConfig=None, **kwargs):
    """
    Create a training job in Local Mode
    Args:
        TrainingJobName (str): local training job name.
        AlgorithmSpecification (dict): Identifies the training algorithm to use.
        InputDataConfig (dict): Describes the training dataset and the location where it is stored.
        OutputDataConfig (dict): Identifies the location where you want to save the results of model training.
        ResourceConfig (dict): Identifies the resources to use for local model traininig.
        HyperParameters (dict) [optional]: Specifies these algorithm-specific parameters to influence the quality of
            the final model.
    """
    InputDataConfig = InputDataConfig or {}
    # Build a local container mirroring the requested resources and image.
    container = _SageMakerContainer(
        ResourceConfig['InstanceType'],
        ResourceConfig['InstanceCount'],
        AlgorithmSpecification['TrainingImage'],
        self.sagemaker_session,
    )
    training_job = _LocalTrainingJob(container)
    hyperparameters = kwargs.get('HyperParameters', {})
    training_job.start(InputDataConfig, OutputDataConfig, hyperparameters, TrainingJobName)
    # Register the job so later describe-style lookups can find it.
    LocalSagemakerClient._training_jobs[TrainingJobName] = training_job
constant[
Create a training job in Local Mode
Args:
TrainingJobName (str): local training job name.
AlgorithmSpecification (dict): Identifies the training algorithm to use.
InputDataConfig (dict): Describes the training dataset and the location where it is stored.
OutputDataConfig (dict): Identifies the location where you want to save the results of model training.
ResourceConfig (dict): Identifies the resources to use for local model traininig.
HyperParameters (dict) [optional]: Specifies these algorithm-specific parameters to influence the quality of
the final model.
]
variable[InputDataConfig] assign[=] <ast.BoolOp object at 0x7da1b215f790>
variable[container] assign[=] call[name[_SageMakerContainer], parameter[call[name[ResourceConfig]][constant[InstanceType]], call[name[ResourceConfig]][constant[InstanceCount]], call[name[AlgorithmSpecification]][constant[TrainingImage]], name[self].sagemaker_session]]
variable[training_job] assign[=] call[name[_LocalTrainingJob], parameter[name[container]]]
variable[hyperparameters] assign[=] <ast.IfExp object at 0x7da1b215e0b0>
call[name[training_job].start, parameter[name[InputDataConfig], name[OutputDataConfig], name[hyperparameters], name[TrainingJobName]]]
call[name[LocalSagemakerClient]._training_jobs][name[TrainingJobName]] assign[=] name[training_job] | keyword[def] identifier[create_training_job] ( identifier[self] , identifier[TrainingJobName] , identifier[AlgorithmSpecification] , identifier[OutputDataConfig] ,
identifier[ResourceConfig] , identifier[InputDataConfig] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[InputDataConfig] = identifier[InputDataConfig] keyword[or] {}
identifier[container] = identifier[_SageMakerContainer] ( identifier[ResourceConfig] [ literal[string] ], identifier[ResourceConfig] [ literal[string] ],
identifier[AlgorithmSpecification] [ literal[string] ], identifier[self] . identifier[sagemaker_session] )
identifier[training_job] = identifier[_LocalTrainingJob] ( identifier[container] )
identifier[hyperparameters] = identifier[kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[else] {}
identifier[training_job] . identifier[start] ( identifier[InputDataConfig] , identifier[OutputDataConfig] , identifier[hyperparameters] , identifier[TrainingJobName] )
identifier[LocalSagemakerClient] . identifier[_training_jobs] [ identifier[TrainingJobName] ]= identifier[training_job] | def create_training_job(self, TrainingJobName, AlgorithmSpecification, OutputDataConfig, ResourceConfig, InputDataConfig=None, **kwargs):
"""
Create a training job in Local Mode
Args:
TrainingJobName (str): local training job name.
AlgorithmSpecification (dict): Identifies the training algorithm to use.
InputDataConfig (dict): Describes the training dataset and the location where it is stored.
OutputDataConfig (dict): Identifies the location where you want to save the results of model training.
ResourceConfig (dict): Identifies the resources to use for local model traininig.
HyperParameters (dict) [optional]: Specifies these algorithm-specific parameters to influence the quality of
the final model.
"""
InputDataConfig = InputDataConfig or {}
container = _SageMakerContainer(ResourceConfig['InstanceType'], ResourceConfig['InstanceCount'], AlgorithmSpecification['TrainingImage'], self.sagemaker_session)
training_job = _LocalTrainingJob(container)
hyperparameters = kwargs['HyperParameters'] if 'HyperParameters' in kwargs else {}
training_job.start(InputDataConfig, OutputDataConfig, hyperparameters, TrainingJobName)
LocalSagemakerClient._training_jobs[TrainingJobName] = training_job |
def get_zonefile_data(self, zonefile_hash, zonefile_dir):
    """
    Get a zonefile by hash
    Return the serialized zonefile on success
    Return None on error
    """
    # Consult the local atlas cache first (hash verification deferred).
    cached = get_atlas_zonefile_data(zonefile_hash, zonefile_dir, check=False)
    if cached is None:
        return None
    # Verify that the cached bytes really hash to the requested value;
    # evict the entry if it is corrupt or stale.
    if get_zonefile_data_hash(cached) != zonefile_hash:
        log.debug("Invalid local zonefile %s" % zonefile_hash)
        remove_atlas_zonefile_data(zonefile_hash, zonefile_dir)
        return None
    log.debug("Zonefile %s is local" % zonefile_hash)
    return cached
constant[
Get a zonefile by hash
Return the serialized zonefile on success
Return None on error
]
variable[atlas_zonefile_data] assign[=] call[name[get_atlas_zonefile_data], parameter[name[zonefile_hash], name[zonefile_dir]]]
if compare[name[atlas_zonefile_data] is_not constant[None]] begin[:]
variable[zfh] assign[=] call[name[get_zonefile_data_hash], parameter[name[atlas_zonefile_data]]]
if compare[name[zfh] not_equal[!=] name[zonefile_hash]] begin[:]
call[name[log].debug, parameter[binary_operation[constant[Invalid local zonefile %s] <ast.Mod object at 0x7da2590d6920> name[zonefile_hash]]]]
call[name[remove_atlas_zonefile_data], parameter[name[zonefile_hash], name[zonefile_dir]]]
return[constant[None]] | keyword[def] identifier[get_zonefile_data] ( identifier[self] , identifier[zonefile_hash] , identifier[zonefile_dir] ):
literal[string]
identifier[atlas_zonefile_data] = identifier[get_atlas_zonefile_data] ( identifier[zonefile_hash] , identifier[zonefile_dir] , identifier[check] = keyword[False] )
keyword[if] identifier[atlas_zonefile_data] keyword[is] keyword[not] keyword[None] :
identifier[zfh] = identifier[get_zonefile_data_hash] ( identifier[atlas_zonefile_data] )
keyword[if] identifier[zfh] != identifier[zonefile_hash] :
identifier[log] . identifier[debug] ( literal[string] % identifier[zonefile_hash] )
identifier[remove_atlas_zonefile_data] ( identifier[zonefile_hash] , identifier[zonefile_dir] )
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] % identifier[zonefile_hash] )
keyword[return] identifier[atlas_zonefile_data]
keyword[return] keyword[None] | def get_zonefile_data(self, zonefile_hash, zonefile_dir):
"""
Get a zonefile by hash
Return the serialized zonefile on success
Return None on error
"""
# check cache
atlas_zonefile_data = get_atlas_zonefile_data(zonefile_hash, zonefile_dir, check=False)
if atlas_zonefile_data is not None:
# check hash
zfh = get_zonefile_data_hash(atlas_zonefile_data)
if zfh != zonefile_hash:
log.debug('Invalid local zonefile %s' % zonefile_hash)
remove_atlas_zonefile_data(zonefile_hash, zonefile_dir) # depends on [control=['if'], data=['zonefile_hash']]
else:
log.debug('Zonefile %s is local' % zonefile_hash)
return atlas_zonefile_data # depends on [control=['if'], data=['atlas_zonefile_data']]
return None |
def compare_schemas(self, db_x, db_y, show=True):
    """
    Compare the structures of two databases.
    Analysis's and compares the column definitions of each table
    in both databases's.  Identifies differences in column names,
    data types and keys.

    Returns the full diff of the two schemas, or None when either
    database has no tables at all.
    """
    # TODO: Improve method
    self._printer("\tComparing database schema's {0} and {1}".format(db_x, db_y))

    # Run compare_dbs_getter to get row counts
    x = self._schema_getter(db_x)
    y = self._schema_getter(db_y)
    x_count = len(x)
    y_count = len(y)

    # Check that database does not have zero tables
    if x_count == 0:
        self._printer('\tThe database {0} has no tables'.format(db_x))
        self._printer('\tDatabase differencing was not run')
        return None
    elif y_count == 0:
        self._printer('\tThe database {0} has no tables'.format(db_y))
        self._printer('\tDatabase differencing was not run')
        return None

    # Print comparisons
    if show:
        uniques_x = diff(x, y, x_only=True)
        if len(uniques_x) > 0:
            self._printer('\nUnique keys from {0} ({1} of {2}):'.format(db_x, len(uniques_x), x_count))
            self._printer('------------------------------')
            for k, v in sorted(uniques_x):
                self._printer('{0:25} {1}'.format(k, v))
            self._printer('\n')

        uniques_y = diff(x, y, y_only=True)
        if len(uniques_y) > 0:
            self._printer('Unique keys from {0} ({1} of {2}):'.format(db_y, len(uniques_y), y_count))
            self._printer('------------------------------')
            for k, v in sorted(uniques_y):
                self._printer('{0:25} {1}'.format(k, v))
            self._printer('\n')

        # BUG FIX: the original tested len(uniques_y) twice, so databases
        # were reported identical whenever db_y had no unique keys, even
        # if db_x did.  Both sides must be empty.
        if len(uniques_x) == 0 and len(uniques_y) == 0:
            self._printer("Databases's {0} and {1} are identical:".format(db_x, db_y))
            self._printer('------------------------------')
    return diff(x, y)
constant[
Compare the structures of two databases.
Analysis's and compares the column definitions of each table
in both databases's. Identifies differences in column names,
data types and keys.
]
call[name[self]._printer, parameter[call[constant[ Comparing database schema's {0} and {1}].format, parameter[name[db_x], name[db_y]]]]]
variable[x] assign[=] call[name[self]._schema_getter, parameter[name[db_x]]]
variable[y] assign[=] call[name[self]._schema_getter, parameter[name[db_y]]]
variable[x_count] assign[=] call[name[len], parameter[name[x]]]
variable[y_count] assign[=] call[name[len], parameter[name[y]]]
if compare[name[x_count] equal[==] constant[0]] begin[:]
call[name[self]._printer, parameter[call[constant[ The database {0} has no tables].format, parameter[name[db_x]]]]]
call[name[self]._printer, parameter[constant[ Database differencing was not run]]]
return[constant[None]]
if name[show] begin[:]
variable[uniques_x] assign[=] call[name[diff], parameter[name[x], name[y]]]
if compare[call[name[len], parameter[name[uniques_x]]] greater[>] constant[0]] begin[:]
call[name[self]._printer, parameter[call[constant[
Unique keys from {0} ({1} of {2}):].format, parameter[name[db_x], call[name[len], parameter[name[uniques_x]]], name[x_count]]]]]
call[name[self]._printer, parameter[constant[------------------------------]]]
for taget[tuple[[<ast.Name object at 0x7da1b0bd29e0>, <ast.Name object at 0x7da1b0bd28c0>]]] in starred[call[name[sorted], parameter[name[uniques_x]]]] begin[:]
call[name[self]._printer, parameter[call[constant[{0:25} {1}].format, parameter[name[k], name[v]]]]]
call[name[self]._printer, parameter[constant[
]]]
variable[uniques_y] assign[=] call[name[diff], parameter[name[x], name[y]]]
if compare[call[name[len], parameter[name[uniques_y]]] greater[>] constant[0]] begin[:]
call[name[self]._printer, parameter[call[constant[Unique keys from {0} ({1} of {2}):].format, parameter[name[db_y], call[name[len], parameter[name[uniques_y]]], name[y_count]]]]]
call[name[self]._printer, parameter[constant[------------------------------]]]
for taget[tuple[[<ast.Name object at 0x7da1b0bd1390>, <ast.Name object at 0x7da1b0bd0670>]]] in starred[call[name[sorted], parameter[name[uniques_y]]]] begin[:]
call[name[self]._printer, parameter[call[constant[{0:25} {1}].format, parameter[name[k], name[v]]]]]
call[name[self]._printer, parameter[constant[
]]]
if <ast.BoolOp object at 0x7da1b0bd0400> begin[:]
call[name[self]._printer, parameter[call[constant[Databases's {0} and {1} are identical:].format, parameter[name[db_x], name[db_y]]]]]
call[name[self]._printer, parameter[constant[------------------------------]]]
return[call[name[diff], parameter[name[x], name[y]]]] | keyword[def] identifier[compare_schemas] ( identifier[self] , identifier[db_x] , identifier[db_y] , identifier[show] = keyword[True] ):
literal[string]
identifier[self] . identifier[_printer] ( literal[string] . identifier[format] ( identifier[db_x] , identifier[db_y] ))
identifier[x] = identifier[self] . identifier[_schema_getter] ( identifier[db_x] )
identifier[y] = identifier[self] . identifier[_schema_getter] ( identifier[db_y] )
identifier[x_count] = identifier[len] ( identifier[x] )
identifier[y_count] = identifier[len] ( identifier[y] )
keyword[if] identifier[x_count] == literal[int] :
identifier[self] . identifier[_printer] ( literal[string] . identifier[format] ( identifier[db_x] ))
identifier[self] . identifier[_printer] ( literal[string] )
keyword[return] keyword[None]
keyword[elif] identifier[y_count] == literal[int] :
identifier[self] . identifier[_printer] ( literal[string] . identifier[format] ( identifier[db_y] ))
identifier[self] . identifier[_printer] ( literal[string] )
keyword[return] keyword[None]
keyword[if] identifier[show] :
identifier[uniques_x] = identifier[diff] ( identifier[x] , identifier[y] , identifier[x_only] = keyword[True] )
keyword[if] identifier[len] ( identifier[uniques_x] )> literal[int] :
identifier[self] . identifier[_printer] ( literal[string] . identifier[format] ( identifier[db_x] , identifier[len] ( identifier[uniques_x] ), identifier[x_count] ))
identifier[self] . identifier[_printer] ( literal[string] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[sorted] ( identifier[uniques_x] ):
identifier[self] . identifier[_printer] ( literal[string] . identifier[format] ( identifier[k] , identifier[v] ))
identifier[self] . identifier[_printer] ( literal[string] )
identifier[uniques_y] = identifier[diff] ( identifier[x] , identifier[y] , identifier[y_only] = keyword[True] )
keyword[if] identifier[len] ( identifier[uniques_y] )> literal[int] :
identifier[self] . identifier[_printer] ( literal[string] . identifier[format] ( identifier[db_y] , identifier[len] ( identifier[uniques_y] ), identifier[y_count] ))
identifier[self] . identifier[_printer] ( literal[string] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[sorted] ( identifier[uniques_y] ):
identifier[self] . identifier[_printer] ( literal[string] . identifier[format] ( identifier[k] , identifier[v] ))
identifier[self] . identifier[_printer] ( literal[string] )
keyword[if] identifier[len] ( identifier[uniques_y] )== literal[int] keyword[and] identifier[len] ( identifier[uniques_y] )== literal[int] :
identifier[self] . identifier[_printer] ( literal[string] . identifier[format] ( identifier[db_x] , identifier[db_y] ))
identifier[self] . identifier[_printer] ( literal[string] )
keyword[return] identifier[diff] ( identifier[x] , identifier[y] ) | def compare_schemas(self, db_x, db_y, show=True):
"""
Compare the structures of two databases.
Analysis's and compares the column definitions of each table
in both databases's. Identifies differences in column names,
data types and keys.
"""
# TODO: Improve method
self._printer("\tComparing database schema's {0} and {1}".format(db_x, db_y))
# Run compare_dbs_getter to get row counts
x = self._schema_getter(db_x)
y = self._schema_getter(db_y)
x_count = len(x)
y_count = len(y)
# Check that database does not have zero tables
if x_count == 0:
self._printer('\tThe database {0} has no tables'.format(db_x))
self._printer('\tDatabase differencing was not run')
return None # depends on [control=['if'], data=[]]
elif y_count == 0:
self._printer('\tThe database {0} has no tables'.format(db_y))
self._printer('\tDatabase differencing was not run')
return None # depends on [control=['if'], data=[]]
# Print comparisons
if show:
uniques_x = diff(x, y, x_only=True)
if len(uniques_x) > 0:
self._printer('\nUnique keys from {0} ({1} of {2}):'.format(db_x, len(uniques_x), x_count))
self._printer('------------------------------')
# print(uniques)
for (k, v) in sorted(uniques_x):
self._printer('{0:25} {1}'.format(k, v)) # depends on [control=['for'], data=[]]
self._printer('\n') # depends on [control=['if'], data=[]]
uniques_y = diff(x, y, y_only=True)
if len(uniques_y) > 0:
self._printer('Unique keys from {0} ({1} of {2}):'.format(db_y, len(uniques_y), y_count))
self._printer('------------------------------')
for (k, v) in sorted(uniques_y):
self._printer('{0:25} {1}'.format(k, v)) # depends on [control=['for'], data=[]]
self._printer('\n') # depends on [control=['if'], data=[]]
if len(uniques_y) == 0 and len(uniques_y) == 0:
self._printer("Databases's {0} and {1} are identical:".format(db_x, db_y))
self._printer('------------------------------') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return diff(x, y) |
def get_label(self):
    """
    get label rdd from ImageFrame
    """
    # Ask the JVM side for the label tensors, then convert each JTensor
    # into a numpy ndarray on the Python side.
    labels = callBigDlFunc(self.bigdl_type, "distributedImageFrameToLabelTensorRdd", self.value)
    return labels.map(lambda jtensor: jtensor.to_ndarray())
constant[
get label rdd from ImageFrame
]
variable[tensor_rdd] assign[=] call[name[callBigDlFunc], parameter[name[self].bigdl_type, constant[distributedImageFrameToLabelTensorRdd], name[self].value]]
return[call[name[tensor_rdd].map, parameter[<ast.Lambda object at 0x7da2054a7250>]]] | keyword[def] identifier[get_label] ( identifier[self] ):
literal[string]
identifier[tensor_rdd] = identifier[callBigDlFunc] ( identifier[self] . identifier[bigdl_type] , literal[string] , identifier[self] . identifier[value] )
keyword[return] identifier[tensor_rdd] . identifier[map] ( keyword[lambda] identifier[tensor] : identifier[tensor] . identifier[to_ndarray] ()) | def get_label(self):
"""
get label rdd from ImageFrame
"""
tensor_rdd = callBigDlFunc(self.bigdl_type, 'distributedImageFrameToLabelTensorRdd', self.value)
return tensor_rdd.map(lambda tensor: tensor.to_ndarray()) |
async def delete(self):
    """Delete this VLAN."""
    # Delete by the vid captured when the object was loaded: the user may
    # have changed ``vid`` locally, but until a save the server still
    # knows this VLAN under its original vid.
    original_vid = self._orig_data['vid']
    await self._handler.delete(fabric_id=self.fabric.id, vid=original_vid)
literal[string]
keyword[await] identifier[self] . identifier[_handler] . identifier[delete] (
identifier[fabric_id] = identifier[self] . identifier[fabric] . identifier[id] , identifier[vid] = identifier[self] . identifier[_orig_data] [ literal[string] ]) | async def delete(self):
"""Delete this VLAN."""
# Since the VID can be changed for the VLAN, we always use the vid
# from the original data. That way if the user changes the vid the
# delete still works, until the VLAN has been saved.
await self._handler.delete(fabric_id=self.fabric.id, vid=self._orig_data['vid']) |
def get_module_can_publish(cursor, id):
    """Return userids allowed to publish this book."""
    # ``id`` shadows the builtin but is kept for interface compatibility.
    cursor.execute("""
SELECT DISTINCT user_id
FROM document_acl
WHERE uuid = %s AND permission = 'publish'""", (id,))
    rows = cursor.fetchall()
    # Each row is a single-column tuple; unwrap to a flat list of userids.
    return [row[0] for row in rows]
constant[Return userids allowed to publish this book.]
call[name[cursor].execute, parameter[constant[
SELECT DISTINCT user_id
FROM document_acl
WHERE uuid = %s AND permission = 'publish'], tuple[[<ast.Name object at 0x7da1b1803a90>]]]]
return[<ast.ListComp object at 0x7da1b1803550>] | keyword[def] identifier[get_module_can_publish] ( identifier[cursor] , identifier[id] ):
literal[string]
identifier[cursor] . identifier[execute] ( literal[string] ,( identifier[id] ,))
keyword[return] [ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[cursor] . identifier[fetchall] ()] | def get_module_can_publish(cursor, id):
"""Return userids allowed to publish this book."""
cursor.execute("\nSELECT DISTINCT user_id\nFROM document_acl\nWHERE uuid = %s AND permission = 'publish'", (id,))
return [i[0] for i in cursor.fetchall()] |
def del_work_units(self, work_spec_name, work_unit_keys=None,
                   state=None, all=False):
    '''Delete work units from a work spec.

    The parameters are considered in order as follows:

    * If `all` is :const:`True`, then all work units in
      `work_spec_name` are deleted; otherwise
    * If `state` is not :const:`None`, then all work units
      in the named state are deleted; otherwise
    * If `work_unit_keys` are specified, then those specific
      work units are deleted; otherwise
    * Nothing is deleted.

    :param str work_spec_name: name of the work spec
    :param list work_unit_keys: if not :const:`None`, only delete
      these specific keys
    :param str state: only delete work units in this state
    :param bool all: if true, delete all work units
    :return: number of work units deleted

    '''
    # One remover per work-unit state, processed in a fixed order.
    removers = (
        (AVAILABLE, self.remove_available_work_units),
        (PENDING, self.remove_pending_work_units),
        (BLOCKED, self.remove_blocked_work_units),
        (FAILED, self.remove_failed_work_units),
        (FINISHED, self.remove_finished_work_units),
    )
    total = 0
    for unit_state, remover in removers:
        # state=None means "every state"; otherwise only the matching one.
        if state is None or state == unit_state:
            total += remover(work_spec_name, work_unit_keys)
    return total
constant[Delete work units from a work spec.
The parameters are considered in order as follows:
* If `all` is :const:`True`, then all work units in
`work_spec_name` are deleted; otherwise
* If `state` is not :const:`None`, then all work units
in the named state are deleted; otherwise
* If `work_unit_keys` are specified, then those specific
work units are deleted; otherwise
* Nothing is deleted.
:param str work_spec_name: name of the work spec
:param list work_unit_keys: if not :const:`None`, only delete
these specific keys
:param str state: only delete work units in this state
:param bool all: if true, delete all work units
:return: number of work units deleted
]
variable[count] assign[=] constant[0]
if <ast.BoolOp object at 0x7da1b14d4970> begin[:]
<ast.AugAssign object at 0x7da1b14d5960>
if <ast.BoolOp object at 0x7da1b14d6fe0> begin[:]
<ast.AugAssign object at 0x7da1b14d78b0>
if <ast.BoolOp object at 0x7da1b14d6da0> begin[:]
<ast.AugAssign object at 0x7da1b14e7160>
if <ast.BoolOp object at 0x7da1b14e46d0> begin[:]
<ast.AugAssign object at 0x7da1b14e4490>
if <ast.BoolOp object at 0x7da1b14e4f10> begin[:]
<ast.AugAssign object at 0x7da1b14e4670>
return[name[count]] | keyword[def] identifier[del_work_units] ( identifier[self] , identifier[work_spec_name] , identifier[work_unit_keys] = keyword[None] ,
identifier[state] = keyword[None] , identifier[all] = keyword[False] ):
literal[string]
identifier[count] = literal[int]
keyword[if] ( identifier[state] keyword[is] keyword[None] ) keyword[or] ( identifier[state] == identifier[AVAILABLE] ):
identifier[count] += identifier[self] . identifier[remove_available_work_units] ( identifier[work_spec_name] , identifier[work_unit_keys] )
keyword[if] ( identifier[state] keyword[is] keyword[None] ) keyword[or] ( identifier[state] == identifier[PENDING] ):
identifier[count] += identifier[self] . identifier[remove_pending_work_units] ( identifier[work_spec_name] , identifier[work_unit_keys] )
keyword[if] ( identifier[state] keyword[is] keyword[None] ) keyword[or] ( identifier[state] == identifier[BLOCKED] ):
identifier[count] += identifier[self] . identifier[remove_blocked_work_units] ( identifier[work_spec_name] , identifier[work_unit_keys] )
keyword[if] ( identifier[state] keyword[is] keyword[None] ) keyword[or] ( identifier[state] == identifier[FAILED] ):
identifier[count] += identifier[self] . identifier[remove_failed_work_units] ( identifier[work_spec_name] , identifier[work_unit_keys] )
keyword[if] ( identifier[state] keyword[is] keyword[None] ) keyword[or] ( identifier[state] == identifier[FINISHED] ):
identifier[count] += identifier[self] . identifier[remove_finished_work_units] ( identifier[work_spec_name] , identifier[work_unit_keys] )
keyword[return] identifier[count] | def del_work_units(self, work_spec_name, work_unit_keys=None, state=None, all=False):
"""Delete work units from a work spec.
The parameters are considered in order as follows:
* If `all` is :const:`True`, then all work units in
`work_spec_name` are deleted; otherwise
* If `state` is not :const:`None`, then all work units
in the named state are deleted; otherwise
* If `work_unit_keys` are specified, then those specific
work units are deleted; otherwise
* Nothing is deleted.
:param str work_spec_name: name of the work spec
:param list work_unit_keys: if not :const:`None`, only delete
these specific keys
:param str state: only delete work units in this state
:param bool all: if true, delete all work units
:return: number of work units deleted
"""
count = 0
if state is None or state == AVAILABLE:
count += self.remove_available_work_units(work_spec_name, work_unit_keys) # depends on [control=['if'], data=[]]
if state is None or state == PENDING:
count += self.remove_pending_work_units(work_spec_name, work_unit_keys) # depends on [control=['if'], data=[]]
if state is None or state == BLOCKED:
count += self.remove_blocked_work_units(work_spec_name, work_unit_keys) # depends on [control=['if'], data=[]]
if state is None or state == FAILED:
count += self.remove_failed_work_units(work_spec_name, work_unit_keys) # depends on [control=['if'], data=[]]
if state is None or state == FINISHED:
count += self.remove_finished_work_units(work_spec_name, work_unit_keys) # depends on [control=['if'], data=[]]
return count |
def undo_filter_sub(filter_unit, scanline, previous, result):
"""Undo sub filter."""
ai = 0
# Loops starts at index fu. Observe that the initial part
# of the result is already filled in correctly with
# scanline.
for i in range(filter_unit, len(result)):
x = scanline[i]
a = result[ai]
result[i] = (x + a) & 0xff
ai += 1 | def function[undo_filter_sub, parameter[filter_unit, scanline, previous, result]]:
constant[Undo sub filter.]
variable[ai] assign[=] constant[0]
for taget[name[i]] in starred[call[name[range], parameter[name[filter_unit], call[name[len], parameter[name[result]]]]]] begin[:]
variable[x] assign[=] call[name[scanline]][name[i]]
variable[a] assign[=] call[name[result]][name[ai]]
call[name[result]][name[i]] assign[=] binary_operation[binary_operation[name[x] + name[a]] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]
<ast.AugAssign object at 0x7da1b0780970> | keyword[def] identifier[undo_filter_sub] ( identifier[filter_unit] , identifier[scanline] , identifier[previous] , identifier[result] ):
literal[string]
identifier[ai] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[filter_unit] , identifier[len] ( identifier[result] )):
identifier[x] = identifier[scanline] [ identifier[i] ]
identifier[a] = identifier[result] [ identifier[ai] ]
identifier[result] [ identifier[i] ]=( identifier[x] + identifier[a] )& literal[int]
identifier[ai] += literal[int] | def undo_filter_sub(filter_unit, scanline, previous, result):
"""Undo sub filter."""
ai = 0
# Loops starts at index fu. Observe that the initial part
# of the result is already filled in correctly with
# scanline.
for i in range(filter_unit, len(result)):
x = scanline[i]
a = result[ai]
result[i] = x + a & 255
ai += 1 # depends on [control=['for'], data=['i']] |
def to_tnw(orbit):
"""In the TNW Local Orbital Reference Frame, x is oriented along the velocity vector,
z along the angular momentum, and y complete the frame.
Args:
orbit (list): Array of length 6
Return:
numpy.ndarray: matrix to convert from inertial frame to TNW.
>>> delta_tnw = [1, 0, 0]
>>> p = [-6142438.668, 3492467.560, -25767.25680]
>>> v = [505.8479685, 942.7809215, 7435.922231]
>>> pv = p + v
>>> mat = to_tnw(pv).T
>>> delta_inert = mat @ delta_tnw
>>> all(delta_inert == v / norm(v))
True
"""
pos, vel = _split(orbit)
t = vel / norm(vel)
w = np.cross(pos, vel) / (norm(pos) * norm(vel))
n = np.cross(w, t)
return np.array([t, n, w]) | def function[to_tnw, parameter[orbit]]:
constant[In the TNW Local Orbital Reference Frame, x is oriented along the velocity vector,
z along the angular momentum, and y complete the frame.
Args:
orbit (list): Array of length 6
Return:
numpy.ndarray: matrix to convert from inertial frame to TNW.
>>> delta_tnw = [1, 0, 0]
>>> p = [-6142438.668, 3492467.560, -25767.25680]
>>> v = [505.8479685, 942.7809215, 7435.922231]
>>> pv = p + v
>>> mat = to_tnw(pv).T
>>> delta_inert = mat @ delta_tnw
>>> all(delta_inert == v / norm(v))
True
]
<ast.Tuple object at 0x7da1b0cb6140> assign[=] call[name[_split], parameter[name[orbit]]]
variable[t] assign[=] binary_operation[name[vel] / call[name[norm], parameter[name[vel]]]]
variable[w] assign[=] binary_operation[call[name[np].cross, parameter[name[pos], name[vel]]] / binary_operation[call[name[norm], parameter[name[pos]]] * call[name[norm], parameter[name[vel]]]]]
variable[n] assign[=] call[name[np].cross, parameter[name[w], name[t]]]
return[call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b0ebed40>, <ast.Name object at 0x7da1b0ebc310>, <ast.Name object at 0x7da1b0ebe3b0>]]]]] | keyword[def] identifier[to_tnw] ( identifier[orbit] ):
literal[string]
identifier[pos] , identifier[vel] = identifier[_split] ( identifier[orbit] )
identifier[t] = identifier[vel] / identifier[norm] ( identifier[vel] )
identifier[w] = identifier[np] . identifier[cross] ( identifier[pos] , identifier[vel] )/( identifier[norm] ( identifier[pos] )* identifier[norm] ( identifier[vel] ))
identifier[n] = identifier[np] . identifier[cross] ( identifier[w] , identifier[t] )
keyword[return] identifier[np] . identifier[array] ([ identifier[t] , identifier[n] , identifier[w] ]) | def to_tnw(orbit):
"""In the TNW Local Orbital Reference Frame, x is oriented along the velocity vector,
z along the angular momentum, and y complete the frame.
Args:
orbit (list): Array of length 6
Return:
numpy.ndarray: matrix to convert from inertial frame to TNW.
>>> delta_tnw = [1, 0, 0]
>>> p = [-6142438.668, 3492467.560, -25767.25680]
>>> v = [505.8479685, 942.7809215, 7435.922231]
>>> pv = p + v
>>> mat = to_tnw(pv).T
>>> delta_inert = mat @ delta_tnw
>>> all(delta_inert == v / norm(v))
True
"""
(pos, vel) = _split(orbit)
t = vel / norm(vel)
w = np.cross(pos, vel) / (norm(pos) * norm(vel))
n = np.cross(w, t)
return np.array([t, n, w]) |
def gcs_write(self, log, remote_log_location, append=True):
"""
Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: str
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
:type append: bool
"""
if append:
try:
old_log = self.gcs_read(remote_log_location)
log = '\n'.join([old_log, log]) if old_log else log
except Exception as e:
if not hasattr(e, 'resp') or e.resp.get('status') != '404':
log = '*** Previous log discarded: {}\n\n'.format(str(e)) + log
try:
bkt, blob = self.parse_gcs_url(remote_log_location)
from tempfile import NamedTemporaryFile
with NamedTemporaryFile(mode='w+') as tmpfile:
tmpfile.write(log)
# Force the file to be flushed, since we're doing the
# upload from within the file context (it hasn't been
# closed).
tmpfile.flush()
self.hook.upload(bkt, blob, tmpfile.name)
except Exception as e:
self.log.error('Could not write logs to %s: %s', remote_log_location, e) | def function[gcs_write, parameter[self, log, remote_log_location, append]]:
constant[
Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: str
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
:type append: bool
]
if name[append] begin[:]
<ast.Try object at 0x7da20c6c6ce0>
<ast.Try object at 0x7da20c6c5e70> | keyword[def] identifier[gcs_write] ( identifier[self] , identifier[log] , identifier[remote_log_location] , identifier[append] = keyword[True] ):
literal[string]
keyword[if] identifier[append] :
keyword[try] :
identifier[old_log] = identifier[self] . identifier[gcs_read] ( identifier[remote_log_location] )
identifier[log] = literal[string] . identifier[join] ([ identifier[old_log] , identifier[log] ]) keyword[if] identifier[old_log] keyword[else] identifier[log]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[e] , literal[string] ) keyword[or] identifier[e] . identifier[resp] . identifier[get] ( literal[string] )!= literal[string] :
identifier[log] = literal[string] . identifier[format] ( identifier[str] ( identifier[e] ))+ identifier[log]
keyword[try] :
identifier[bkt] , identifier[blob] = identifier[self] . identifier[parse_gcs_url] ( identifier[remote_log_location] )
keyword[from] identifier[tempfile] keyword[import] identifier[NamedTemporaryFile]
keyword[with] identifier[NamedTemporaryFile] ( identifier[mode] = literal[string] ) keyword[as] identifier[tmpfile] :
identifier[tmpfile] . identifier[write] ( identifier[log] )
identifier[tmpfile] . identifier[flush] ()
identifier[self] . identifier[hook] . identifier[upload] ( identifier[bkt] , identifier[blob] , identifier[tmpfile] . identifier[name] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[log] . identifier[error] ( literal[string] , identifier[remote_log_location] , identifier[e] ) | def gcs_write(self, log, remote_log_location, append=True):
"""
Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: str
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
:type append: bool
"""
if append:
try:
old_log = self.gcs_read(remote_log_location)
log = '\n'.join([old_log, log]) if old_log else log # depends on [control=['try'], data=[]]
except Exception as e:
if not hasattr(e, 'resp') or e.resp.get('status') != '404':
log = '*** Previous log discarded: {}\n\n'.format(str(e)) + log # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
try:
(bkt, blob) = self.parse_gcs_url(remote_log_location)
from tempfile import NamedTemporaryFile
with NamedTemporaryFile(mode='w+') as tmpfile:
tmpfile.write(log)
# Force the file to be flushed, since we're doing the
# upload from within the file context (it hasn't been
# closed).
tmpfile.flush()
self.hook.upload(bkt, blob, tmpfile.name) # depends on [control=['with'], data=['tmpfile']] # depends on [control=['try'], data=[]]
except Exception as e:
self.log.error('Could not write logs to %s: %s', remote_log_location, e) # depends on [control=['except'], data=['e']] |
def is_subdirectory(path_a, path_b):
"""Returns True if `path_a` is a subdirectory of `path_b`."""
path_a = os.path.realpath(path_a)
path_b = os.path.realpath(path_b)
relative = os.path.relpath(path_a, path_b)
return (not relative.startswith(os.pardir + os.sep)) | def function[is_subdirectory, parameter[path_a, path_b]]:
constant[Returns True if `path_a` is a subdirectory of `path_b`.]
variable[path_a] assign[=] call[name[os].path.realpath, parameter[name[path_a]]]
variable[path_b] assign[=] call[name[os].path.realpath, parameter[name[path_b]]]
variable[relative] assign[=] call[name[os].path.relpath, parameter[name[path_a], name[path_b]]]
return[<ast.UnaryOp object at 0x7da18f811cc0>] | keyword[def] identifier[is_subdirectory] ( identifier[path_a] , identifier[path_b] ):
literal[string]
identifier[path_a] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[path_a] )
identifier[path_b] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[path_b] )
identifier[relative] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[path_a] , identifier[path_b] )
keyword[return] ( keyword[not] identifier[relative] . identifier[startswith] ( identifier[os] . identifier[pardir] + identifier[os] . identifier[sep] )) | def is_subdirectory(path_a, path_b):
"""Returns True if `path_a` is a subdirectory of `path_b`."""
path_a = os.path.realpath(path_a)
path_b = os.path.realpath(path_b)
relative = os.path.relpath(path_a, path_b)
return not relative.startswith(os.pardir + os.sep) |
def get_leading_spaces(data):
"""Get the leading space of a string if it is not empty
:type data: str
"""
spaces = ''
m = re.match(r'^(\s*)', data)
if m:
spaces = m.group(1)
return spaces | def function[get_leading_spaces, parameter[data]]:
constant[Get the leading space of a string if it is not empty
:type data: str
]
variable[spaces] assign[=] constant[]
variable[m] assign[=] call[name[re].match, parameter[constant[^(\s*)], name[data]]]
if name[m] begin[:]
variable[spaces] assign[=] call[name[m].group, parameter[constant[1]]]
return[name[spaces]] | keyword[def] identifier[get_leading_spaces] ( identifier[data] ):
literal[string]
identifier[spaces] = literal[string]
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[data] )
keyword[if] identifier[m] :
identifier[spaces] = identifier[m] . identifier[group] ( literal[int] )
keyword[return] identifier[spaces] | def get_leading_spaces(data):
"""Get the leading space of a string if it is not empty
:type data: str
"""
spaces = ''
m = re.match('^(\\s*)', data)
if m:
spaces = m.group(1) # depends on [control=['if'], data=[]]
return spaces |
def random_weights(size, sumto=1.0):
"""Generate an array of random weights that sum to `sumto`.
The result may be of arbitrary dimensions. `size` is passed to
the `size` parameter of `np.random.random`, which acts as a shape
parameter in this case.
Note that `sumto` is subject to typical Python floating point limitations.
This function does not implement a softmax check.
Parameters
----------
size: int or tuple of ints, optional
Output shape. If the given shape is, e.g., ``(m, n, k)``, then
``m * n * k`` samples are drawn.
sumto: float, default 1.
Each vector of weights should sum to this in decimal terms.
Returns
-------
np.ndarray
"""
w = np.random.random(size)
if w.ndim == 2:
if isinstance(sumto, (np.ndarray, list, tuple)):
sumto = np.asarray(sumto)[:, None]
w = sumto * w / w.sum(axis=-1)[:, None]
elif w.ndim == 1:
w = sumto * w / w.sum()
else:
raise ValueError("`w.ndim` must be 1 or 2, not %s" % w.ndim)
return w | def function[random_weights, parameter[size, sumto]]:
constant[Generate an array of random weights that sum to `sumto`.
The result may be of arbitrary dimensions. `size` is passed to
the `size` parameter of `np.random.random`, which acts as a shape
parameter in this case.
Note that `sumto` is subject to typical Python floating point limitations.
This function does not implement a softmax check.
Parameters
----------
size: int or tuple of ints, optional
Output shape. If the given shape is, e.g., ``(m, n, k)``, then
``m * n * k`` samples are drawn.
sumto: float, default 1.
Each vector of weights should sum to this in decimal terms.
Returns
-------
np.ndarray
]
variable[w] assign[=] call[name[np].random.random, parameter[name[size]]]
if compare[name[w].ndim equal[==] constant[2]] begin[:]
if call[name[isinstance], parameter[name[sumto], tuple[[<ast.Attribute object at 0x7da1b0778760>, <ast.Name object at 0x7da1b07780a0>, <ast.Name object at 0x7da1b07788b0>]]]] begin[:]
variable[sumto] assign[=] call[call[name[np].asarray, parameter[name[sumto]]]][tuple[[<ast.Slice object at 0x7da1b0778f40>, <ast.Constant object at 0x7da1b0778fa0>]]]
variable[w] assign[=] binary_operation[binary_operation[name[sumto] * name[w]] / call[call[name[w].sum, parameter[]]][tuple[[<ast.Slice object at 0x7da1b077bf10>, <ast.Constant object at 0x7da1b077aa10>]]]]
return[name[w]] | keyword[def] identifier[random_weights] ( identifier[size] , identifier[sumto] = literal[int] ):
literal[string]
identifier[w] = identifier[np] . identifier[random] . identifier[random] ( identifier[size] )
keyword[if] identifier[w] . identifier[ndim] == literal[int] :
keyword[if] identifier[isinstance] ( identifier[sumto] ,( identifier[np] . identifier[ndarray] , identifier[list] , identifier[tuple] )):
identifier[sumto] = identifier[np] . identifier[asarray] ( identifier[sumto] )[:, keyword[None] ]
identifier[w] = identifier[sumto] * identifier[w] / identifier[w] . identifier[sum] ( identifier[axis] =- literal[int] )[:, keyword[None] ]
keyword[elif] identifier[w] . identifier[ndim] == literal[int] :
identifier[w] = identifier[sumto] * identifier[w] / identifier[w] . identifier[sum] ()
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[w] . identifier[ndim] )
keyword[return] identifier[w] | def random_weights(size, sumto=1.0):
"""Generate an array of random weights that sum to `sumto`.
The result may be of arbitrary dimensions. `size` is passed to
the `size` parameter of `np.random.random`, which acts as a shape
parameter in this case.
Note that `sumto` is subject to typical Python floating point limitations.
This function does not implement a softmax check.
Parameters
----------
size: int or tuple of ints, optional
Output shape. If the given shape is, e.g., ``(m, n, k)``, then
``m * n * k`` samples are drawn.
sumto: float, default 1.
Each vector of weights should sum to this in decimal terms.
Returns
-------
np.ndarray
"""
w = np.random.random(size)
if w.ndim == 2:
if isinstance(sumto, (np.ndarray, list, tuple)):
sumto = np.asarray(sumto)[:, None] # depends on [control=['if'], data=[]]
w = sumto * w / w.sum(axis=-1)[:, None] # depends on [control=['if'], data=[]]
elif w.ndim == 1:
w = sumto * w / w.sum() # depends on [control=['if'], data=[]]
else:
raise ValueError('`w.ndim` must be 1 or 2, not %s' % w.ndim)
return w |
def _initConstants(ptc):
"""
Create localized versions of the units, week and month names
"""
# build weekday offsets - yes, it assumes the Weekday and shortWeekday
# lists are in the same order and Mon..Sun (Python style)
ptc.WeekdayOffsets = {}
o = 0
for key in ptc.Weekdays:
ptc.WeekdayOffsets[key] = o
o += 1
o = 0
for key in ptc.shortWeekdays:
ptc.WeekdayOffsets[key] = o
o += 1
# build month offsets - yes, it assumes the Months and shortMonths
# lists are in the same order and Jan..Dec
ptc.MonthOffsets = {}
o = 1
for key in ptc.Months:
ptc.MonthOffsets[key] = o
o += 1
o = 1
for key in ptc.shortMonths:
ptc.MonthOffsets[key] = o
o += 1 | def function[_initConstants, parameter[ptc]]:
constant[
Create localized versions of the units, week and month names
]
name[ptc].WeekdayOffsets assign[=] dictionary[[], []]
variable[o] assign[=] constant[0]
for taget[name[key]] in starred[name[ptc].Weekdays] begin[:]
call[name[ptc].WeekdayOffsets][name[key]] assign[=] name[o]
<ast.AugAssign object at 0x7da1b164abf0>
variable[o] assign[=] constant[0]
for taget[name[key]] in starred[name[ptc].shortWeekdays] begin[:]
call[name[ptc].WeekdayOffsets][name[key]] assign[=] name[o]
<ast.AugAssign object at 0x7da1b1471c90>
name[ptc].MonthOffsets assign[=] dictionary[[], []]
variable[o] assign[=] constant[1]
for taget[name[key]] in starred[name[ptc].Months] begin[:]
call[name[ptc].MonthOffsets][name[key]] assign[=] name[o]
<ast.AugAssign object at 0x7da1b1473ee0>
variable[o] assign[=] constant[1]
for taget[name[key]] in starred[name[ptc].shortMonths] begin[:]
call[name[ptc].MonthOffsets][name[key]] assign[=] name[o]
<ast.AugAssign object at 0x7da18f09d150> | keyword[def] identifier[_initConstants] ( identifier[ptc] ):
literal[string]
identifier[ptc] . identifier[WeekdayOffsets] ={}
identifier[o] = literal[int]
keyword[for] identifier[key] keyword[in] identifier[ptc] . identifier[Weekdays] :
identifier[ptc] . identifier[WeekdayOffsets] [ identifier[key] ]= identifier[o]
identifier[o] += literal[int]
identifier[o] = literal[int]
keyword[for] identifier[key] keyword[in] identifier[ptc] . identifier[shortWeekdays] :
identifier[ptc] . identifier[WeekdayOffsets] [ identifier[key] ]= identifier[o]
identifier[o] += literal[int]
identifier[ptc] . identifier[MonthOffsets] ={}
identifier[o] = literal[int]
keyword[for] identifier[key] keyword[in] identifier[ptc] . identifier[Months] :
identifier[ptc] . identifier[MonthOffsets] [ identifier[key] ]= identifier[o]
identifier[o] += literal[int]
identifier[o] = literal[int]
keyword[for] identifier[key] keyword[in] identifier[ptc] . identifier[shortMonths] :
identifier[ptc] . identifier[MonthOffsets] [ identifier[key] ]= identifier[o]
identifier[o] += literal[int] | def _initConstants(ptc):
"""
Create localized versions of the units, week and month names
"""
# build weekday offsets - yes, it assumes the Weekday and shortWeekday
# lists are in the same order and Mon..Sun (Python style)
ptc.WeekdayOffsets = {}
o = 0
for key in ptc.Weekdays:
ptc.WeekdayOffsets[key] = o
o += 1 # depends on [control=['for'], data=['key']]
o = 0
for key in ptc.shortWeekdays:
ptc.WeekdayOffsets[key] = o
o += 1 # depends on [control=['for'], data=['key']]
# build month offsets - yes, it assumes the Months and shortMonths
# lists are in the same order and Jan..Dec
ptc.MonthOffsets = {}
o = 1
for key in ptc.Months:
ptc.MonthOffsets[key] = o
o += 1 # depends on [control=['for'], data=['key']]
o = 1
for key in ptc.shortMonths:
ptc.MonthOffsets[key] = o
o += 1 # depends on [control=['for'], data=['key']] |
def get_num_branches(self):
"""
Return the number of effective branches for tectonic region type,
as a dictionary.
"""
num = {}
for trt, branches in itertools.groupby(
self.branches, operator.attrgetter('trt')):
num[trt] = sum(1 for br in branches if br.effective)
return num | def function[get_num_branches, parameter[self]]:
constant[
Return the number of effective branches for tectonic region type,
as a dictionary.
]
variable[num] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2054a5db0>, <ast.Name object at 0x7da2054a5b10>]]] in starred[call[name[itertools].groupby, parameter[name[self].branches, call[name[operator].attrgetter, parameter[constant[trt]]]]]] begin[:]
call[name[num]][name[trt]] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da2054a6b30>]]
return[name[num]] | keyword[def] identifier[get_num_branches] ( identifier[self] ):
literal[string]
identifier[num] ={}
keyword[for] identifier[trt] , identifier[branches] keyword[in] identifier[itertools] . identifier[groupby] (
identifier[self] . identifier[branches] , identifier[operator] . identifier[attrgetter] ( literal[string] )):
identifier[num] [ identifier[trt] ]= identifier[sum] ( literal[int] keyword[for] identifier[br] keyword[in] identifier[branches] keyword[if] identifier[br] . identifier[effective] )
keyword[return] identifier[num] | def get_num_branches(self):
"""
Return the number of effective branches for tectonic region type,
as a dictionary.
"""
num = {}
for (trt, branches) in itertools.groupby(self.branches, operator.attrgetter('trt')):
num[trt] = sum((1 for br in branches if br.effective)) # depends on [control=['for'], data=[]]
return num |
def run_set_int(obj, arg, msg_on_error, min_value=None, max_value=None):
"""set an Integer-valued debugger setting. 'obj' is a generally a
subcommand that has 'name' and 'debugger.settings' attributes"""
if '' == arg.strip():
obj.errmsg("You need to supply a number.")
return
obj.debugger.settings[obj.name] = \
get_an_int(obj.errmsg, arg, msg_on_error, min_value, max_value)
return obj.debugger.settings[obj.name] | def function[run_set_int, parameter[obj, arg, msg_on_error, min_value, max_value]]:
constant[set an Integer-valued debugger setting. 'obj' is a generally a
subcommand that has 'name' and 'debugger.settings' attributes]
if compare[constant[] equal[==] call[name[arg].strip, parameter[]]] begin[:]
call[name[obj].errmsg, parameter[constant[You need to supply a number.]]]
return[None]
call[name[obj].debugger.settings][name[obj].name] assign[=] call[name[get_an_int], parameter[name[obj].errmsg, name[arg], name[msg_on_error], name[min_value], name[max_value]]]
return[call[name[obj].debugger.settings][name[obj].name]] | keyword[def] identifier[run_set_int] ( identifier[obj] , identifier[arg] , identifier[msg_on_error] , identifier[min_value] = keyword[None] , identifier[max_value] = keyword[None] ):
literal[string]
keyword[if] literal[string] == identifier[arg] . identifier[strip] ():
identifier[obj] . identifier[errmsg] ( literal[string] )
keyword[return]
identifier[obj] . identifier[debugger] . identifier[settings] [ identifier[obj] . identifier[name] ]= identifier[get_an_int] ( identifier[obj] . identifier[errmsg] , identifier[arg] , identifier[msg_on_error] , identifier[min_value] , identifier[max_value] )
keyword[return] identifier[obj] . identifier[debugger] . identifier[settings] [ identifier[obj] . identifier[name] ] | def run_set_int(obj, arg, msg_on_error, min_value=None, max_value=None):
"""set an Integer-valued debugger setting. 'obj' is a generally a
subcommand that has 'name' and 'debugger.settings' attributes"""
if '' == arg.strip():
obj.errmsg('You need to supply a number.')
return # depends on [control=['if'], data=[]]
obj.debugger.settings[obj.name] = get_an_int(obj.errmsg, arg, msg_on_error, min_value, max_value)
return obj.debugger.settings[obj.name] |
def docserver(self, server_name, package_documentation, methods):
"""Produce HTML documentation for an XML-RPC server."""
fdict = {}
for key, value in methods.items():
fdict[key] = '#-' + key
fdict[value] = fdict[key]
server_name = self.escape(server_name)
head = '<big><big><strong>%s</strong></big></big>' % server_name
result = self.heading(head, '#ffffff', '#7799ee')
doc = self.markup(package_documentation, self.preformat, fdict)
doc = doc and '<tt>%s</tt>' % doc
result = result + '<p>%s</p>\n' % doc
contents = []
method_items = sorted(methods.items())
for key, value in method_items:
contents.append(self.docroutine(value, key, funcs=fdict))
result = result + self.bigsection(
'Methods', '#ffffff', '#eeaa77', ''.join(contents))
return result | def function[docserver, parameter[self, server_name, package_documentation, methods]]:
constant[Produce HTML documentation for an XML-RPC server.]
variable[fdict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18f8139d0>, <ast.Name object at 0x7da18f811cf0>]]] in starred[call[name[methods].items, parameter[]]] begin[:]
call[name[fdict]][name[key]] assign[=] binary_operation[constant[#-] + name[key]]
call[name[fdict]][name[value]] assign[=] call[name[fdict]][name[key]]
variable[server_name] assign[=] call[name[self].escape, parameter[name[server_name]]]
variable[head] assign[=] binary_operation[constant[<big><big><strong>%s</strong></big></big>] <ast.Mod object at 0x7da2590d6920> name[server_name]]
variable[result] assign[=] call[name[self].heading, parameter[name[head], constant[#ffffff], constant[#7799ee]]]
variable[doc] assign[=] call[name[self].markup, parameter[name[package_documentation], name[self].preformat, name[fdict]]]
variable[doc] assign[=] <ast.BoolOp object at 0x7da18f810be0>
variable[result] assign[=] binary_operation[name[result] + binary_operation[constant[<p>%s</p>
] <ast.Mod object at 0x7da2590d6920> name[doc]]]
variable[contents] assign[=] list[[]]
variable[method_items] assign[=] call[name[sorted], parameter[call[name[methods].items, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da20c6c76d0>, <ast.Name object at 0x7da20c6c4790>]]] in starred[name[method_items]] begin[:]
call[name[contents].append, parameter[call[name[self].docroutine, parameter[name[value], name[key]]]]]
variable[result] assign[=] binary_operation[name[result] + call[name[self].bigsection, parameter[constant[Methods], constant[#ffffff], constant[#eeaa77], call[constant[].join, parameter[name[contents]]]]]]
return[name[result]] | keyword[def] identifier[docserver] ( identifier[self] , identifier[server_name] , identifier[package_documentation] , identifier[methods] ):
literal[string]
identifier[fdict] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[methods] . identifier[items] ():
identifier[fdict] [ identifier[key] ]= literal[string] + identifier[key]
identifier[fdict] [ identifier[value] ]= identifier[fdict] [ identifier[key] ]
identifier[server_name] = identifier[self] . identifier[escape] ( identifier[server_name] )
identifier[head] = literal[string] % identifier[server_name]
identifier[result] = identifier[self] . identifier[heading] ( identifier[head] , literal[string] , literal[string] )
identifier[doc] = identifier[self] . identifier[markup] ( identifier[package_documentation] , identifier[self] . identifier[preformat] , identifier[fdict] )
identifier[doc] = identifier[doc] keyword[and] literal[string] % identifier[doc]
identifier[result] = identifier[result] + literal[string] % identifier[doc]
identifier[contents] =[]
identifier[method_items] = identifier[sorted] ( identifier[methods] . identifier[items] ())
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[method_items] :
identifier[contents] . identifier[append] ( identifier[self] . identifier[docroutine] ( identifier[value] , identifier[key] , identifier[funcs] = identifier[fdict] ))
identifier[result] = identifier[result] + identifier[self] . identifier[bigsection] (
literal[string] , literal[string] , literal[string] , literal[string] . identifier[join] ( identifier[contents] ))
keyword[return] identifier[result] | def docserver(self, server_name, package_documentation, methods):
"""Produce HTML documentation for an XML-RPC server."""
fdict = {}
for (key, value) in methods.items():
fdict[key] = '#-' + key
fdict[value] = fdict[key] # depends on [control=['for'], data=[]]
server_name = self.escape(server_name)
head = '<big><big><strong>%s</strong></big></big>' % server_name
result = self.heading(head, '#ffffff', '#7799ee')
doc = self.markup(package_documentation, self.preformat, fdict)
doc = doc and '<tt>%s</tt>' % doc
result = result + '<p>%s</p>\n' % doc
contents = []
method_items = sorted(methods.items())
for (key, value) in method_items:
contents.append(self.docroutine(value, key, funcs=fdict)) # depends on [control=['for'], data=[]]
result = result + self.bigsection('Methods', '#ffffff', '#eeaa77', ''.join(contents))
return result |
def owned_by(self, owner, also_check_group=False):
"""
Checks if the specified user or user and group own the file.
Args:
owner (str): the user (or group) name for which we ask about ownership
also_check_group (bool): if set to True, both user owner and group owner checked
if set to False, only user owner checked
Returns:
bool: True if owner of the file is the specified owner
"""
if also_check_group:
return self.owner == owner and self.group == owner
else:
return self.owner == owner | def function[owned_by, parameter[self, owner, also_check_group]]:
constant[
Checks if the specified user or user and group own the file.
Args:
owner (str): the user (or group) name for which we ask about ownership
also_check_group (bool): if set to True, both user owner and group owner checked
if set to False, only user owner checked
Returns:
bool: True if owner of the file is the specified owner
]
if name[also_check_group] begin[:]
return[<ast.BoolOp object at 0x7da18f810340>] | keyword[def] identifier[owned_by] ( identifier[self] , identifier[owner] , identifier[also_check_group] = keyword[False] ):
literal[string]
keyword[if] identifier[also_check_group] :
keyword[return] identifier[self] . identifier[owner] == identifier[owner] keyword[and] identifier[self] . identifier[group] == identifier[owner]
keyword[else] :
keyword[return] identifier[self] . identifier[owner] == identifier[owner] | def owned_by(self, owner, also_check_group=False):
"""
Checks if the specified user or user and group own the file.
Args:
owner (str): the user (or group) name for which we ask about ownership
also_check_group (bool): if set to True, both user owner and group owner checked
if set to False, only user owner checked
Returns:
bool: True if owner of the file is the specified owner
"""
if also_check_group:
return self.owner == owner and self.group == owner # depends on [control=['if'], data=[]]
else:
return self.owner == owner |
def value_or_default(self, value):
''' Returns the given value or the specified default value for this
field '''
if value is None:
if callable(self.default):
return self.default()
else:
return self.default
return value | def function[value_or_default, parameter[self, value]]:
constant[ Returns the given value or the specified default value for this
field ]
if compare[name[value] is constant[None]] begin[:]
if call[name[callable], parameter[name[self].default]] begin[:]
return[call[name[self].default, parameter[]]]
return[name[value]] | keyword[def] identifier[value_or_default] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[if] identifier[callable] ( identifier[self] . identifier[default] ):
keyword[return] identifier[self] . identifier[default] ()
keyword[else] :
keyword[return] identifier[self] . identifier[default]
keyword[return] identifier[value] | def value_or_default(self, value):
""" Returns the given value or the specified default value for this
field """
if value is None:
if callable(self.default):
return self.default() # depends on [control=['if'], data=[]]
else:
return self.default # depends on [control=['if'], data=[]]
return value |
def check_version_consistency(self):
    """
    Walk every releaser and raise if two of them report different
    current versions; releasers that cannot determine a version are
    skipped.
    """
    seen_version = None
    seen_name = None
    for rel in self.releasers:
        try:
            current = rel.determine_current_version()
        except NotImplementedError:
            # This releaser cannot report a version; ignore it.
            continue
        if current and seen_version and current != seen_version:
            raise Exception('Inconsistent versions, {} is at {} but {} is at {}.'.format(
                seen_name, seen_version, rel.name, current))
        seen_version, seen_name = current, rel.name
constant[
Determine if any releasers have inconsistent versions
]
variable[version] assign[=] constant[None]
variable[releaser_name] assign[=] constant[None]
for taget[name[releaser]] in starred[name[self].releasers] begin[:]
<ast.Try object at 0x7da20c6c4250>
if <ast.BoolOp object at 0x7da20c6c5720> begin[:]
<ast.Raise object at 0x7da20c6c7a60>
variable[version] assign[=] name[next_version]
variable[releaser_name] assign[=] name[releaser].name | keyword[def] identifier[check_version_consistency] ( identifier[self] ):
literal[string]
identifier[version] = keyword[None]
identifier[releaser_name] = keyword[None]
keyword[for] identifier[releaser] keyword[in] identifier[self] . identifier[releasers] :
keyword[try] :
identifier[next_version] = identifier[releaser] . identifier[determine_current_version] ()
keyword[except] identifier[NotImplementedError] :
keyword[continue]
keyword[if] identifier[next_version] keyword[and] identifier[version] keyword[and] identifier[version] != identifier[next_version] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] (
identifier[releaser_name] , identifier[version] , identifier[releaser] . identifier[name] , identifier[next_version] ))
identifier[version] = identifier[next_version]
identifier[releaser_name] = identifier[releaser] . identifier[name] | def check_version_consistency(self):
"""
Determine if any releasers have inconsistent versions
"""
version = None
releaser_name = None
for releaser in self.releasers:
try:
next_version = releaser.determine_current_version() # depends on [control=['try'], data=[]]
except NotImplementedError:
continue # depends on [control=['except'], data=[]]
if next_version and version and (version != next_version):
raise Exception('Inconsistent versions, {} is at {} but {} is at {}.'.format(releaser_name, version, releaser.name, next_version)) # depends on [control=['if'], data=[]]
version = next_version
releaser_name = releaser.name # depends on [control=['for'], data=['releaser']] |
def __process_inequalities(self, block_index):
    """Generate localizing matrices
    Arguments:
    inequalities -- list of inequality constraints
    monomials -- localizing monomials
    block_index -- the current block index in constraint matrices of the
                   SDP relaxation

    Returns the block index after the last processed constraint block.
    """
    initial_block_index = block_index
    # row_offsets[b] is the first flattened entry index of block b; a block
    # of size s occupies s**2 entries.
    row_offsets = [0]
    for block, block_size in enumerate(self.block_struct):
        row_offsets.append(row_offsets[block] + block_size ** 2)
    if self._parallel:
        pool = Pool()
    for k, ineq in enumerate(self.constraints):
        block_index += 1
        # Localizing monomial set associated with this constraint's block.
        monomials = self.localizing_monomial_sets[block_index -
                                                  initial_block_index-1]
        lm = len(monomials)
        # String-valued constraints are handed to the expression parser
        # instead of being expanded entry by entry.
        if isinstance(ineq, str):
            self.__parse_expression(ineq, row_offsets[block_index-1])
            continue
        # Normalize relational constraints (e.g. lhs >= rhs) to polynomials.
        if ineq.is_Relational:
            ineq = convert_relational(ineq)
        func = partial(moment_of_entry, monomials=monomials, ineq=ineq,
                       substitutions=self.substitutions)
        # Compute upper-triangular entries (row <= column) of the localizing
        # matrix, via the worker pool when parallelism is enabled.
        if self._parallel and lm > 1:
            chunksize = max(int(np.sqrt(lm*lm/2) /
                                cpu_count()), 1)
            iter_ = pool.map(func, ([row, column] for row in range(lm)
                                    for column in range(row, lm)),
                             chunksize)
        else:
            iter_ = imap(func, ([row, column] for row in range(lm)
                                for column in range(row, lm)))
        # Blocks past constraint_starting_block + _n_inequalities are
        # treated as equalities: each matrix entry is pushed at (0, 0) of
        # its own block, advancing block_index per entry below.
        if block_index > self.constraint_starting_block + \
                self._n_inequalities and lm > 1:
            is_equality = True
        else:
            is_equality = False
        for row, column, polynomial in iter_:
            if is_equality:
                row, column = 0, 0
            self.__push_facvar_sparse(polynomial, block_index,
                                      row_offsets[block_index-1],
                                      row, column)
            if is_equality:
                block_index += 1
        # The per-entry increment above overshoots by one block.
        if is_equality:
            block_index -= 1
        if self.verbose > 0:
            # \r\x1b[K rewrites the same terminal line as a progress meter.
            sys.stdout.write("\r\x1b[KProcessing %d/%d constraints..." %
                             (k+1, len(self.constraints)))
            sys.stdout.flush()
    if self._parallel:
        pool.close()
        pool.join()
    if self.verbose > 0:
        sys.stdout.write("\n")
    return block_index
constant[Generate localizing matrices
Arguments:
inequalities -- list of inequality constraints
monomials -- localizing monomials
block_index -- the current block index in constraint matrices of the
SDP relaxation
]
variable[initial_block_index] assign[=] name[block_index]
variable[row_offsets] assign[=] list[[<ast.Constant object at 0x7da1b0ffb700>]]
for taget[tuple[[<ast.Name object at 0x7da1b0ffb670>, <ast.Name object at 0x7da1b0ffb640>]]] in starred[call[name[enumerate], parameter[name[self].block_struct]]] begin[:]
call[name[row_offsets].append, parameter[binary_operation[call[name[row_offsets]][name[block]] + binary_operation[name[block_size] ** constant[2]]]]]
if name[self]._parallel begin[:]
variable[pool] assign[=] call[name[Pool], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0ffb160>, <ast.Name object at 0x7da1b0ffb130>]]] in starred[call[name[enumerate], parameter[name[self].constraints]]] begin[:]
<ast.AugAssign object at 0x7da1b0ffb040>
variable[monomials] assign[=] call[name[self].localizing_monomial_sets][binary_operation[binary_operation[name[block_index] - name[initial_block_index]] - constant[1]]]
variable[lm] assign[=] call[name[len], parameter[name[monomials]]]
if call[name[isinstance], parameter[name[ineq], name[str]]] begin[:]
call[name[self].__parse_expression, parameter[name[ineq], call[name[row_offsets]][binary_operation[name[block_index] - constant[1]]]]]
continue
if name[ineq].is_Relational begin[:]
variable[ineq] assign[=] call[name[convert_relational], parameter[name[ineq]]]
variable[func] assign[=] call[name[partial], parameter[name[moment_of_entry]]]
if <ast.BoolOp object at 0x7da1b0ffa590> begin[:]
variable[chunksize] assign[=] call[name[max], parameter[call[name[int], parameter[binary_operation[call[name[np].sqrt, parameter[binary_operation[binary_operation[name[lm] * name[lm]] / constant[2]]]] / call[name[cpu_count], parameter[]]]]], constant[1]]]
variable[iter_] assign[=] call[name[pool].map, parameter[name[func], <ast.GeneratorExp object at 0x7da1b0ff9ff0>, name[chunksize]]]
if <ast.BoolOp object at 0x7da1b0f118d0> begin[:]
variable[is_equality] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da1b0f110c0>, <ast.Name object at 0x7da1b0f13ee0>, <ast.Name object at 0x7da1b0f11ff0>]]] in starred[name[iter_]] begin[:]
if name[is_equality] begin[:]
<ast.Tuple object at 0x7da1b0f13d90> assign[=] tuple[[<ast.Constant object at 0x7da1b0f10640>, <ast.Constant object at 0x7da1b0f11390>]]
call[name[self].__push_facvar_sparse, parameter[name[polynomial], name[block_index], call[name[row_offsets]][binary_operation[name[block_index] - constant[1]]], name[row], name[column]]]
if name[is_equality] begin[:]
<ast.AugAssign object at 0x7da1b0f13d60>
if name[is_equality] begin[:]
<ast.AugAssign object at 0x7da1b0f138b0>
if compare[name[self].verbose greater[>] constant[0]] begin[:]
call[name[sys].stdout.write, parameter[binary_operation[constant[
[KProcessing %d/%d constraints...] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b0f12890>, <ast.Call object at 0x7da1b0f12f20>]]]]]
call[name[sys].stdout.flush, parameter[]]
if name[self]._parallel begin[:]
call[name[pool].close, parameter[]]
call[name[pool].join, parameter[]]
if compare[name[self].verbose greater[>] constant[0]] begin[:]
call[name[sys].stdout.write, parameter[constant[
]]]
return[name[block_index]] | keyword[def] identifier[__process_inequalities] ( identifier[self] , identifier[block_index] ):
literal[string]
identifier[initial_block_index] = identifier[block_index]
identifier[row_offsets] =[ literal[int] ]
keyword[for] identifier[block] , identifier[block_size] keyword[in] identifier[enumerate] ( identifier[self] . identifier[block_struct] ):
identifier[row_offsets] . identifier[append] ( identifier[row_offsets] [ identifier[block] ]+ identifier[block_size] ** literal[int] )
keyword[if] identifier[self] . identifier[_parallel] :
identifier[pool] = identifier[Pool] ()
keyword[for] identifier[k] , identifier[ineq] keyword[in] identifier[enumerate] ( identifier[self] . identifier[constraints] ):
identifier[block_index] += literal[int]
identifier[monomials] = identifier[self] . identifier[localizing_monomial_sets] [ identifier[block_index] -
identifier[initial_block_index] - literal[int] ]
identifier[lm] = identifier[len] ( identifier[monomials] )
keyword[if] identifier[isinstance] ( identifier[ineq] , identifier[str] ):
identifier[self] . identifier[__parse_expression] ( identifier[ineq] , identifier[row_offsets] [ identifier[block_index] - literal[int] ])
keyword[continue]
keyword[if] identifier[ineq] . identifier[is_Relational] :
identifier[ineq] = identifier[convert_relational] ( identifier[ineq] )
identifier[func] = identifier[partial] ( identifier[moment_of_entry] , identifier[monomials] = identifier[monomials] , identifier[ineq] = identifier[ineq] ,
identifier[substitutions] = identifier[self] . identifier[substitutions] )
keyword[if] identifier[self] . identifier[_parallel] keyword[and] identifier[lm] > literal[int] :
identifier[chunksize] = identifier[max] ( identifier[int] ( identifier[np] . identifier[sqrt] ( identifier[lm] * identifier[lm] / literal[int] )/
identifier[cpu_count] ()), literal[int] )
identifier[iter_] = identifier[pool] . identifier[map] ( identifier[func] ,([ identifier[row] , identifier[column] ] keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[lm] )
keyword[for] identifier[column] keyword[in] identifier[range] ( identifier[row] , identifier[lm] )),
identifier[chunksize] )
keyword[else] :
identifier[iter_] = identifier[imap] ( identifier[func] ,([ identifier[row] , identifier[column] ] keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[lm] )
keyword[for] identifier[column] keyword[in] identifier[range] ( identifier[row] , identifier[lm] )))
keyword[if] identifier[block_index] > identifier[self] . identifier[constraint_starting_block] + identifier[self] . identifier[_n_inequalities] keyword[and] identifier[lm] > literal[int] :
identifier[is_equality] = keyword[True]
keyword[else] :
identifier[is_equality] = keyword[False]
keyword[for] identifier[row] , identifier[column] , identifier[polynomial] keyword[in] identifier[iter_] :
keyword[if] identifier[is_equality] :
identifier[row] , identifier[column] = literal[int] , literal[int]
identifier[self] . identifier[__push_facvar_sparse] ( identifier[polynomial] , identifier[block_index] ,
identifier[row_offsets] [ identifier[block_index] - literal[int] ],
identifier[row] , identifier[column] )
keyword[if] identifier[is_equality] :
identifier[block_index] += literal[int]
keyword[if] identifier[is_equality] :
identifier[block_index] -= literal[int]
keyword[if] identifier[self] . identifier[verbose] > literal[int] :
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] %
( identifier[k] + literal[int] , identifier[len] ( identifier[self] . identifier[constraints] )))
identifier[sys] . identifier[stdout] . identifier[flush] ()
keyword[if] identifier[self] . identifier[_parallel] :
identifier[pool] . identifier[close] ()
identifier[pool] . identifier[join] ()
keyword[if] identifier[self] . identifier[verbose] > literal[int] :
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] )
keyword[return] identifier[block_index] | def __process_inequalities(self, block_index):
"""Generate localizing matrices
Arguments:
inequalities -- list of inequality constraints
monomials -- localizing monomials
block_index -- the current block index in constraint matrices of the
SDP relaxation
"""
initial_block_index = block_index
row_offsets = [0]
for (block, block_size) in enumerate(self.block_struct):
row_offsets.append(row_offsets[block] + block_size ** 2) # depends on [control=['for'], data=[]]
if self._parallel:
pool = Pool() # depends on [control=['if'], data=[]]
for (k, ineq) in enumerate(self.constraints):
block_index += 1
monomials = self.localizing_monomial_sets[block_index - initial_block_index - 1]
lm = len(monomials)
if isinstance(ineq, str):
self.__parse_expression(ineq, row_offsets[block_index - 1])
continue # depends on [control=['if'], data=[]]
if ineq.is_Relational:
ineq = convert_relational(ineq) # depends on [control=['if'], data=[]]
func = partial(moment_of_entry, monomials=monomials, ineq=ineq, substitutions=self.substitutions)
if self._parallel and lm > 1:
chunksize = max(int(np.sqrt(lm * lm / 2) / cpu_count()), 1)
iter_ = pool.map(func, ([row, column] for row in range(lm) for column in range(row, lm)), chunksize) # depends on [control=['if'], data=[]]
else:
iter_ = imap(func, ([row, column] for row in range(lm) for column in range(row, lm)))
if block_index > self.constraint_starting_block + self._n_inequalities and lm > 1:
is_equality = True # depends on [control=['if'], data=[]]
else:
is_equality = False
for (row, column, polynomial) in iter_:
if is_equality:
(row, column) = (0, 0) # depends on [control=['if'], data=[]]
self.__push_facvar_sparse(polynomial, block_index, row_offsets[block_index - 1], row, column)
if is_equality:
block_index += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if is_equality:
block_index -= 1 # depends on [control=['if'], data=[]]
if self.verbose > 0:
sys.stdout.write('\r\x1b[KProcessing %d/%d constraints...' % (k + 1, len(self.constraints)))
sys.stdout.flush() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if self._parallel:
pool.close()
pool.join() # depends on [control=['if'], data=[]]
if self.verbose > 0:
sys.stdout.write('\n') # depends on [control=['if'], data=[]]
return block_index |
def session_callback(self, signal):
    """Dispatch signals coming from the stream session.

    Data - new data available for processing.
    Playing - Connection is healthy.
    Retry - if there is no connection to device.
    """
    if signal == SIGNAL_DATA:
        self.event.new_event(self.data)
    elif signal == SIGNAL_FAILED:
        self.retry()
    notify = self.connection_status_callback
    if notify and signal in (SIGNAL_PLAYING, SIGNAL_FAILED):
        notify(signal)
constant[Signalling from stream session.
Data - new data available for processing.
Playing - Connection is healthy.
Retry - if there is no connection to device.
]
if compare[name[signal] equal[==] name[SIGNAL_DATA]] begin[:]
call[name[self].event.new_event, parameter[name[self].data]]
if <ast.BoolOp object at 0x7da1b1854e20> begin[:]
call[name[self].connection_status_callback, parameter[name[signal]]] | keyword[def] identifier[session_callback] ( identifier[self] , identifier[signal] ):
literal[string]
keyword[if] identifier[signal] == identifier[SIGNAL_DATA] :
identifier[self] . identifier[event] . identifier[new_event] ( identifier[self] . identifier[data] )
keyword[elif] identifier[signal] == identifier[SIGNAL_FAILED] :
identifier[self] . identifier[retry] ()
keyword[if] identifier[signal] keyword[in] [ identifier[SIGNAL_PLAYING] , identifier[SIGNAL_FAILED] ] keyword[and] identifier[self] . identifier[connection_status_callback] :
identifier[self] . identifier[connection_status_callback] ( identifier[signal] ) | def session_callback(self, signal):
"""Signalling from stream session.
Data - new data available for processing.
Playing - Connection is healthy.
Retry - if there is no connection to device.
"""
if signal == SIGNAL_DATA:
self.event.new_event(self.data) # depends on [control=['if'], data=[]]
elif signal == SIGNAL_FAILED:
self.retry() # depends on [control=['if'], data=[]]
if signal in [SIGNAL_PLAYING, SIGNAL_FAILED] and self.connection_status_callback:
self.connection_status_callback(signal) # depends on [control=['if'], data=[]] |
def _linepoint(self, t, x0, y0, x1, y1):
""" Returns coordinates for point at t on the line.
Calculates the coordinates of x and y for a point at t on a straight line.
The t parameter is a number between 0.0 and 1.0,
x0 and y0 define the starting point of the line,
x1 and y1 the ending point of the line.
"""
# Originally from nodebox-gl
out_x = x0 + t * (x1 - x0)
out_y = y0 + t * (y1 - y0)
return (out_x, out_y) | def function[_linepoint, parameter[self, t, x0, y0, x1, y1]]:
constant[ Returns coordinates for point at t on the line.
Calculates the coordinates of x and y for a point at t on a straight line.
The t parameter is a number between 0.0 and 1.0,
x0 and y0 define the starting point of the line,
x1 and y1 the ending point of the line.
]
variable[out_x] assign[=] binary_operation[name[x0] + binary_operation[name[t] * binary_operation[name[x1] - name[x0]]]]
variable[out_y] assign[=] binary_operation[name[y0] + binary_operation[name[t] * binary_operation[name[y1] - name[y0]]]]
return[tuple[[<ast.Name object at 0x7da18dc07460>, <ast.Name object at 0x7da18dc07be0>]]] | keyword[def] identifier[_linepoint] ( identifier[self] , identifier[t] , identifier[x0] , identifier[y0] , identifier[x1] , identifier[y1] ):
literal[string]
identifier[out_x] = identifier[x0] + identifier[t] *( identifier[x1] - identifier[x0] )
identifier[out_y] = identifier[y0] + identifier[t] *( identifier[y1] - identifier[y0] )
keyword[return] ( identifier[out_x] , identifier[out_y] ) | def _linepoint(self, t, x0, y0, x1, y1):
""" Returns coordinates for point at t on the line.
Calculates the coordinates of x and y for a point at t on a straight line.
The t parameter is a number between 0.0 and 1.0,
x0 and y0 define the starting point of the line,
x1 and y1 the ending point of the line.
"""
# Originally from nodebox-gl
out_x = x0 + t * (x1 - x0)
out_y = y0 + t * (y1 - y0)
return (out_x, out_y) |
def convertToIntRange(val, minValue, maxValue, invalidDefault, emptyValue=''):
    '''
    convertToIntRange - Convert input value to an integer within a certain range
    @param val <None/str/int/float> - The input value
    @param minValue <None/int> - The minimum value (inclusive), or None if no minimum
    @param maxValue <None/int> - The maximum value (inclusive), or None if no maximum
    @param invalidDefault <None/str/Exception> - The value to return if "val" is not empty string/None
        and "val" is not in #possibleValues
            If instantiated Exception (like ValueError('blah')):  Raise this exception
            If an Exception type ( like ValueError ) - Instantiate and raise this exception type
            Otherwise, use this raw value
    @param emptyValue Default '', used for an empty value (empty string or None)
    '''
    # If null/empty, return the configured empty value (or treat as invalid
    # when empties are explicitly disallowed).
    if val is None or val == '':
        if emptyValue is EMPTY_IS_INVALID:
            return _handleInvalid(invalidDefault)
        return emptyValue
    try:
        val = int(val)
    except (ValueError, TypeError):
        # ValueError: unparseable string (e.g. "abc", "3.5");
        # TypeError: non-convertible object (e.g. list) — both are invalid.
        return _handleInvalid(invalidDefault)
    # Enforce the inclusive bounds; None disables a bound.
    if minValue is not None and val < minValue:
        return _handleInvalid(invalidDefault)
    if maxValue is not None and val > maxValue:
        return _handleInvalid(invalidDefault)
    return val
constant[
converToIntRange - Convert input value to an integer within a certain range
@param val <None/str/int/float> - The input value
@param minValue <None/int> - The minimum value (inclusive), or None if no minimum
@param maxValue <None/int> - The maximum value (inclusive), or None if no maximum
@param invalidDefault <None/str/Exception> - The value to return if "val" is not empty string/None
and "val" is not in #possibleValues
If instantiated Exception (like ValueError('blah')): Raise this exception
If an Exception type ( like ValueError ) - Instantiate and raise this exception type
Otherwise, use this raw value
@param emptyValue Default '', used for an empty value (empty string or None)
]
from relative_module[utils] import module[tostr]
if <ast.BoolOp object at 0x7da1b11bed10> begin[:]
if compare[name[emptyValue] is name[EMPTY_IS_INVALID]] begin[:]
return[call[name[_handleInvalid], parameter[name[invalidDefault]]]]
return[name[emptyValue]]
<ast.Try object at 0x7da1b11bf7f0>
if <ast.BoolOp object at 0x7da1b11bd900> begin[:]
return[call[name[_handleInvalid], parameter[name[invalidDefault]]]]
if <ast.BoolOp object at 0x7da1b11bf580> begin[:]
return[call[name[_handleInvalid], parameter[name[invalidDefault]]]]
return[name[val]] | keyword[def] identifier[convertToIntRange] ( identifier[val] , identifier[minValue] , identifier[maxValue] , identifier[invalidDefault] , identifier[emptyValue] = literal[string] ):
literal[string]
keyword[from] . identifier[utils] keyword[import] identifier[tostr]
keyword[if] identifier[val] keyword[is] keyword[None] keyword[or] identifier[val] == literal[string] :
keyword[if] identifier[emptyValue] keyword[is] identifier[EMPTY_IS_INVALID] :
keyword[return] identifier[_handleInvalid] ( identifier[invalidDefault] )
keyword[return] identifier[emptyValue]
keyword[try] :
identifier[val] = identifier[int] ( identifier[val] )
keyword[except] identifier[ValueError] :
keyword[return] identifier[_handleInvalid] ( identifier[invalidDefault] )
keyword[if] identifier[minValue] keyword[is] keyword[not] keyword[None] keyword[and] identifier[val] < identifier[minValue] :
keyword[return] identifier[_handleInvalid] ( identifier[invalidDefault] )
keyword[if] identifier[maxValue] keyword[is] keyword[not] keyword[None] keyword[and] identifier[val] > identifier[maxValue] :
keyword[return] identifier[_handleInvalid] ( identifier[invalidDefault] )
keyword[return] identifier[val] | def convertToIntRange(val, minValue, maxValue, invalidDefault, emptyValue=''):
"""
converToIntRange - Convert input value to an integer within a certain range
@param val <None/str/int/float> - The input value
@param minValue <None/int> - The minimum value (inclusive), or None if no minimum
@param maxValue <None/int> - The maximum value (inclusive), or None if no maximum
@param invalidDefault <None/str/Exception> - The value to return if "val" is not empty string/None
and "val" is not in #possibleValues
If instantiated Exception (like ValueError('blah')): Raise this exception
If an Exception type ( like ValueError ) - Instantiate and raise this exception type
Otherwise, use this raw value
@param emptyValue Default '', used for an empty value (empty string or None)
"""
from .utils import tostr
# If null, retain null
if val is None or val == '':
if emptyValue is EMPTY_IS_INVALID:
return _handleInvalid(invalidDefault) # depends on [control=['if'], data=[]]
return emptyValue # depends on [control=['if'], data=[]]
try:
val = int(val) # depends on [control=['try'], data=[]]
except ValueError:
return _handleInvalid(invalidDefault) # depends on [control=['except'], data=[]]
if minValue is not None and val < minValue:
return _handleInvalid(invalidDefault) # depends on [control=['if'], data=[]]
if maxValue is not None and val > maxValue:
return _handleInvalid(invalidDefault) # depends on [control=['if'], data=[]]
return val |
def fma(x, y, z, context=None):
    """
    Return (x * y) + z, with a single rounding according to the current
    context.
    """
    operands = tuple(BigFloat._implicit_convert(op) for op in (x, y, z))
    return _apply_function_in_current_context(
        BigFloat, mpfr.mpfr_fma, operands, context,
    )
constant[
Return (x * y) + z, with a single rounding according to the current
context.
]
return[call[name[_apply_function_in_current_context], parameter[name[BigFloat], name[mpfr].mpfr_fma, tuple[[<ast.Call object at 0x7da207f99cf0>, <ast.Call object at 0x7da207f994e0>, <ast.Call object at 0x7da207f9ac20>]], name[context]]]] | keyword[def] identifier[fma] ( identifier[x] , identifier[y] , identifier[z] , identifier[context] = keyword[None] ):
literal[string]
keyword[return] identifier[_apply_function_in_current_context] (
identifier[BigFloat] ,
identifier[mpfr] . identifier[mpfr_fma] ,
(
identifier[BigFloat] . identifier[_implicit_convert] ( identifier[x] ),
identifier[BigFloat] . identifier[_implicit_convert] ( identifier[y] ),
identifier[BigFloat] . identifier[_implicit_convert] ( identifier[z] ),
),
identifier[context] ,
) | def fma(x, y, z, context=None):
"""
Return (x * y) + z, with a single rounding according to the current
context.
"""
return _apply_function_in_current_context(BigFloat, mpfr.mpfr_fma, (BigFloat._implicit_convert(x), BigFloat._implicit_convert(y), BigFloat._implicit_convert(z)), context) |
def default_profiler(f, _type, _value):
    ''' inspects an input frame and pretty prints the following:
        <src-path>:<src-line> -> <function-name>
        <source-code>
        <local-variables>
        ----------------------------------------

        Best-effort: any error while formatting/printing is swallowed so
        profiling never breaks the traced program.
    '''
    try:
        profile_print(
            '\n'.join([
                get_frame_src(f),
                get_locals(f),
                '----------------------------------------'
            ])
        )
    except Exception:
        # Deliberately swallow ordinary errors, but — unlike a bare
        # ``except:`` — let KeyboardInterrupt/SystemExit propagate so the
        # program can still be interrupted while being profiled.
        pass
constant[ inspects an input frame and pretty prints the following:
<src-path>:<src-line> -> <function-name>
<source-code>
<local-variables>
----------------------------------------
]
<ast.Try object at 0x7da2045668f0> | keyword[def] identifier[default_profiler] ( identifier[f] , identifier[_type] , identifier[_value] ):
literal[string]
keyword[try] :
identifier[profile_print] (
literal[string] . identifier[join] ([
identifier[get_frame_src] ( identifier[f] ),
identifier[get_locals] ( identifier[f] ),
literal[string]
])
)
keyword[except] :
keyword[pass] | def default_profiler(f, _type, _value):
""" inspects an input frame and pretty prints the following:
<src-path>:<src-line> -> <function-name>
<source-code>
<local-variables>
----------------------------------------
"""
try:
profile_print('\n'.join([get_frame_src(f), get_locals(f), '----------------------------------------'])) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] |
def GenerarAjusteFisico(self):
    """Generate a physical adjustment for a green tobacco settlement
    (AFIP WSLTV v1.3 webservice).

    Sends ``self.solicitud`` to the ``generarAjusteFisico`` method using
    the stored auth credentials, records any returned errors, and analyzes
    the first settlement in the response (if any). Returns True.
    """
    # Rename the main key of the request structure: the webservice expects
    # the fields at the top level, so unwrap a nested 'liquidacion' entry.
    if 'liquidacion' in self.solicitud:
        liq = self.solicitud.pop('liquidacion')
        self.solicitud = liq
    # Call the webservice with the cached AFIP token/sign/cuit:
    ret = self.client.generarAjusteFisico(
        auth={
            'token': self.Token, 'sign': self.Sign,
            'cuit': self.Cuit, },
        solicitud=self.solicitud,
    )
    # Analyze the result: collect errors, then process the first returned
    # settlement (None when the response carries none).
    ret = ret['respuesta']
    self.__analizar_errores(ret)
    liqs = ret.get('liquidacion', [])
    liq = liqs[0] if liqs else None
    self.AnalizarLiquidacion(liq)
    return True
constant[Generar Ajuste Físico de Liquidación de Tabaco Verde (WSLTVv1.3)]
if compare[constant[liquidacion] in name[self].solicitud] begin[:]
variable[liq] assign[=] call[name[self].solicitud.pop, parameter[constant[liquidacion]]]
name[self].solicitud assign[=] name[liq]
variable[ret] assign[=] call[name[self].client.generarAjusteFisico, parameter[]]
variable[ret] assign[=] call[name[ret]][constant[respuesta]]
call[name[self].__analizar_errores, parameter[name[ret]]]
variable[liqs] assign[=] call[name[ret].get, parameter[constant[liquidacion], list[[]]]]
variable[liq] assign[=] <ast.IfExp object at 0x7da1b1d541f0>
call[name[self].AnalizarLiquidacion, parameter[name[liq]]]
return[constant[True]] | keyword[def] identifier[GenerarAjusteFisico] ( identifier[self] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[solicitud] :
identifier[liq] = identifier[self] . identifier[solicitud] . identifier[pop] ( literal[string] )
identifier[self] . identifier[solicitud] = identifier[liq]
identifier[ret] = identifier[self] . identifier[client] . identifier[generarAjusteFisico] (
identifier[auth] ={
literal[string] : identifier[self] . identifier[Token] , literal[string] : identifier[self] . identifier[Sign] ,
literal[string] : identifier[self] . identifier[Cuit] ,},
identifier[solicitud] = identifier[self] . identifier[solicitud] ,
)
identifier[ret] = identifier[ret] [ literal[string] ]
identifier[self] . identifier[__analizar_errores] ( identifier[ret] )
identifier[liqs] = identifier[ret] . identifier[get] ( literal[string] ,[])
identifier[liq] = identifier[liqs] [ literal[int] ] keyword[if] identifier[liqs] keyword[else] keyword[None]
identifier[self] . identifier[AnalizarLiquidacion] ( identifier[liq] )
keyword[return] keyword[True] | def GenerarAjusteFisico(self):
"""Generar Ajuste Físico de Liquidación de Tabaco Verde (WSLTVv1.3)"""
# renombrar la clave principal de la estructura
if 'liquidacion' in self.solicitud:
liq = self.solicitud.pop('liquidacion')
self.solicitud = liq # depends on [control=['if'], data=[]]
# llamar al webservice:
ret = self.client.generarAjusteFisico(auth={'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit}, solicitud=self.solicitud)
# analizar el resultado:
ret = ret['respuesta']
self.__analizar_errores(ret)
liqs = ret.get('liquidacion', [])
liq = liqs[0] if liqs else None
self.AnalizarLiquidacion(liq)
return True |
def parse_partlist(str):
    '''parse partlist text delivered by eagle.
    header is converted to lowercase
    :param str: input string
    :rtype: tuple of header list and dict list: (['part','value',..], [{'part':'C1', 'value':'1n'}, ..])
    '''
    # NOTE: the parameter name ``str`` shadows the builtin; kept unchanged
    # for interface compatibility with existing callers.
    lines = str.strip().splitlines()
    # Materialize the filter: on Python 3 ``filter`` yields a lazy iterator,
    # which would break the ``lines[hind]`` indexing below.
    lines = [x for x in lines if len(x)]
    hind = header_index(lines)
    if hind is None:
        log.debug('empty partlist found')
        return ([], [])
    header_line = lines[hind]
    header = [x for x in header_line.split(' ') if len(x)]
    # Column start positions in the header line delimit the fixed-width
    # fields of every data line.
    positions = [header_line.index(x) for x in header]
    header = [x.strip().split()[0].lower() for x in header]
    data_lines = lines[hind + 1:]

    def parse_data_line(line):
        # Slice each field between its column start and the next column's
        # start (1000 acts as an "end of line" sentinel for the last field).
        y = [(h, line[pos1:pos2].strip()) for h, pos1, pos2 in zip(
            header, positions, positions[1:] + [1000])]
        return dict(y)
    data = [parse_data_line(x) for x in data_lines]
    return (header, data)
constant[parse partlist text delivered by eagle.
header is converted to lowercase
:param str: input string
:rtype: tuple of header list and dict list: (['part','value',..], [{'part':'C1', 'value':'1n'}, ..])
]
variable[lines] assign[=] call[call[name[str].strip, parameter[]].splitlines, parameter[]]
variable[lines] assign[=] call[name[filter], parameter[name[len], name[lines]]]
variable[hind] assign[=] call[name[header_index], parameter[name[lines]]]
if compare[name[hind] is constant[None]] begin[:]
call[name[log].debug, parameter[constant[empty partlist found]]]
return[tuple[[<ast.List object at 0x7da1b25d02e0>, <ast.List object at 0x7da1b25d1960>]]]
variable[header_line] assign[=] call[name[lines]][name[hind]]
variable[header] assign[=] call[name[header_line].split, parameter[constant[ ]]]
variable[header] assign[=] call[name[filter], parameter[name[len], name[header]]]
variable[positions] assign[=] <ast.ListComp object at 0x7da1b25d3730>
variable[header] assign[=] <ast.ListComp object at 0x7da1b25d17e0>
variable[data_lines] assign[=] call[name[lines]][<ast.Slice object at 0x7da1b25d2650>]
def function[parse_data_line, parameter[line]]:
variable[y] assign[=] <ast.ListComp object at 0x7da1b25d0ee0>
return[call[name[dict], parameter[name[y]]]]
variable[data] assign[=] <ast.ListComp object at 0x7da18f811330>
return[tuple[[<ast.Name object at 0x7da18f813700>, <ast.Name object at 0x7da18f8136d0>]]] | keyword[def] identifier[parse_partlist] ( identifier[str] ):
literal[string]
identifier[lines] = identifier[str] . identifier[strip] (). identifier[splitlines] ()
identifier[lines] = identifier[filter] ( identifier[len] , identifier[lines] )
identifier[hind] = identifier[header_index] ( identifier[lines] )
keyword[if] identifier[hind] keyword[is] keyword[None] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[return] ([],[])
identifier[header_line] = identifier[lines] [ identifier[hind] ]
identifier[header] = identifier[header_line] . identifier[split] ( literal[string] )
identifier[header] = identifier[filter] ( identifier[len] , identifier[header] )
identifier[positions] =[ identifier[header_line] . identifier[index] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[header] ]
identifier[header] =[ identifier[x] . identifier[strip] (). identifier[split] ()[ literal[int] ]. identifier[lower] () keyword[for] identifier[x] keyword[in] identifier[header] ]
identifier[data_lines] = identifier[lines] [ identifier[hind] + literal[int] :]
keyword[def] identifier[parse_data_line] ( identifier[line] ):
identifier[y] =[( identifier[h] , identifier[line] [ identifier[pos1] : identifier[pos2] ]. identifier[strip] ()) keyword[for] identifier[h] , identifier[pos1] , identifier[pos2] keyword[in] identifier[zip] (
identifier[header] , identifier[positions] , identifier[positions] [ literal[int] :]+[ literal[int] ])]
keyword[return] identifier[dict] ( identifier[y] )
identifier[data] =[ identifier[parse_data_line] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[data_lines] ]
keyword[return] ( identifier[header] , identifier[data] ) | def parse_partlist(str):
"""parse partlist text delivered by eagle.
header is converted to lowercase
:param str: input string
:rtype: tuple of header list and dict list: (['part','value',..], [{'part':'C1', 'value':'1n'}, ..])
"""
lines = str.strip().splitlines()
lines = filter(len, lines)
hind = header_index(lines)
if hind is None:
log.debug('empty partlist found')
return ([], []) # depends on [control=['if'], data=[]]
header_line = lines[hind]
header = header_line.split(' ')
header = filter(len, header)
positions = [header_line.index(x) for x in header]
header = [x.strip().split()[0].lower() for x in header]
data_lines = lines[hind + 1:]
def parse_data_line(line):
y = [(h, line[pos1:pos2].strip()) for (h, pos1, pos2) in zip(header, positions, positions[1:] + [1000])]
return dict(y)
data = [parse_data_line(x) for x in data_lines]
return (header, data) |
def _setup_stats(self):
    '''
    Sets up the stats collection
    '''
    self.stats_dict = {}
    conn = redis.Redis(
        host=self.settings['REDIS_HOST'],
        port=self.settings['REDIS_PORT'],
        db=self.settings.get('REDIS_DB'))
    try:
        # Probe the server; raises ConnectionError when Redis is unreachable.
        conn.info()
    except ConnectionError:
        self.logger.warn("Failed to connect to Redis in StatsCollector"
                         " Setup, no stats will be collected")
        return
    else:
        self.logger.debug("Connected to Redis in StatsCollector Setup")
        self.redis_conn = conn
    if self.settings['STATS_TOTAL']:
        self._setup_stats_total(conn)
    if self.settings['STATS_PLUGINS']:
        self._setup_stats_plugins(conn)
constant[
Sets up the stats collection
]
name[self].stats_dict assign[=] dictionary[[], []]
variable[redis_conn] assign[=] call[name[redis].Redis, parameter[]]
<ast.Try object at 0x7da1b18a1c60>
if call[name[self].settings][constant[STATS_TOTAL]] begin[:]
call[name[self]._setup_stats_total, parameter[name[redis_conn]]]
if call[name[self].settings][constant[STATS_PLUGINS]] begin[:]
call[name[self]._setup_stats_plugins, parameter[name[redis_conn]]] | keyword[def] identifier[_setup_stats] ( identifier[self] ):
literal[string]
identifier[self] . identifier[stats_dict] ={}
identifier[redis_conn] = identifier[redis] . identifier[Redis] ( identifier[host] = identifier[self] . identifier[settings] [ literal[string] ],
identifier[port] = identifier[self] . identifier[settings] [ literal[string] ],
identifier[db] = identifier[self] . identifier[settings] . identifier[get] ( literal[string] ))
keyword[try] :
identifier[redis_conn] . identifier[info] ()
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[redis_conn] = identifier[redis_conn]
keyword[except] identifier[ConnectionError] :
identifier[self] . identifier[logger] . identifier[warn] ( literal[string]
literal[string] )
keyword[return]
keyword[if] identifier[self] . identifier[settings] [ literal[string] ]:
identifier[self] . identifier[_setup_stats_total] ( identifier[redis_conn] )
keyword[if] identifier[self] . identifier[settings] [ literal[string] ]:
identifier[self] . identifier[_setup_stats_plugins] ( identifier[redis_conn] ) | def _setup_stats(self):
"""
Sets up the stats collection
"""
self.stats_dict = {}
redis_conn = redis.Redis(host=self.settings['REDIS_HOST'], port=self.settings['REDIS_PORT'], db=self.settings.get('REDIS_DB'))
try:
redis_conn.info()
self.logger.debug('Connected to Redis in StatsCollector Setup')
self.redis_conn = redis_conn # depends on [control=['try'], data=[]]
except ConnectionError:
self.logger.warn('Failed to connect to Redis in StatsCollector Setup, no stats will be collected')
return # depends on [control=['except'], data=[]]
if self.settings['STATS_TOTAL']:
self._setup_stats_total(redis_conn) # depends on [control=['if'], data=[]]
if self.settings['STATS_PLUGINS']:
self._setup_stats_plugins(redis_conn) # depends on [control=['if'], data=[]] |
def add_flowspec_local(flowspec_family, route_dist, rules, **kwargs):
    """Adds Flow Specification route from VRF identified by *route_dist*.

    Returns a one-element list describing the added route on success.
    Raises PrefixError wrapping the underlying BgpCoreError on failure.
    """
    try:
        # Create new path and insert into appropriate VRF table.
        tm = CORE_MANAGER.get_core_service().table_manager
        tm.update_flowspec_vrf_table(
            flowspec_family=flowspec_family, route_dist=route_dist,
            rules=rules, **kwargs)
        # Send success response.
        # BUG FIX: dict.update() returns None, so the original
        # ``[{...}.update(kwargs)]`` always returned [None]. Build the
        # dict first, merge kwargs, then return it.
        result = {FLOWSPEC_FAMILY: flowspec_family,
                  ROUTE_DISTINGUISHER: route_dist,
                  FLOWSPEC_RULES: rules}
        result.update(kwargs)
        return [result]
    except BgpCoreError as e:
        raise PrefixError(desc=e)
constant[Adds Flow Specification route from VRF identified by *route_dist*.
]
<ast.Try object at 0x7da1b1b0c250> | keyword[def] identifier[add_flowspec_local] ( identifier[flowspec_family] , identifier[route_dist] , identifier[rules] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[tm] = identifier[CORE_MANAGER] . identifier[get_core_service] (). identifier[table_manager]
identifier[tm] . identifier[update_flowspec_vrf_table] (
identifier[flowspec_family] = identifier[flowspec_family] , identifier[route_dist] = identifier[route_dist] ,
identifier[rules] = identifier[rules] ,** identifier[kwargs] )
keyword[return] [{ identifier[FLOWSPEC_FAMILY] : identifier[flowspec_family] ,
identifier[ROUTE_DISTINGUISHER] : identifier[route_dist] ,
identifier[FLOWSPEC_RULES] : identifier[rules] }. identifier[update] ( identifier[kwargs] )]
keyword[except] identifier[BgpCoreError] keyword[as] identifier[e] :
keyword[raise] identifier[PrefixError] ( identifier[desc] = identifier[e] ) | def add_flowspec_local(flowspec_family, route_dist, rules, **kwargs):
"""Adds Flow Specification route from VRF identified by *route_dist*.
"""
try:
# Create new path and insert into appropriate VRF table.
tm = CORE_MANAGER.get_core_service().table_manager
tm.update_flowspec_vrf_table(flowspec_family=flowspec_family, route_dist=route_dist, rules=rules, **kwargs)
# Send success response.
return [{FLOWSPEC_FAMILY: flowspec_family, ROUTE_DISTINGUISHER: route_dist, FLOWSPEC_RULES: rules}.update(kwargs)] # depends on [control=['try'], data=[]]
except BgpCoreError as e:
raise PrefixError(desc=e) # depends on [control=['except'], data=['e']] |
def direct_mode_cluster_role_env(cluster_role_env, config_path):
    """Check cluster/[role]/[environ], if they are required

    :param cluster_role_env: 3-tuple/list of (cluster, role, environ) strings
    :param config_path: directory that may contain the client config file
    :return: True when the provided values satisfy the config requirements
    :raises Exception: when role or environ is required but not provided
    """
    # otherwise, get the client.yaml file
    cli_conf_file = os.path.join(config_path, CLIENT_YAML)
    # if client conf doesn't exist, use default value
    if not os.path.isfile(cli_conf_file):
        return True
    with open(cli_conf_file, 'r') as conf_file:
        # safe_load: yaml.load without an explicit Loader is deprecated and
        # can construct arbitrary Python objects from the config file.
        client_confs = yaml.safe_load(conf_file)
    # the parsed value can be None if conf_file is an empty file
    if not client_confs:
        return True
    # if role is required but not provided, raise exception
    role_present = len(cluster_role_env[1]) > 0
    if client_confs.get(ROLE_REQUIRED) and not role_present:
        raise Exception("role required but not provided (cluster/role/env = %s). See %s in %s"
                        % (cluster_role_env, ROLE_REQUIRED, cli_conf_file))
    # if environ is required but not provided, raise exception
    environ_present = len(cluster_role_env[2]) > 0
    if client_confs.get(ENV_REQUIRED) and not environ_present:
        raise Exception("environ required but not provided (cluster/role/env = %s). See %s in %s"
                        % (cluster_role_env, ENV_REQUIRED, cli_conf_file))
    return True
constant[Check cluster/[role]/[environ], if they are required]
variable[cli_conf_file] assign[=] call[name[os].path.join, parameter[name[config_path], name[CLIENT_YAML]]]
if <ast.UnaryOp object at 0x7da2054a71f0> begin[:]
return[constant[True]]
variable[client_confs] assign[=] dictionary[[], []]
with call[name[open], parameter[name[cli_conf_file], constant[r]]] begin[:]
variable[client_confs] assign[=] call[name[yaml].load, parameter[name[conf_file]]]
if <ast.UnaryOp object at 0x7da2054a5f60> begin[:]
return[constant[True]]
variable[role_present] assign[=] <ast.IfExp object at 0x7da2054a50c0>
if <ast.BoolOp object at 0x7da20c76d450> begin[:]
<ast.Raise object at 0x7da20c76f100>
variable[environ_present] assign[=] <ast.IfExp object at 0x7da20c76f6d0>
if <ast.BoolOp object at 0x7da20c76cd60> begin[:]
<ast.Raise object at 0x7da20c76e590>
return[constant[True]] | keyword[def] identifier[direct_mode_cluster_role_env] ( identifier[cluster_role_env] , identifier[config_path] ):
literal[string]
identifier[cli_conf_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[config_path] , identifier[CLIENT_YAML] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[cli_conf_file] ):
keyword[return] keyword[True]
identifier[client_confs] ={}
keyword[with] identifier[open] ( identifier[cli_conf_file] , literal[string] ) keyword[as] identifier[conf_file] :
identifier[client_confs] = identifier[yaml] . identifier[load] ( identifier[conf_file] )
keyword[if] keyword[not] identifier[client_confs] :
keyword[return] keyword[True]
identifier[role_present] = keyword[True] keyword[if] identifier[len] ( identifier[cluster_role_env] [ literal[int] ])> literal[int] keyword[else] keyword[False]
keyword[if] identifier[ROLE_REQUIRED] keyword[in] identifier[client_confs] keyword[and] identifier[client_confs] [ identifier[ROLE_REQUIRED] ] keyword[and] keyword[not] identifier[role_present] :
keyword[raise] identifier[Exception] ( literal[string]
%( identifier[cluster_role_env] , identifier[ROLE_REQUIRED] , identifier[cli_conf_file] ))
identifier[environ_present] = keyword[True] keyword[if] identifier[len] ( identifier[cluster_role_env] [ literal[int] ])> literal[int] keyword[else] keyword[False]
keyword[if] identifier[ENV_REQUIRED] keyword[in] identifier[client_confs] keyword[and] identifier[client_confs] [ identifier[ENV_REQUIRED] ] keyword[and] keyword[not] identifier[environ_present] :
keyword[raise] identifier[Exception] ( literal[string]
%( identifier[cluster_role_env] , identifier[ENV_REQUIRED] , identifier[cli_conf_file] ))
keyword[return] keyword[True] | def direct_mode_cluster_role_env(cluster_role_env, config_path):
"""Check cluster/[role]/[environ], if they are required"""
# otherwise, get the client.yaml file
cli_conf_file = os.path.join(config_path, CLIENT_YAML)
# if client conf doesn't exist, use default value
if not os.path.isfile(cli_conf_file):
return True # depends on [control=['if'], data=[]]
client_confs = {}
with open(cli_conf_file, 'r') as conf_file:
client_confs = yaml.load(conf_file)
# the return value of yaml.load can be None if conf_file is an empty file
if not client_confs:
return True # depends on [control=['if'], data=[]]
# if role is required but not provided, raise exception
role_present = True if len(cluster_role_env[1]) > 0 else False
if ROLE_REQUIRED in client_confs and client_confs[ROLE_REQUIRED] and (not role_present):
raise Exception('role required but not provided (cluster/role/env = %s). See %s in %s' % (cluster_role_env, ROLE_REQUIRED, cli_conf_file)) # depends on [control=['if'], data=[]]
# if environ is required but not provided, raise exception
environ_present = True if len(cluster_role_env[2]) > 0 else False
if ENV_REQUIRED in client_confs and client_confs[ENV_REQUIRED] and (not environ_present):
raise Exception('environ required but not provided (cluster/role/env = %s). See %s in %s' % (cluster_role_env, ENV_REQUIRED, cli_conf_file)) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['conf_file']]
return True |
def as_phononwebsite(self):
    """
    Return a dictionary with the phononwebsite format:
    http://henriquemiranda.github.io/phononwebsite
    """
    d = {}
    # define the lattice as a nested list (3x3 matrix of lattice vectors)
    d["lattice"] = self.structure.lattice._matrix.tolist()
    # define atoms: cartesian positions, fractional positions and species labels
    atom_pos_car = []
    atom_pos_red = []
    atom_types = []
    for site in self.structure.sites:
        atom_pos_car.append(site.coords.tolist())
        atom_pos_red.append(site.frac_coords.tolist())
        atom_types.append(site.species_string)
    # default supercell repetitions for now, derived from the atom count
    d["repetitions"] = get_reasonable_repetitions(len(atom_pos_car))
    d["natoms"] = len(atom_pos_car)
    d["atom_pos_car"] = atom_pos_car
    d["atom_pos_red"] = atom_pos_red
    d["atom_types"] = atom_types
    d["atom_numbers"] = self.structure.atomic_numbers
    d["formula"] = self.structure.formula
    d["name"] = self.structure.formula
    # get qpoints (fractional coordinates of each point along the band path)
    qpoints = []
    for q in self.qpoints:
        qpoints.append(list(q.frac_coords))
    d["qpoints"] = qpoints
    # get labels: high-symmetry q-points keyed by their index along the path
    hsq_dict = collections.OrderedDict()
    for nq, q in enumerate(self.qpoints):
        if q.label is not None:
            hsq_dict[nq] = q.label
    # get cumulative distances along the path; a "jump" between two
    # consecutive, differently-labelled high-symmetry points contributes
    # no distance and starts a new line segment
    dist = 0
    nqstart = 0
    distances = [dist]
    line_breaks = []
    for nq in range(1, len(qpoints)):
        q1 = np.array(qpoints[nq])
        q2 = np.array(qpoints[nq-1])
        # detect jumps: both this point and the previous one are labelled
        if ((nq in hsq_dict) and (nq-1 in hsq_dict)):
            if (hsq_dict[nq] != hsq_dict[nq-1]):
                # merge the two labels into a single "A|B" marker at the break
                hsq_dict[nq-1] += "|" + hsq_dict[nq]
                del hsq_dict[nq]
            line_breaks.append((nqstart, nq))
            nqstart = nq
        else:
            dist += np.linalg.norm(q1 - q2)
        distances.append(dist)
    # close the final segment up to the end of the path
    line_breaks.append((nqstart, len(qpoints)))
    d["distances"] = distances
    d["line_breaks"] = line_breaks
    d["highsym_qpts"] = list(hsq_dict.items())
    # eigenvalues converted from THz to cm^-1; transposed so the outer list
    # presumably runs over q-points — TODO confirm self.bands axis order
    thz2cm1 = 33.35641
    bands = self.bands.copy() * thz2cm1
    d["eigenvalues"] = bands.T.tolist()
    # eigenvectors normalized by the first displacement's norm, with the
    # first two axes swapped and real/imaginary parts moved to the last axis
    eigenvectors = self.eigendisplacements.copy()
    eigenvectors /= np.linalg.norm(eigenvectors[0, 0])
    eigenvectors = eigenvectors.swapaxes(0, 1)
    eigenvectors = np.array([eigenvectors.real, eigenvectors.imag])
    eigenvectors = np.rollaxis(eigenvectors, 0, 5)
    d["vectors"] = eigenvectors.tolist()
    return d
return d | def function[as_phononwebsite, parameter[self]]:
constant[
Return a dictionary with the phononwebsite format:
http://henriquemiranda.github.io/phononwebsite
]
variable[d] assign[=] dictionary[[], []]
call[name[d]][constant[lattice]] assign[=] call[name[self].structure.lattice._matrix.tolist, parameter[]]
variable[atom_pos_car] assign[=] list[[]]
variable[atom_pos_red] assign[=] list[[]]
variable[atom_types] assign[=] list[[]]
for taget[name[site]] in starred[name[self].structure.sites] begin[:]
call[name[atom_pos_car].append, parameter[call[name[site].coords.tolist, parameter[]]]]
call[name[atom_pos_red].append, parameter[call[name[site].frac_coords.tolist, parameter[]]]]
call[name[atom_types].append, parameter[name[site].species_string]]
call[name[d]][constant[repetitions]] assign[=] call[name[get_reasonable_repetitions], parameter[call[name[len], parameter[name[atom_pos_car]]]]]
call[name[d]][constant[natoms]] assign[=] call[name[len], parameter[name[atom_pos_car]]]
call[name[d]][constant[atom_pos_car]] assign[=] name[atom_pos_car]
call[name[d]][constant[atom_pos_red]] assign[=] name[atom_pos_red]
call[name[d]][constant[atom_types]] assign[=] name[atom_types]
call[name[d]][constant[atom_numbers]] assign[=] name[self].structure.atomic_numbers
call[name[d]][constant[formula]] assign[=] name[self].structure.formula
call[name[d]][constant[name]] assign[=] name[self].structure.formula
variable[qpoints] assign[=] list[[]]
for taget[name[q]] in starred[name[self].qpoints] begin[:]
call[name[qpoints].append, parameter[call[name[list], parameter[name[q].frac_coords]]]]
call[name[d]][constant[qpoints]] assign[=] name[qpoints]
variable[hsq_dict] assign[=] call[name[collections].OrderedDict, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18eb57880>, <ast.Name object at 0x7da18eb54a30>]]] in starred[call[name[enumerate], parameter[name[self].qpoints]]] begin[:]
if compare[name[q].label is_not constant[None]] begin[:]
call[name[hsq_dict]][name[nq]] assign[=] name[q].label
variable[dist] assign[=] constant[0]
variable[nqstart] assign[=] constant[0]
variable[distances] assign[=] list[[<ast.Name object at 0x7da18eb545e0>]]
variable[line_breaks] assign[=] list[[]]
for taget[name[nq]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[qpoints]]]]]] begin[:]
variable[q1] assign[=] call[name[np].array, parameter[call[name[qpoints]][name[nq]]]]
variable[q2] assign[=] call[name[np].array, parameter[call[name[qpoints]][binary_operation[name[nq] - constant[1]]]]]
if <ast.BoolOp object at 0x7da18ede50c0> begin[:]
if compare[call[name[hsq_dict]][name[nq]] not_equal[!=] call[name[hsq_dict]][binary_operation[name[nq] - constant[1]]]] begin[:]
<ast.AugAssign object at 0x7da18ede6e30>
<ast.Delete object at 0x7da18ede6aa0>
call[name[line_breaks].append, parameter[tuple[[<ast.Name object at 0x7da18ede71c0>, <ast.Name object at 0x7da18ede7e50>]]]]
variable[nqstart] assign[=] name[nq]
call[name[distances].append, parameter[name[dist]]]
call[name[line_breaks].append, parameter[tuple[[<ast.Name object at 0x7da18eb57e20>, <ast.Call object at 0x7da18eb55a20>]]]]
call[name[d]][constant[distances]] assign[=] name[distances]
call[name[d]][constant[line_breaks]] assign[=] name[line_breaks]
call[name[d]][constant[highsym_qpts]] assign[=] call[name[list], parameter[call[name[hsq_dict].items, parameter[]]]]
variable[thz2cm1] assign[=] constant[33.35641]
variable[bands] assign[=] binary_operation[call[name[self].bands.copy, parameter[]] * name[thz2cm1]]
call[name[d]][constant[eigenvalues]] assign[=] call[name[bands].T.tolist, parameter[]]
variable[eigenvectors] assign[=] call[name[self].eigendisplacements.copy, parameter[]]
<ast.AugAssign object at 0x7da18eb55900>
variable[eigenvectors] assign[=] call[name[eigenvectors].swapaxes, parameter[constant[0], constant[1]]]
variable[eigenvectors] assign[=] call[name[np].array, parameter[list[[<ast.Attribute object at 0x7da18eb55810>, <ast.Attribute object at 0x7da18eb56650>]]]]
variable[eigenvectors] assign[=] call[name[np].rollaxis, parameter[name[eigenvectors], constant[0], constant[5]]]
call[name[d]][constant[vectors]] assign[=] call[name[eigenvectors].tolist, parameter[]]
return[name[d]] | keyword[def] identifier[as_phononwebsite] ( identifier[self] ):
literal[string]
identifier[d] ={}
identifier[d] [ literal[string] ]= identifier[self] . identifier[structure] . identifier[lattice] . identifier[_matrix] . identifier[tolist] ()
identifier[atom_pos_car] =[]
identifier[atom_pos_red] =[]
identifier[atom_types] =[]
keyword[for] identifier[site] keyword[in] identifier[self] . identifier[structure] . identifier[sites] :
identifier[atom_pos_car] . identifier[append] ( identifier[site] . identifier[coords] . identifier[tolist] ())
identifier[atom_pos_red] . identifier[append] ( identifier[site] . identifier[frac_coords] . identifier[tolist] ())
identifier[atom_types] . identifier[append] ( identifier[site] . identifier[species_string] )
identifier[d] [ literal[string] ]= identifier[get_reasonable_repetitions] ( identifier[len] ( identifier[atom_pos_car] ))
identifier[d] [ literal[string] ]= identifier[len] ( identifier[atom_pos_car] )
identifier[d] [ literal[string] ]= identifier[atom_pos_car]
identifier[d] [ literal[string] ]= identifier[atom_pos_red]
identifier[d] [ literal[string] ]= identifier[atom_types]
identifier[d] [ literal[string] ]= identifier[self] . identifier[structure] . identifier[atomic_numbers]
identifier[d] [ literal[string] ]= identifier[self] . identifier[structure] . identifier[formula]
identifier[d] [ literal[string] ]= identifier[self] . identifier[structure] . identifier[formula]
identifier[qpoints] =[]
keyword[for] identifier[q] keyword[in] identifier[self] . identifier[qpoints] :
identifier[qpoints] . identifier[append] ( identifier[list] ( identifier[q] . identifier[frac_coords] ))
identifier[d] [ literal[string] ]= identifier[qpoints]
identifier[hsq_dict] = identifier[collections] . identifier[OrderedDict] ()
keyword[for] identifier[nq] , identifier[q] keyword[in] identifier[enumerate] ( identifier[self] . identifier[qpoints] ):
keyword[if] identifier[q] . identifier[label] keyword[is] keyword[not] keyword[None] :
identifier[hsq_dict] [ identifier[nq] ]= identifier[q] . identifier[label]
identifier[dist] = literal[int]
identifier[nqstart] = literal[int]
identifier[distances] =[ identifier[dist] ]
identifier[line_breaks] =[]
keyword[for] identifier[nq] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[qpoints] )):
identifier[q1] = identifier[np] . identifier[array] ( identifier[qpoints] [ identifier[nq] ])
identifier[q2] = identifier[np] . identifier[array] ( identifier[qpoints] [ identifier[nq] - literal[int] ])
keyword[if] (( identifier[nq] keyword[in] identifier[hsq_dict] ) keyword[and] ( identifier[nq] - literal[int] keyword[in] identifier[hsq_dict] )):
keyword[if] ( identifier[hsq_dict] [ identifier[nq] ]!= identifier[hsq_dict] [ identifier[nq] - literal[int] ]):
identifier[hsq_dict] [ identifier[nq] - literal[int] ]+= literal[string] + identifier[hsq_dict] [ identifier[nq] ]
keyword[del] identifier[hsq_dict] [ identifier[nq] ]
identifier[line_breaks] . identifier[append] (( identifier[nqstart] , identifier[nq] ))
identifier[nqstart] = identifier[nq]
keyword[else] :
identifier[dist] += identifier[np] . identifier[linalg] . identifier[norm] ( identifier[q1] - identifier[q2] )
identifier[distances] . identifier[append] ( identifier[dist] )
identifier[line_breaks] . identifier[append] (( identifier[nqstart] , identifier[len] ( identifier[qpoints] )))
identifier[d] [ literal[string] ]= identifier[distances]
identifier[d] [ literal[string] ]= identifier[line_breaks]
identifier[d] [ literal[string] ]= identifier[list] ( identifier[hsq_dict] . identifier[items] ())
identifier[thz2cm1] = literal[int]
identifier[bands] = identifier[self] . identifier[bands] . identifier[copy] ()* identifier[thz2cm1]
identifier[d] [ literal[string] ]= identifier[bands] . identifier[T] . identifier[tolist] ()
identifier[eigenvectors] = identifier[self] . identifier[eigendisplacements] . identifier[copy] ()
identifier[eigenvectors] /= identifier[np] . identifier[linalg] . identifier[norm] ( identifier[eigenvectors] [ literal[int] , literal[int] ])
identifier[eigenvectors] = identifier[eigenvectors] . identifier[swapaxes] ( literal[int] , literal[int] )
identifier[eigenvectors] = identifier[np] . identifier[array] ([ identifier[eigenvectors] . identifier[real] , identifier[eigenvectors] . identifier[imag] ])
identifier[eigenvectors] = identifier[np] . identifier[rollaxis] ( identifier[eigenvectors] , literal[int] , literal[int] )
identifier[d] [ literal[string] ]= identifier[eigenvectors] . identifier[tolist] ()
keyword[return] identifier[d] | def as_phononwebsite(self):
"""
Return a dictionary with the phononwebsite format:
http://henriquemiranda.github.io/phononwebsite
"""
d = {}
#define the lattice
d['lattice'] = self.structure.lattice._matrix.tolist()
#define atoms
atom_pos_car = []
atom_pos_red = []
atom_types = []
for site in self.structure.sites:
atom_pos_car.append(site.coords.tolist())
atom_pos_red.append(site.frac_coords.tolist())
atom_types.append(site.species_string) # depends on [control=['for'], data=['site']]
#default for now
d['repetitions'] = get_reasonable_repetitions(len(atom_pos_car))
d['natoms'] = len(atom_pos_car)
d['atom_pos_car'] = atom_pos_car
d['atom_pos_red'] = atom_pos_red
d['atom_types'] = atom_types
d['atom_numbers'] = self.structure.atomic_numbers
d['formula'] = self.structure.formula
d['name'] = self.structure.formula
#get qpoints
qpoints = []
for q in self.qpoints:
qpoints.append(list(q.frac_coords)) # depends on [control=['for'], data=['q']]
d['qpoints'] = qpoints
# get labels
hsq_dict = collections.OrderedDict()
for (nq, q) in enumerate(self.qpoints):
if q.label is not None:
hsq_dict[nq] = q.label # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
#get distances
dist = 0
nqstart = 0
distances = [dist]
line_breaks = []
for nq in range(1, len(qpoints)):
q1 = np.array(qpoints[nq])
q2 = np.array(qpoints[nq - 1])
#detect jumps
if nq in hsq_dict and nq - 1 in hsq_dict:
if hsq_dict[nq] != hsq_dict[nq - 1]:
hsq_dict[nq - 1] += '|' + hsq_dict[nq] # depends on [control=['if'], data=[]]
del hsq_dict[nq]
line_breaks.append((nqstart, nq))
nqstart = nq # depends on [control=['if'], data=[]]
else:
dist += np.linalg.norm(q1 - q2)
distances.append(dist) # depends on [control=['for'], data=['nq']]
line_breaks.append((nqstart, len(qpoints)))
d['distances'] = distances
d['line_breaks'] = line_breaks
d['highsym_qpts'] = list(hsq_dict.items())
#eigenvalues
thz2cm1 = 33.35641
bands = self.bands.copy() * thz2cm1
d['eigenvalues'] = bands.T.tolist()
#eigenvectors
eigenvectors = self.eigendisplacements.copy()
eigenvectors /= np.linalg.norm(eigenvectors[0, 0])
eigenvectors = eigenvectors.swapaxes(0, 1)
eigenvectors = np.array([eigenvectors.real, eigenvectors.imag])
eigenvectors = np.rollaxis(eigenvectors, 0, 5)
d['vectors'] = eigenvectors.tolist()
return d |
def parse_data_to_internal(self, include_custom=False):
    """
    Invoke parse_data_to_internal() on every element of ``self.data``,
    and also on every element of ``self.custom_data`` when
    *include_custom* is true.
    """
    for entry in self.data.values():
        entry.parse_data_to_internal()
    if include_custom:
        for entry in self.custom_data.values():
            entry.parse_data_to_internal()
constant[
Invoke parse_data_to_internal() for every element
in self.data
]
for taget[name[element]] in starred[name[self].data] begin[:]
call[call[name[self].data][name[element]].parse_data_to_internal, parameter[]]
if name[include_custom] begin[:]
for taget[name[element]] in starred[name[self].custom_data] begin[:]
call[call[name[self].custom_data][name[element]].parse_data_to_internal, parameter[]] | keyword[def] identifier[parse_data_to_internal] ( identifier[self] , identifier[include_custom] = keyword[False] ):
literal[string]
keyword[for] identifier[element] keyword[in] identifier[self] . identifier[data] :
identifier[self] . identifier[data] [ identifier[element] ]. identifier[parse_data_to_internal] ()
keyword[if] identifier[include_custom] :
keyword[for] identifier[element] keyword[in] identifier[self] . identifier[custom_data] :
identifier[self] . identifier[custom_data] [ identifier[element] ]. identifier[parse_data_to_internal] () | def parse_data_to_internal(self, include_custom=False):
"""
Invoke parse_data_to_internal() for every element
in self.data
"""
for element in self.data:
self.data[element].parse_data_to_internal() # depends on [control=['for'], data=['element']]
if include_custom:
for element in self.custom_data:
self.custom_data[element].parse_data_to_internal() # depends on [control=['for'], data=['element']] # depends on [control=['if'], data=[]] |
async def play_url(self, url, position=0):
    """Play media from an URL on the device."""
    # Binary plist payload telling the device what to play and where to start.
    payload = plistlib.dumps(
        {'Content-Location': url, 'Start-Position': position},
        fmt=plistlib.FMT_BINARY)
    address = self._url(self.port, 'play')
    _LOGGER.debug('AirPlay %s to %s', url, address)
    resp = None
    try:
        # pylint: disable=no-member
        resp = await self.session.post(
            address,
            headers={'User-Agent': 'MediaControl/1.0',
                     'Content-Type': 'application/x-apple-binary-plist'},
            data=payload,
            timeout=TIMEOUT)
        await self._wait_for_media_to_end()
    finally:
        # Always release the response, even if waiting for playback fails.
        if resp is not None:
            resp.close()
literal[string]
identifier[headers] ={ literal[string] : literal[string] ,
literal[string] : literal[string] }
identifier[body] ={ literal[string] : identifier[url] , literal[string] : identifier[position] }
identifier[address] = identifier[self] . identifier[_url] ( identifier[self] . identifier[port] , literal[string] )
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[url] , identifier[address] )
identifier[resp] = keyword[None]
keyword[try] :
identifier[resp] = keyword[await] identifier[self] . identifier[session] . identifier[post] (
identifier[address] , identifier[headers] = identifier[headers] ,
identifier[data] = identifier[plistlib] . identifier[dumps] ( identifier[body] , identifier[fmt] = identifier[plistlib] . identifier[FMT_BINARY] ),
identifier[timeout] = identifier[TIMEOUT] )
keyword[await] identifier[self] . identifier[_wait_for_media_to_end] ()
keyword[finally] :
keyword[if] identifier[resp] keyword[is] keyword[not] keyword[None] :
identifier[resp] . identifier[close] () | async def play_url(self, url, position=0):
"""Play media from an URL on the device."""
headers = {'User-Agent': 'MediaControl/1.0', 'Content-Type': 'application/x-apple-binary-plist'}
body = {'Content-Location': url, 'Start-Position': position}
address = self._url(self.port, 'play')
_LOGGER.debug('AirPlay %s to %s', url, address)
resp = None
try:
# pylint: disable=no-member
resp = await self.session.post(address, headers=headers, data=plistlib.dumps(body, fmt=plistlib.FMT_BINARY), timeout=TIMEOUT)
await self._wait_for_media_to_end() # depends on [control=['try'], data=[]]
finally:
if resp is not None:
resp.close() # depends on [control=['if'], data=['resp']] |
def determine_push_rights(*, branch_whitelist, TRAVIS_BRANCH,
                          TRAVIS_PULL_REQUEST, TRAVIS_TAG, build_tags, fork):
    """Check if Travis is running on ``master`` (or a whitelisted branch) to
    determine if we can/should push the docs to the deploy repo

    Returns True when every push condition holds, False otherwise.  Tag
    builds short-circuit: a disabled tag build returns ``build_tags``
    (False) immediately; an enabled one falls through to the other checks.
    """
    canpush = True
    if TRAVIS_TAG:
        if not build_tags:
            print("The docs are not pushed on tag builds. To push on future tag builds, use --build-tags")
            return build_tags
    # Whitelist entries are regular expressions anchored at the start of the
    # branch name.  Use a generator with re.match instead of materializing a
    # list inside any() and compiling the pattern manually.
    if not any(re.match(x, TRAVIS_BRANCH) for x in branch_whitelist):
        print("The docs are only pushed to gh-pages from master. To allow pushing from "
              "a non-master branch, use the --no-require-master flag", file=sys.stderr)
        print("This is the {TRAVIS_BRANCH} branch".format(TRAVIS_BRANCH=TRAVIS_BRANCH), file=sys.stderr)
        canpush = False
    # Travis sets TRAVIS_PULL_REQUEST to the literal string "false" on
    # non-PR builds, and to the PR number otherwise.
    if TRAVIS_PULL_REQUEST != "false":
        print("The website and docs are not pushed to gh-pages on pull requests", file=sys.stderr)
        canpush = False
    if fork:
        print("The website and docs are not pushed to gh-pages on fork builds.", file=sys.stderr)
        canpush = False
    # Guard against an infinite loop: a doctr commit triggering a build that
    # pushes another doctr commit.
    if last_commit_by_doctr():
        print(red("The last commit on this branch was pushed by doctr. Not pushing to "
                  "avoid an infinite build-loop."), file=sys.stderr)
        canpush = False
    return canpush
constant[Check if Travis is running on ``master`` (or a whitelisted branch) to
determine if we can/should push the docs to the deploy repo
]
variable[canpush] assign[=] constant[True]
if name[TRAVIS_TAG] begin[:]
if <ast.UnaryOp object at 0x7da1b1081960> begin[:]
call[name[print], parameter[constant[The docs are not pushed on tag builds. To push on future tag builds, use --build-tags]]]
return[name[build_tags]]
if <ast.UnaryOp object at 0x7da1b1080280> begin[:]
call[name[print], parameter[constant[The docs are only pushed to gh-pages from master. To allow pushing from a non-master branch, use the --no-require-master flag]]]
call[name[print], parameter[call[constant[This is the {TRAVIS_BRANCH} branch].format, parameter[]]]]
variable[canpush] assign[=] constant[False]
if compare[name[TRAVIS_PULL_REQUEST] not_equal[!=] constant[false]] begin[:]
call[name[print], parameter[constant[The website and docs are not pushed to gh-pages on pull requests]]]
variable[canpush] assign[=] constant[False]
if name[fork] begin[:]
call[name[print], parameter[constant[The website and docs are not pushed to gh-pages on fork builds.]]]
variable[canpush] assign[=] constant[False]
if call[name[last_commit_by_doctr], parameter[]] begin[:]
call[name[print], parameter[call[name[red], parameter[constant[The last commit on this branch was pushed by doctr. Not pushing to avoid an infinite build-loop.]]]]]
variable[canpush] assign[=] constant[False]
return[name[canpush]] | keyword[def] identifier[determine_push_rights] (*, identifier[branch_whitelist] , identifier[TRAVIS_BRANCH] ,
identifier[TRAVIS_PULL_REQUEST] , identifier[TRAVIS_TAG] , identifier[build_tags] , identifier[fork] ):
literal[string]
identifier[canpush] = keyword[True]
keyword[if] identifier[TRAVIS_TAG] :
keyword[if] keyword[not] identifier[build_tags] :
identifier[print] ( literal[string] )
keyword[return] identifier[build_tags]
keyword[if] keyword[not] identifier[any] ([ identifier[re] . identifier[compile] ( identifier[x] ). identifier[match] ( identifier[TRAVIS_BRANCH] ) keyword[for] identifier[x] keyword[in] identifier[branch_whitelist] ]):
identifier[print] ( literal[string]
literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[print] ( literal[string] . identifier[format] ( identifier[TRAVIS_BRANCH] = identifier[TRAVIS_BRANCH] ), identifier[file] = identifier[sys] . identifier[stderr] )
identifier[canpush] = keyword[False]
keyword[if] identifier[TRAVIS_PULL_REQUEST] != literal[string] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[canpush] = keyword[False]
keyword[if] identifier[fork] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[canpush] = keyword[False]
keyword[if] identifier[last_commit_by_doctr] ():
identifier[print] ( identifier[red] ( literal[string]
literal[string] ), identifier[file] = identifier[sys] . identifier[stderr] )
identifier[canpush] = keyword[False]
keyword[return] identifier[canpush] | def determine_push_rights(*, branch_whitelist, TRAVIS_BRANCH, TRAVIS_PULL_REQUEST, TRAVIS_TAG, build_tags, fork):
"""Check if Travis is running on ``master`` (or a whitelisted branch) to
determine if we can/should push the docs to the deploy repo
"""
canpush = True
if TRAVIS_TAG:
if not build_tags:
print('The docs are not pushed on tag builds. To push on future tag builds, use --build-tags') # depends on [control=['if'], data=[]]
return build_tags # depends on [control=['if'], data=[]]
if not any([re.compile(x).match(TRAVIS_BRANCH) for x in branch_whitelist]):
print('The docs are only pushed to gh-pages from master. To allow pushing from a non-master branch, use the --no-require-master flag', file=sys.stderr)
print('This is the {TRAVIS_BRANCH} branch'.format(TRAVIS_BRANCH=TRAVIS_BRANCH), file=sys.stderr)
canpush = False # depends on [control=['if'], data=[]]
if TRAVIS_PULL_REQUEST != 'false':
print('The website and docs are not pushed to gh-pages on pull requests', file=sys.stderr)
canpush = False # depends on [control=['if'], data=[]]
if fork:
print('The website and docs are not pushed to gh-pages on fork builds.', file=sys.stderr)
canpush = False # depends on [control=['if'], data=[]]
if last_commit_by_doctr():
print(red('The last commit on this branch was pushed by doctr. Not pushing to avoid an infinite build-loop.'), file=sys.stderr)
canpush = False # depends on [control=['if'], data=[]]
return canpush |
def set_clbit(self, clbit, element):
    """
    Place a drawing element on the layer row corresponding to a clbit.
    Args:
        clbit (cbit): Element of self.cregs.
        element (DrawElement): Element to set in the clbit
    """
    row = self.cregs.index(clbit)
    self.clbit_layer[row] = element
constant[
Sets the clbit to the element
Args:
clbit (cbit): Element of self.cregs.
element (DrawElement): Element to set in the clbit
]
call[name[self].clbit_layer][call[name[self].cregs.index, parameter[name[clbit]]]] assign[=] name[element] | keyword[def] identifier[set_clbit] ( identifier[self] , identifier[clbit] , identifier[element] ):
literal[string]
identifier[self] . identifier[clbit_layer] [ identifier[self] . identifier[cregs] . identifier[index] ( identifier[clbit] )]= identifier[element] | def set_clbit(self, clbit, element):
"""
Sets the clbit to the element
Args:
clbit (cbit): Element of self.cregs.
element (DrawElement): Element to set in the clbit
"""
self.clbit_layer[self.cregs.index(clbit)] = element |
def parsebytes(self, text, headersonly=False):
    """Create a message structure from a byte string.
    Returns the root of the message structure. Optional headersonly is a
    flag specifying whether to stop parsing after reading the headers or
    not. The default is False, meaning it parses the entire contents of
    the file.
    """
    # surrogateescape round-trips arbitrary bytes through the str parser.
    decoded = text.decode('ASCII', errors='surrogateescape')
    return self.parser.parsestr(decoded, headersonly)
constant[Create a message structure from a byte string.
Returns the root of the message structure. Optional headersonly is a
flag specifying whether to stop parsing after reading the headers or
not. The default is False, meaning it parses the entire contents of
the file.
]
variable[text] assign[=] call[name[text].decode, parameter[constant[ASCII]]]
return[call[name[self].parser.parsestr, parameter[name[text], name[headersonly]]]] | keyword[def] identifier[parsebytes] ( identifier[self] , identifier[text] , identifier[headersonly] = keyword[False] ):
literal[string]
identifier[text] = identifier[text] . identifier[decode] ( literal[string] , identifier[errors] = literal[string] )
keyword[return] identifier[self] . identifier[parser] . identifier[parsestr] ( identifier[text] , identifier[headersonly] ) | def parsebytes(self, text, headersonly=False):
"""Create a message structure from a byte string.
Returns the root of the message structure. Optional headersonly is a
flag specifying whether to stop parsing after reading the headers or
not. The default is False, meaning it parses the entire contents of
the file.
"""
text = text.decode('ASCII', errors='surrogateescape')
return self.parser.parsestr(text, headersonly) |
def _set_group_best(self, v, load=False):
  """
  Setter method for group_best, mapped from YANG variable /routing_system/route_map/content/match/additional_paths/advertise_set/group_best (empty)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_group_best is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_group_best() directly.
  YANG Description: BGP Add-Path advertise group-best path
  """
  # Unwrap values that arrive already wrapped in a YANG user type.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # "empty" YANG presence leaf modelled as a boolean; YANGDynClass validates
    # the value against the generated schema metadata.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="group-best", rest_name="group-best", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP Add-Path advertise group-best path', u'code-name': u'additional-paths-advertise-set-group-best', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='empty', is_config=True)
  except (TypeError, ValueError):
    # Re-raise with a structured error payload describing the expected type.
    raise ValueError({
      'error-string': """group_best must be of a type compatible with empty""",
      'defined-type': "empty",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="group-best", rest_name="group-best", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP Add-Path advertise group-best path', u'code-name': u'additional-paths-advertise-set-group-best', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='empty', is_config=True)""",
    })
  self.__group_best = t
  # Fire the change-notification hook if the parent class provides one.
  if hasattr(self, '_set'):
    self._set()
constant[
Setter method for group_best, mapped from YANG variable /routing_system/route_map/content/match/additional_paths/advertise_set/group_best (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_group_best is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_group_best() directly.
YANG Description: BGP Add-Path advertise group-best path
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f58d8d0>
name[self].__group_best assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_group_best] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGBool] , identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__group_best] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_group_best(self, v, load=False):
"""
Setter method for group_best, mapped from YANG variable /routing_system/route_map/content/match/additional_paths/advertise_set/group_best (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_group_best is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_group_best() directly.
YANG Description: BGP Add-Path advertise group-best path
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGBool, is_leaf=True, yang_name='group-best', rest_name='group-best', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP Add-Path advertise group-best path', u'code-name': u'additional-paths-advertise-set-group-best', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='empty', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'group_best must be of a type compatible with empty', 'defined-type': 'empty', 'generated-type': 'YANGDynClass(base=YANGBool, is_leaf=True, yang_name="group-best", rest_name="group-best", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'BGP Add-Path advertise group-best path\', u\'code-name\': u\'additional-paths-advertise-set-group-best\', u\'cli-full-command\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-ip-policy\', defining_module=\'brocade-ip-policy\', yang_type=\'empty\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__group_best = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def to_resolvers(sweepable: Sweepable) -> List[ParamResolver]:
    """Convert a Sweepable to a list of ParamResolvers.

    Accepts a single ParamResolver, a Sweep, or an iterable of either
    (an iterable of Sweeps is flattened into one list of resolvers).

    Raises:
        TypeError: If ``sweepable`` is none of the supported types.
    """
    if isinstance(sweepable, ParamResolver):
        return [sweepable]
    if isinstance(sweepable, Sweep):
        return list(sweepable)
    # NOTE: collections.Iterable was removed in Python 3.10; the abstract
    # base classes live in collections.abc.
    if isinstance(sweepable, collections.abc.Iterable):
        # Materialize first: peeking with next(iter(...)) would raise
        # StopIteration on an empty iterable and would consume the first
        # element of a one-shot iterator.
        items = list(sweepable)
        if not items:
            return []
        if isinstance(items[0], ParamResolver):
            return items
        # Iterable of Sweeps: flatten into a single resolver list.
        return sum([list(s) for s in items], [])
    raise TypeError('Unexpected Sweepable type.')
constant[Convert a Sweepable to a list of ParamResolvers.]
if call[name[isinstance], parameter[name[sweepable], name[ParamResolver]]] begin[:]
return[list[[<ast.Name object at 0x7da1b1c6f7c0>]]]
<ast.Raise object at 0x7da1b1c6ee00> | keyword[def] identifier[to_resolvers] ( identifier[sweepable] : identifier[Sweepable] )-> identifier[List] [ identifier[ParamResolver] ]:
literal[string]
keyword[if] identifier[isinstance] ( identifier[sweepable] , identifier[ParamResolver] ):
keyword[return] [ identifier[sweepable] ]
keyword[elif] identifier[isinstance] ( identifier[sweepable] , identifier[Sweep] ):
keyword[return] identifier[list] ( identifier[sweepable] )
keyword[elif] identifier[isinstance] ( identifier[sweepable] , identifier[collections] . identifier[Iterable] ):
identifier[iterable] = identifier[cast] ( identifier[collections] . identifier[Iterable] , identifier[sweepable] )
keyword[return] identifier[list] ( identifier[iterable] ) keyword[if] identifier[isinstance] ( identifier[next] ( identifier[iter] ( identifier[iterable] )),
identifier[ParamResolver] ) keyword[else] identifier[sum] (
[ identifier[list] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[iterable] ],[])
keyword[raise] identifier[TypeError] ( literal[string] ) | def to_resolvers(sweepable: Sweepable) -> List[ParamResolver]:
"""Convert a Sweepable to a list of ParamResolvers."""
if isinstance(sweepable, ParamResolver):
return [sweepable] # depends on [control=['if'], data=[]]
elif isinstance(sweepable, Sweep):
return list(sweepable) # depends on [control=['if'], data=[]]
elif isinstance(sweepable, collections.Iterable):
iterable = cast(collections.Iterable, sweepable)
return list(iterable) if isinstance(next(iter(iterable)), ParamResolver) else sum([list(s) for s in iterable], []) # depends on [control=['if'], data=[]]
raise TypeError('Unexpected Sweepable type.') |
def get_stream_action_type(stream_arn):
    """Returns the awacs Action for a stream type given an arn
    Args:
        stream_arn (str): The Arn of the stream.
    Returns:
        :class:`awacs.aws.Action`: The appropriate stream type awacs Action
            class
    Raises:
        ValueError: If the stream type doesn't match kinesis or dynamodb.
    """
    action_types = {
        "kinesis": awacs.kinesis.Action,
        "dynamodb": awacs.dynamodb.Action,
    }
    # The service name is the third colon-separated field of an ARN.
    stream_type = stream_arn.split(":")[2]
    try:
        return action_types[stream_type]
    except KeyError:
        raise ValueError(
            "Invalid stream type '%s' in arn '%s'" % (stream_type, stream_arn)
        )
constant[Returns the awacs Action for a stream type given an arn
Args:
stream_arn (str): The Arn of the stream.
Returns:
:class:`awacs.aws.Action`: The appropriate stream type awacs Action
class
Raises:
ValueError: If the stream type doesn't match kinesis or dynamodb.
]
variable[stream_type_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b061ac80>, <ast.Constant object at 0x7da1b061bdc0>], [<ast.Attribute object at 0x7da1b0618400>, <ast.Attribute object at 0x7da1b06186d0>]]
variable[stream_type] assign[=] call[call[name[stream_arn].split, parameter[constant[:]]]][constant[2]]
<ast.Try object at 0x7da1b0619db0> | keyword[def] identifier[get_stream_action_type] ( identifier[stream_arn] ):
literal[string]
identifier[stream_type_map] ={
literal[string] : identifier[awacs] . identifier[kinesis] . identifier[Action] ,
literal[string] : identifier[awacs] . identifier[dynamodb] . identifier[Action] ,
}
identifier[stream_type] = identifier[stream_arn] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[try] :
keyword[return] identifier[stream_type_map] [ identifier[stream_type] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ValueError] (
literal[string] %( identifier[stream_type] , identifier[stream_arn] )
) | def get_stream_action_type(stream_arn):
"""Returns the awacs Action for a stream type given an arn
Args:
stream_arn (str): The Arn of the stream.
Returns:
:class:`awacs.aws.Action`: The appropriate stream type awacs Action
class
Raises:
ValueError: If the stream type doesn't match kinesis or dynamodb.
"""
stream_type_map = {'kinesis': awacs.kinesis.Action, 'dynamodb': awacs.dynamodb.Action}
stream_type = stream_arn.split(':')[2]
try:
return stream_type_map[stream_type] # depends on [control=['try'], data=[]]
except KeyError:
raise ValueError("Invalid stream type '%s' in arn '%s'" % (stream_type, stream_arn)) # depends on [control=['except'], data=[]] |
def normalize_boolop(expr):
    """
    Normalize a boolop by folding together nested And/Or exprs.
    """
    op = expr.op
    flattened = []
    for value in expr.values:
        if not isinstance(value, ast.BoolOp):
            flattened.append(value)
            continue
        normalized = normalize_boolop(value)
        if type(value.op) == type(op):
            # Same operator: splice the child's operands into this level.
            flattened.extend(normalized.values)
        else:
            # Different operator: keep as a (normalized) nested expression.
            flattened.append(normalized)
    return ast.BoolOp(op=op, values=flattened)
constant[
Normalize a boolop by folding together nested And/Or exprs.
]
variable[optype] assign[=] name[expr].op
variable[newvalues] assign[=] list[[]]
for taget[name[subexpr]] in starred[name[expr].values] begin[:]
if <ast.UnaryOp object at 0x7da1b05b6a40> begin[:]
call[name[newvalues].append, parameter[name[subexpr]]]
return[call[name[ast].BoolOp, parameter[]]] | keyword[def] identifier[normalize_boolop] ( identifier[expr] ):
literal[string]
identifier[optype] = identifier[expr] . identifier[op]
identifier[newvalues] =[]
keyword[for] identifier[subexpr] keyword[in] identifier[expr] . identifier[values] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[subexpr] , identifier[ast] . identifier[BoolOp] ):
identifier[newvalues] . identifier[append] ( identifier[subexpr] )
keyword[elif] identifier[type] ( identifier[subexpr] . identifier[op] )!= identifier[type] ( identifier[optype] ):
identifier[newvalues] . identifier[append] ( identifier[normalize_boolop] ( identifier[subexpr] ))
keyword[else] :
identifier[newvalues] . identifier[extend] ( identifier[normalize_boolop] ( identifier[subexpr] ). identifier[values] )
keyword[return] identifier[ast] . identifier[BoolOp] ( identifier[op] = identifier[optype] , identifier[values] = identifier[newvalues] ) | def normalize_boolop(expr):
"""
Normalize a boolop by folding together nested And/Or exprs.
"""
optype = expr.op
newvalues = []
for subexpr in expr.values:
if not isinstance(subexpr, ast.BoolOp):
newvalues.append(subexpr) # depends on [control=['if'], data=[]]
elif type(subexpr.op) != type(optype):
newvalues.append(normalize_boolop(subexpr)) # depends on [control=['if'], data=[]]
else:
# Normalize subexpression, then inline its values into the
# top-level subexpr.
newvalues.extend(normalize_boolop(subexpr).values) # depends on [control=['for'], data=['subexpr']]
return ast.BoolOp(op=optype, values=newvalues) |
def check(self, var):
    """Return True if the variable matches this type, and False otherwise."""
    if not isinstance(var, dict):
        return False
    # Reject any key that is not part of the declared schema.
    for key in var:
        if key not in self._types:
            return False
    # Every declared key must hold a value of its declared type
    # (missing keys are checked as None).
    for key, expected in viewitems(self._types):
        if not _check_type(var.get(key, None), expected):
            return False
    return True
constant[Return True if the variable matches this type, and False otherwise.]
if <ast.UnaryOp object at 0x7da1b0372890> begin[:]
return[constant[False]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b0357910>]] begin[:]
return[constant[False]]
for taget[tuple[[<ast.Name object at 0x7da1b0354460>, <ast.Name object at 0x7da1b03576a0>]]] in starred[call[name[viewitems], parameter[name[self]._types]]] begin[:]
variable[val] assign[=] call[name[var].get, parameter[name[key], constant[None]]]
if <ast.UnaryOp object at 0x7da1b0354580> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[check] ( identifier[self] , identifier[var] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[var] , identifier[dict] ): keyword[return] keyword[False]
keyword[if] identifier[any] ( identifier[key] keyword[not] keyword[in] identifier[self] . identifier[_types] keyword[for] identifier[key] keyword[in] identifier[var] ): keyword[return] keyword[False]
keyword[for] identifier[key] , identifier[ktype] keyword[in] identifier[viewitems] ( identifier[self] . identifier[_types] ):
identifier[val] = identifier[var] . identifier[get] ( identifier[key] , keyword[None] )
keyword[if] keyword[not] identifier[_check_type] ( identifier[val] , identifier[ktype] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def check(self, var):
"""Return True if the variable matches this type, and False otherwise."""
if not isinstance(var, dict):
return False # depends on [control=['if'], data=[]]
if any((key not in self._types for key in var)):
return False # depends on [control=['if'], data=[]]
for (key, ktype) in viewitems(self._types):
val = var.get(key, None)
if not _check_type(val, ktype):
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return True |
def ossystem(cmd):
    """Like os.system, but returns output of command as string."""
    # Merge stderr into stdout so the caller receives a single stream.
    process = subprocess.Popen(
        cmd,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    output, _ = process.communicate()
    return output.decode('ascii')
constant[Like os.system, but returns output of command as string.]
variable[p] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]]
<ast.Tuple object at 0x7da20c7945b0> assign[=] call[name[p].communicate, parameter[]]
return[call[name[stdout].decode, parameter[constant[ascii]]]] | keyword[def] identifier[ossystem] ( identifier[cmd] ):
literal[string]
identifier[p] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[shell] = keyword[True] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[STDOUT] )
( identifier[stdout] , identifier[stderr] )= identifier[p] . identifier[communicate] ()
keyword[return] identifier[stdout] . identifier[decode] ( literal[string] ) | def ossystem(cmd):
"""Like os.system, but returns output of command as string."""
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
(stdout, stderr) = p.communicate()
return stdout.decode('ascii') |
def wallet_representative_set(self, wallet, representative):
    """
    Sets the default **representative** for **wallet**
    .. enable_control required
    :param wallet: Wallet to set default representative account for
    :type wallet: str
    :param representative: Representative account to set for **wallet**
    :type representative: str
    :raises: :py:exc:`nano.rpc.RPCException`
    >>> rpc.wallet_representative_set(
    ...     wallet="000D1BAEC8EC208142C99059B393051BAC8380F9B5A2E6B2489A277D81789F3F",
    ...     representative="xrb_3e3j5tkog48pnny9dmfzj1r16pg8t1e76dz5tmac6iq689wyjfpi00000000"
    ... )
    True
    """
    payload = {
        "wallet": self._process_value(wallet, 'wallet'),
        "representative": self._process_value(representative, 'account'),
    }
    response = self.call('wallet_representative_set', payload)
    return response['set'] == '1'
constant[
Sets the default **representative** for **wallet**
.. enable_control required
:param wallet: Wallet to set default representative account for
:type wallet: str
:param representative: Representative account to set for **wallet**
:type representative: str
:raises: :py:exc:`nano.rpc.RPCException`
>>> rpc.wallet_representative_set(
... wallet="000D1BAEC8EC208142C99059B393051BAC8380F9B5A2E6B2489A277D81789F3F",
... representative="xrb_3e3j5tkog48pnny9dmfzj1r16pg8t1e76dz5tmac6iq689wyjfpi00000000"
... )
True
]
variable[wallet] assign[=] call[name[self]._process_value, parameter[name[wallet], constant[wallet]]]
variable[representative] assign[=] call[name[self]._process_value, parameter[name[representative], constant[account]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b2589810>, <ast.Constant object at 0x7da1b2589ab0>], [<ast.Name object at 0x7da1b258b8b0>, <ast.Name object at 0x7da1b25899c0>]]
variable[resp] assign[=] call[name[self].call, parameter[constant[wallet_representative_set], name[payload]]]
return[compare[call[name[resp]][constant[set]] equal[==] constant[1]]] | keyword[def] identifier[wallet_representative_set] ( identifier[self] , identifier[wallet] , identifier[representative] ):
literal[string]
identifier[wallet] = identifier[self] . identifier[_process_value] ( identifier[wallet] , literal[string] )
identifier[representative] = identifier[self] . identifier[_process_value] ( identifier[representative] , literal[string] )
identifier[payload] ={ literal[string] : identifier[wallet] , literal[string] : identifier[representative] }
identifier[resp] = identifier[self] . identifier[call] ( literal[string] , identifier[payload] )
keyword[return] identifier[resp] [ literal[string] ]== literal[string] | def wallet_representative_set(self, wallet, representative):
"""
Sets the default **representative** for **wallet**
.. enable_control required
:param wallet: Wallet to set default representative account for
:type wallet: str
:param representative: Representative account to set for **wallet**
:type representative: str
:raises: :py:exc:`nano.rpc.RPCException`
>>> rpc.wallet_representative_set(
... wallet="000D1BAEC8EC208142C99059B393051BAC8380F9B5A2E6B2489A277D81789F3F",
... representative="xrb_3e3j5tkog48pnny9dmfzj1r16pg8t1e76dz5tmac6iq689wyjfpi00000000"
... )
True
"""
wallet = self._process_value(wallet, 'wallet')
representative = self._process_value(representative, 'account')
payload = {'wallet': wallet, 'representative': representative}
resp = self.call('wallet_representative_set', payload)
return resp['set'] == '1' |
def get_intent(self, intent_id):
    """Fetch and return the intent object identified by ``intent_id``."""
    endpoint = self._intent_uri(intent_id=intent_id)
    response = self._get(endpoint)
    return response
constant[Returns the intent object with the given intent_id]
variable[endpoint] assign[=] call[name[self]._intent_uri, parameter[]]
return[call[name[self]._get, parameter[name[endpoint]]]] | keyword[def] identifier[get_intent] ( identifier[self] , identifier[intent_id] ):
literal[string]
identifier[endpoint] = identifier[self] . identifier[_intent_uri] ( identifier[intent_id] = identifier[intent_id] )
keyword[return] identifier[self] . identifier[_get] ( identifier[endpoint] ) | def get_intent(self, intent_id):
"""Returns the intent object with the given intent_id"""
endpoint = self._intent_uri(intent_id=intent_id)
return self._get(endpoint) |
def make_decoder(base_depth, activation, input_size, output_shape):
  """Creates the decoder function.
  Args:
    base_depth: Layer base depth in decoder net.
    activation: Activation function in hidden layers.
    input_size: The flattened latent input shape as an int.
    output_shape: The output image shape as a list.
  Returns:
    decoder: A `callable` mapping a `Tensor` of encodings to a
      `tfd.Distribution` instance over images.
  """
  # Partial constructors so every layer below shares padding/activation.
  deconv = functools.partial(
      tf.keras.layers.Conv2DTranspose, padding="SAME", activation=activation)
  conv = functools.partial(
      tf.keras.layers.Conv2D, padding="SAME", activation=activation)
  # Upsample from a 1x1 spatial map (the reshaped code) back to image size;
  # the final conv has no activation because its outputs are Bernoulli logits.
  decoder_net = tf.keras.Sequential([
      tf.keras.layers.Reshape((1, 1, input_size)),
      deconv(2 * base_depth, 7, padding="VALID"),
      deconv(2 * base_depth, 5),
      deconv(2 * base_depth, 5, 2),
      deconv(base_depth, 5),
      deconv(base_depth, 5, 2),
      deconv(base_depth, 5),
      conv(output_shape[-1], 5, activation=None),
      tf.keras.layers.Reshape(output_shape),
  ])
  def decoder(codes):
    """Builds a distribution over images given codes.
    Args:
      codes: A `Tensor` representing the inputs to be decoded, of shape `[...,
        code_size]`.
    Returns:
      decoder_distribution: A multivariate `Bernoulli` distribution.
    """
    logits = decoder_net(codes)
    # Independent reinterprets the per-pixel Bernoullis as one joint
    # distribution over the whole image (event ndims = image rank).
    return tfd.Independent(tfd.Bernoulli(logits=logits),
                           reinterpreted_batch_ndims=len(output_shape),
                           name="decoder_distribution")
  return decoder
constant[Creates the decoder function.
Args:
base_depth: Layer base depth in decoder net.
activation: Activation function in hidden layers.
input_size: The flattened latent input shape as an int.
output_shape: The output image shape as a list.
Returns:
decoder: A `callable` mapping a `Tensor` of encodings to a
`tfd.Distribution` instance over images.
]
variable[deconv] assign[=] call[name[functools].partial, parameter[name[tf].keras.layers.Conv2DTranspose]]
variable[conv] assign[=] call[name[functools].partial, parameter[name[tf].keras.layers.Conv2D]]
variable[decoder_net] assign[=] call[name[tf].keras.Sequential, parameter[list[[<ast.Call object at 0x7da1b02d15d0>, <ast.Call object at 0x7da1b02d0160>, <ast.Call object at 0x7da1b02d1e40>, <ast.Call object at 0x7da1b02d1cf0>, <ast.Call object at 0x7da1b02d0490>, <ast.Call object at 0x7da1b02d1d50>, <ast.Call object at 0x7da1b02d17b0>, <ast.Call object at 0x7da1b02d0640>, <ast.Call object at 0x7da1b02d0a90>]]]]
def function[decoder, parameter[codes]]:
constant[Builds a distribution over images given codes.
Args:
codes: A `Tensor` representing the inputs to be decoded, of shape `[...,
code_size]`.
Returns:
decoder_distribution: A multivariate `Bernoulli` distribution.
]
variable[logits] assign[=] call[name[decoder_net], parameter[name[codes]]]
return[call[name[tfd].Independent, parameter[call[name[tfd].Bernoulli, parameter[]]]]]
return[name[decoder]] | keyword[def] identifier[make_decoder] ( identifier[base_depth] , identifier[activation] , identifier[input_size] , identifier[output_shape] ):
literal[string]
identifier[deconv] = identifier[functools] . identifier[partial] (
identifier[tf] . identifier[keras] . identifier[layers] . identifier[Conv2DTranspose] , identifier[padding] = literal[string] , identifier[activation] = identifier[activation] )
identifier[conv] = identifier[functools] . identifier[partial] (
identifier[tf] . identifier[keras] . identifier[layers] . identifier[Conv2D] , identifier[padding] = literal[string] , identifier[activation] = identifier[activation] )
identifier[decoder_net] = identifier[tf] . identifier[keras] . identifier[Sequential] ([
identifier[tf] . identifier[keras] . identifier[layers] . identifier[Reshape] (( literal[int] , literal[int] , identifier[input_size] )),
identifier[deconv] ( literal[int] * identifier[base_depth] , literal[int] , identifier[padding] = literal[string] ),
identifier[deconv] ( literal[int] * identifier[base_depth] , literal[int] ),
identifier[deconv] ( literal[int] * identifier[base_depth] , literal[int] , literal[int] ),
identifier[deconv] ( identifier[base_depth] , literal[int] ),
identifier[deconv] ( identifier[base_depth] , literal[int] , literal[int] ),
identifier[deconv] ( identifier[base_depth] , literal[int] ),
identifier[conv] ( identifier[output_shape] [- literal[int] ], literal[int] , identifier[activation] = keyword[None] ),
identifier[tf] . identifier[keras] . identifier[layers] . identifier[Reshape] ( identifier[output_shape] ),
])
keyword[def] identifier[decoder] ( identifier[codes] ):
literal[string]
identifier[logits] = identifier[decoder_net] ( identifier[codes] )
keyword[return] identifier[tfd] . identifier[Independent] ( identifier[tfd] . identifier[Bernoulli] ( identifier[logits] = identifier[logits] ),
identifier[reinterpreted_batch_ndims] = identifier[len] ( identifier[output_shape] ),
identifier[name] = literal[string] )
keyword[return] identifier[decoder] | def make_decoder(base_depth, activation, input_size, output_shape):
"""Creates the decoder function.
Args:
base_depth: Layer base depth in decoder net.
activation: Activation function in hidden layers.
input_size: The flattened latent input shape as an int.
output_shape: The output image shape as a list.
Returns:
decoder: A `callable` mapping a `Tensor` of encodings to a
`tfd.Distribution` instance over images.
"""
deconv = functools.partial(tf.keras.layers.Conv2DTranspose, padding='SAME', activation=activation)
conv = functools.partial(tf.keras.layers.Conv2D, padding='SAME', activation=activation)
decoder_net = tf.keras.Sequential([tf.keras.layers.Reshape((1, 1, input_size)), deconv(2 * base_depth, 7, padding='VALID'), deconv(2 * base_depth, 5), deconv(2 * base_depth, 5, 2), deconv(base_depth, 5), deconv(base_depth, 5, 2), deconv(base_depth, 5), conv(output_shape[-1], 5, activation=None), tf.keras.layers.Reshape(output_shape)])
def decoder(codes):
"""Builds a distribution over images given codes.
Args:
codes: A `Tensor` representing the inputs to be decoded, of shape `[...,
code_size]`.
Returns:
decoder_distribution: A multivariate `Bernoulli` distribution.
"""
logits = decoder_net(codes)
return tfd.Independent(tfd.Bernoulli(logits=logits), reinterpreted_batch_ndims=len(output_shape), name='decoder_distribution')
return decoder |
def read_sql_table(table_name, con, schema=None, index_col=None,
                   coerce_float=True, parse_dates=None, columns=None,
                   chunksize=None):
    """
    Read SQL database table into a DataFrame.
    Given a table name and a SQLAlchemy connectable, returns a DataFrame.
    This function does not support DBAPI connections.
    Parameters
    ----------
    table_name : str
        Name of SQL table in database.
    con : SQLAlchemy connectable or str
        A database URI could be provided as a str.
        SQLite DBAPI connection mode not supported.
    schema : str, default None
        Name of SQL schema in database to query (if database flavor
        supports this). Uses default schema if None (default).
    index_col : str or list of str, optional, default: None
        Column(s) to set as index(MultiIndex).
    coerce_float : bool, default True
        Attempts to convert values of non-string, non-numeric objects (like
        decimal.Decimal) to floating point. Can result in loss of precision.
    parse_dates : list or dict, default None
        - List of column names to parse as dates.
        - Dict of ``{column_name: format string}`` where format string is
          strftime compatible in case of parsing string times or is one of
          (D, s, ns, ms, us) in case of parsing integer timestamps.
        - Dict of ``{column_name: arg dict}``, where the arg dict corresponds
          to the keyword arguments of :func:`pandas.to_datetime`
          Especially useful with databases without native Datetime support,
          such as SQLite.
    columns : list, default None
        List of column names to select from SQL table.
    chunksize : int, default None
        If specified, returns an iterator where `chunksize` is the number of
        rows to include in each chunk.
    Returns
    -------
    DataFrame
        A SQL table is returned as two-dimensional data structure with labeled
        axes.
    See Also
    --------
    read_sql_query : Read SQL query into a DataFrame.
    read_sql : Read SQL query or database table into a DataFrame.
    Notes
    -----
    Any datetime values with time zone information will be converted to UTC.
    Examples
    --------
    >>> pd.read_sql_table('table_name', 'postgres:///db_name')  # doctest:+SKIP
    """
    # A URI string is converted to an engine here; plain DBAPI connections
    # (which are not "connectable" in the SQLAlchemy sense) are rejected below.
    con = _engine_builder(con)
    if not _is_sqlalchemy_connectable(con):
        raise NotImplementedError("read_sql_table only supported for "
                                  "SQLAlchemy connectable.")
    import sqlalchemy
    from sqlalchemy.schema import MetaData
    meta = MetaData(con, schema=schema)
    try:
        # Reflect only the requested table (views included) to avoid loading
        # the whole schema's metadata.
        meta.reflect(only=[table_name], views=True)
    except sqlalchemy.exc.InvalidRequestError:
        raise ValueError("Table {name} not found".format(name=table_name))

    pandas_sql = SQLDatabase(con, meta=meta)
    table = pandas_sql.read_table(
        table_name, index_col=index_col, coerce_float=coerce_float,
        parse_dates=parse_dates, columns=columns, chunksize=chunksize)

    if table is not None:
        return table
    else:
        # Consistent with the reflection-failure raise above; previously the
        # connectable was passed as a stray extra exception argument.
        raise ValueError("Table {name} not found".format(name=table_name))
constant[
Read SQL database table into a DataFrame.
Given a table name and a SQLAlchemy connectable, returns a DataFrame.
This function does not support DBAPI connections.
Parameters
----------
table_name : str
Name of SQL table in database.
con : SQLAlchemy connectable or str
A database URI could be provided as as str.
SQLite DBAPI connection mode not supported.
schema : str, default None
Name of SQL schema in database to query (if database flavor
supports this). Uses default schema if None (default).
index_col : str or list of str, optional, default: None
Column(s) to set as index(MultiIndex).
coerce_float : bool, default True
Attempts to convert values of non-string, non-numeric objects (like
decimal.Decimal) to floating point. Can result in loss of Precision.
parse_dates : list or dict, default None
- List of column names to parse as dates.
- Dict of ``{column_name: format string}`` where format string is
strftime compatible in case of parsing string times or is one of
(D, s, ns, ms, us) in case of parsing integer timestamps.
- Dict of ``{column_name: arg dict}``, where the arg dict corresponds
to the keyword arguments of :func:`pandas.to_datetime`
Especially useful with databases without native Datetime support,
such as SQLite.
columns : list, default None
List of column names to select from SQL table.
chunksize : int, default None
If specified, returns an iterator where `chunksize` is the number of
rows to include in each chunk.
Returns
-------
DataFrame
A SQL table is returned as two-dimensional data structure with labeled
axes.
See Also
--------
read_sql_query : Read SQL query into a DataFrame.
read_sql : Read SQL query or database table into a DataFrame.
Notes
-----
Any datetime values with time zone information will be converted to UTC.
Examples
--------
>>> pd.read_sql_table('table_name', 'postgres:///db_name') # doctest:+SKIP
]
variable[con] assign[=] call[name[_engine_builder], parameter[name[con]]]
if <ast.UnaryOp object at 0x7da1b26ac220> begin[:]
<ast.Raise object at 0x7da1b26aee60>
import module[sqlalchemy]
from relative_module[sqlalchemy.schema] import module[MetaData]
variable[meta] assign[=] call[name[MetaData], parameter[name[con]]]
<ast.Try object at 0x7da1b26ae800>
variable[pandas_sql] assign[=] call[name[SQLDatabase], parameter[name[con]]]
variable[table] assign[=] call[name[pandas_sql].read_table, parameter[name[table_name]]]
if compare[name[table] is_not constant[None]] begin[:]
return[name[table]] | keyword[def] identifier[read_sql_table] ( identifier[table_name] , identifier[con] , identifier[schema] = keyword[None] , identifier[index_col] = keyword[None] ,
identifier[coerce_float] = keyword[True] , identifier[parse_dates] = keyword[None] , identifier[columns] = keyword[None] ,
identifier[chunksize] = keyword[None] ):
literal[string]
identifier[con] = identifier[_engine_builder] ( identifier[con] )
keyword[if] keyword[not] identifier[_is_sqlalchemy_connectable] ( identifier[con] ):
keyword[raise] identifier[NotImplementedError] ( literal[string]
literal[string] )
keyword[import] identifier[sqlalchemy]
keyword[from] identifier[sqlalchemy] . identifier[schema] keyword[import] identifier[MetaData]
identifier[meta] = identifier[MetaData] ( identifier[con] , identifier[schema] = identifier[schema] )
keyword[try] :
identifier[meta] . identifier[reflect] ( identifier[only] =[ identifier[table_name] ], identifier[views] = keyword[True] )
keyword[except] identifier[sqlalchemy] . identifier[exc] . identifier[InvalidRequestError] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] = identifier[table_name] ))
identifier[pandas_sql] = identifier[SQLDatabase] ( identifier[con] , identifier[meta] = identifier[meta] )
identifier[table] = identifier[pandas_sql] . identifier[read_table] (
identifier[table_name] , identifier[index_col] = identifier[index_col] , identifier[coerce_float] = identifier[coerce_float] ,
identifier[parse_dates] = identifier[parse_dates] , identifier[columns] = identifier[columns] , identifier[chunksize] = identifier[chunksize] )
keyword[if] identifier[table] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[table]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] = identifier[table_name] ), identifier[con] ) | def read_sql_table(table_name, con, schema=None, index_col=None, coerce_float=True, parse_dates=None, columns=None, chunksize=None):
"""
Read SQL database table into a DataFrame.
Given a table name and a SQLAlchemy connectable, returns a DataFrame.
This function does not support DBAPI connections.
Parameters
----------
table_name : str
Name of SQL table in database.
con : SQLAlchemy connectable or str
A database URI could be provided as as str.
SQLite DBAPI connection mode not supported.
schema : str, default None
Name of SQL schema in database to query (if database flavor
supports this). Uses default schema if None (default).
index_col : str or list of str, optional, default: None
Column(s) to set as index(MultiIndex).
coerce_float : bool, default True
Attempts to convert values of non-string, non-numeric objects (like
decimal.Decimal) to floating point. Can result in loss of Precision.
parse_dates : list or dict, default None
- List of column names to parse as dates.
- Dict of ``{column_name: format string}`` where format string is
strftime compatible in case of parsing string times or is one of
(D, s, ns, ms, us) in case of parsing integer timestamps.
- Dict of ``{column_name: arg dict}``, where the arg dict corresponds
to the keyword arguments of :func:`pandas.to_datetime`
Especially useful with databases without native Datetime support,
such as SQLite.
columns : list, default None
List of column names to select from SQL table.
chunksize : int, default None
If specified, returns an iterator where `chunksize` is the number of
rows to include in each chunk.
Returns
-------
DataFrame
A SQL table is returned as two-dimensional data structure with labeled
axes.
See Also
--------
read_sql_query : Read SQL query into a DataFrame.
read_sql : Read SQL query or database table into a DataFrame.
Notes
-----
Any datetime values with time zone information will be converted to UTC.
Examples
--------
>>> pd.read_sql_table('table_name', 'postgres:///db_name') # doctest:+SKIP
"""
con = _engine_builder(con)
if not _is_sqlalchemy_connectable(con):
raise NotImplementedError('read_sql_table only supported for SQLAlchemy connectable.') # depends on [control=['if'], data=[]]
import sqlalchemy
from sqlalchemy.schema import MetaData
meta = MetaData(con, schema=schema)
try:
meta.reflect(only=[table_name], views=True) # depends on [control=['try'], data=[]]
except sqlalchemy.exc.InvalidRequestError:
raise ValueError('Table {name} not found'.format(name=table_name)) # depends on [control=['except'], data=[]]
pandas_sql = SQLDatabase(con, meta=meta)
table = pandas_sql.read_table(table_name, index_col=index_col, coerce_float=coerce_float, parse_dates=parse_dates, columns=columns, chunksize=chunksize)
if table is not None:
return table # depends on [control=['if'], data=['table']]
else:
raise ValueError('Table {name} not found'.format(name=table_name), con) |
def Vfs_to_zs(Vfs, Vms):
    r'''Converts a list of molar volume fractions to mole fractions. Requires
    molar volumes for all species.
    .. math::
        z_i = \frac{\frac{\text{Vf}_i}{V_{m,i}}}{\sum_i
        \frac{\text{Vf}_i}{V_{m,i}}}
    Parameters
    ----------
    Vfs : iterable
        Molar volume fractions [-]
    Vms : iterable
        Molar volumes of species [m^3/mol]
    Returns
    -------
    zs : list
        Mole fractions [-]
    Notes
    -----
    Does not check that the sums add to one. Does not check that inputs are of
    the same length.
    Molar volumes are specified in terms of pure components only. Function
    works with any phase.
    Examples
    --------
    Acetone and benzene example
    >>> Vfs_to_zs([0.596, 0.404], [8.0234e-05, 9.543e-05])
    [0.6369779395901142, 0.3630220604098858]
    '''
    # Moles of each species per unit of total volume: Vf_i / Vm_i.
    mols_i = [Vfi/Vmi for Vfi, Vmi in zip(Vfs, Vms)]
    # Normalize by the total molar amount to obtain mole fractions.
    mols = sum(mols_i)
    return [mol_i/mols for mol_i in mols_i]
constant[Converts a list of mass fractions to mole fractions. Requires molecular
weights for all species.
.. math::
z_i = \frac{\frac{\text{Vf}_i}{V_{m,i}}}{\sum_i
\frac{\text{Vf}_i}{V_{m,i}}}
Parameters
----------
Vfs : iterable
Molar volume fractions [-]
VMs : iterable
Molar volumes of species [m^3/mol]
Returns
-------
zs : list
Mole fractions [-]
Notes
-----
Does not check that the sums add to one. Does not check that inputs are of
the same length.
Molar volumes are specified in terms of pure components only. Function
works with any phase.
Examples
--------
Acetone and benzene example
>>> Vfs_to_zs([0.596, 0.404], [8.0234e-05, 9.543e-05])
[0.6369779395901142, 0.3630220604098858]
]
variable[mols_i] assign[=] <ast.ListComp object at 0x7da1b021d6f0>
variable[mols] assign[=] call[name[sum], parameter[name[mols_i]]]
return[<ast.ListComp object at 0x7da1b021ff40>] | keyword[def] identifier[Vfs_to_zs] ( identifier[Vfs] , identifier[Vms] ):
literal[string]
identifier[mols_i] =[ identifier[Vfi] / identifier[Vmi] keyword[for] identifier[Vfi] , identifier[Vmi] keyword[in] identifier[zip] ( identifier[Vfs] , identifier[Vms] )]
identifier[mols] = identifier[sum] ( identifier[mols_i] )
keyword[return] [ identifier[mol_i] / identifier[mols] keyword[for] identifier[mol_i] keyword[in] identifier[mols_i] ] | def Vfs_to_zs(Vfs, Vms):
"""Converts a list of mass fractions to mole fractions. Requires molecular
weights for all species.
.. math::
z_i = \\frac{\\frac{\\text{Vf}_i}{V_{m,i}}}{\\sum_i
\\frac{\\text{Vf}_i}{V_{m,i}}}
Parameters
----------
Vfs : iterable
Molar volume fractions [-]
VMs : iterable
Molar volumes of species [m^3/mol]
Returns
-------
zs : list
Mole fractions [-]
Notes
-----
Does not check that the sums add to one. Does not check that inputs are of
the same length.
Molar volumes are specified in terms of pure components only. Function
works with any phase.
Examples
--------
Acetone and benzene example
>>> Vfs_to_zs([0.596, 0.404], [8.0234e-05, 9.543e-05])
[0.6369779395901142, 0.3630220604098858]
"""
mols_i = [Vfi / Vmi for (Vfi, Vmi) in zip(Vfs, Vms)]
mols = sum(mols_i)
return [mol_i / mols for mol_i in mols_i] |
def star(n, alpha='faced', center=(1, 1)):
    """
    Create the star points of various design matrices
    Parameters
    ----------
    n : int
        The number of variables in the design
    Optional
    --------
    alpha : str
        Available values are 'faced' (default), 'orthogonal', or 'rotatable'
    center : array
        A 1-by-2 array of integers indicating the number of center points
        assigned in each block of the response surface design. Default is
        (1, 1).
    Returns
    -------
    H : 2d-array
        The star-point portion of the design matrix (i.e. at +/- alpha)
    a : scalar
        The alpha value to scale the star points with.
    Example
    -------
    ::
        >>> star(3)
        array([[-1.,  0.,  0.],
               [ 1.,  0.,  0.],
               [ 0., -1.,  0.],
               [ 0.,  1.,  0.],
               [ 0.,  0., -1.],
               [ 0.,  0.,  1.]])
    """
    if alpha == 'faced':
        # Star points lie on the faces of the factorial cube.
        a = 1
    elif alpha == 'orthogonal':
        n_factorial = 2**n           # number of factorial points
        n_center_fact = center[0]    # center points assigned to the factorial block
        n_axial = 2*n                # number of axial (star) points
        n_center_axial = center[1]   # center points assigned to the axial block
        # Alpha that makes the quadratic design orthogonal.
        a = (n*(1 + n_center_axial/float(n_axial))/(1 + n_center_fact/float(n_factorial)))**0.5
    elif alpha == 'rotatable':
        # Alpha giving a rotatable design: fourth root of the factorial count.
        a = (2**n)**(0.25)
    else:
        raise ValueError('Invalid value for "alpha": {:}'.format(alpha))
    # Rows 2i and 2i+1 carry -a and +a in column i; build the +/-1 skeleton
    # as a block (Kronecker) product of the identity with a [-1, 1] column.
    H = np.kron(np.eye(n), np.array([[-1.0], [1.0]]))
    H *= a
    return H, a
constant[
Create the star points of various design matrices
Parameters
----------
n : int
The number of variables in the design
Optional
--------
alpha : str
Available values are 'faced' (default), 'orthogonal', or 'rotatable'
center : array
A 1-by-2 array of integers indicating the number of center points
assigned in each block of the response surface design. Default is
(1, 1).
Returns
-------
H : 2d-array
The star-point portion of the design matrix (i.e. at +/- alpha)
a : scalar
The alpha value to scale the star points with.
Example
-------
::
>>> star(3)
array([[-1., 0., 0.],
[ 1., 0., 0.],
[ 0., -1., 0.],
[ 0., 1., 0.],
[ 0., 0., -1.],
[ 0., 0., 1.]])
]
if compare[name[alpha] equal[==] constant[faced]] begin[:]
variable[a] assign[=] constant[1]
variable[H] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da20c993520>, <ast.Name object at 0x7da20c991600>]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
call[name[H]][tuple[[<ast.Slice object at 0x7da20c991bd0>, <ast.Name object at 0x7da20c991060>]]] assign[=] list[[<ast.UnaryOp object at 0x7da20c992d70>, <ast.Constant object at 0x7da20c990dc0>]]
<ast.AugAssign object at 0x7da20c991360>
return[tuple[[<ast.Name object at 0x7da20c990d30>, <ast.Name object at 0x7da20c9919f0>]]] | keyword[def] identifier[star] ( identifier[n] , identifier[alpha] = literal[string] , identifier[center] =( literal[int] , literal[int] )):
literal[string]
keyword[if] identifier[alpha] == literal[string] :
identifier[a] = literal[int]
keyword[elif] identifier[alpha] == literal[string] :
identifier[nc] = literal[int] ** identifier[n]
identifier[nco] = identifier[center] [ literal[int] ]
identifier[na] = literal[int] * identifier[n]
identifier[nao] = identifier[center] [ literal[int] ]
identifier[a] =( identifier[n] *( literal[int] + identifier[nao] / identifier[float] ( identifier[na] ))/( literal[int] + identifier[nco] / identifier[float] ( identifier[nc] )))** literal[int]
keyword[elif] identifier[alpha] == literal[string] :
identifier[nc] = literal[int] ** identifier[n]
identifier[a] = identifier[nc] **( literal[int] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[alpha] ))
identifier[H] = identifier[np] . identifier[zeros] (( literal[int] * identifier[n] , identifier[n] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[H] [ literal[int] * identifier[i] : literal[int] * identifier[i] + literal[int] , identifier[i] ]=[- literal[int] , literal[int] ]
identifier[H] *= identifier[a]
keyword[return] identifier[H] , identifier[a] | def star(n, alpha='faced', center=(1, 1)):
"""
Create the star points of various design matrices
Parameters
----------
n : int
The number of variables in the design
Optional
--------
alpha : str
Available values are 'faced' (default), 'orthogonal', or 'rotatable'
center : array
A 1-by-2 array of integers indicating the number of center points
assigned in each block of the response surface design. Default is
(1, 1).
Returns
-------
H : 2d-array
The star-point portion of the design matrix (i.e. at +/- alpha)
a : scalar
The alpha value to scale the star points with.
Example
-------
::
>>> star(3)
array([[-1., 0., 0.],
[ 1., 0., 0.],
[ 0., -1., 0.],
[ 0., 1., 0.],
[ 0., 0., -1.],
[ 0., 0., 1.]])
""" # Star points at the center of each face of the factorial
if alpha == 'faced':
a = 1 # depends on [control=['if'], data=[]]
elif alpha == 'orthogonal':
nc = 2 ** n # factorial points
nco = center[0] # center points to factorial
na = 2 * n # axial points
nao = center[1] # center points to axial design
# value of alpha in orthogonal design
a = (n * (1 + nao / float(na)) / (1 + nco / float(nc))) ** 0.5 # depends on [control=['if'], data=[]]
elif alpha == 'rotatable':
nc = 2 ** n # number of factorial points
a = nc ** 0.25 # value of alpha in rotatable design # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid value for "alpha": {:}'.format(alpha)) # Create the actual matrix now.
H = np.zeros((2 * n, n))
for i in range(n):
H[2 * i:2 * i + 2, i] = [-1, 1] # depends on [control=['for'], data=['i']]
H *= a
return (H, a) |
def api_call(
    self,
    api_method: str,
    *,
    http_verb: str = "POST",
    files: dict = None,
    data: dict = None,
    params: dict = None,
    json: dict = None,
):
    """Build and execute a request against the Slack Web API.
    Args:
        api_method (str): The target Slack API method,
            e.g. 'chat.postMessage'.
        http_verb (str): HTTP verb, e.g. 'POST'.
        files (dict): Files to upload as multipart data,
            e.g. {imageORfile: file_objectORfile_path}.
        data: Body to attach to the request; a dictionary is
            form-encoded, e.g. {'key1': 'value1', 'key2': 'value2'}.
        params (dict): URL query parameters to append to the URL,
            e.g. {'key1': 'value1', 'key2': 'value2'}.
        json (dict): JSON body for the request (used when neither files
            nor data is specified),
            e.g. {'key1': 'value1', 'key2': 'value2'}.
    Returns:
        (SlackResponse)
            The server's response to the HTTP request; data can be
            accessed like a dict, and responses carrying 'next_cursor'
            can be iterated to issue follow-up requests. When the client
            was created with `run_async`, a future for the pending
            request is returned instead.
    Raises:
        SlackApiError: The Slack API call failed,
            e.g. for 'chat.postMessage'.
        SlackRequestError: JSON data was supplied for a
            non-POST request.
    """
    # JSON bodies are only valid on POST requests; reject anything else early.
    if json is not None and http_verb != "POST":
        raise err.SlackRequestError(
            "Json data can only be submitted as POST requests. GET requests should use the 'params' argument."
        )

    api_url = self._get_url(api_method)
    headers = {
        "User-Agent": self._get_user_agent(),
        "Authorization": "Bearer {}".format(self.token),
    }

    if files is not None:
        # Switch to a multipart body; string values are treated as file paths.
        multipart = aiohttp.FormData()
        for field_name, file_value in files.items():
            if isinstance(file_value, str):
                multipart.add_field(field_name, open(file_value, "rb"))
            else:
                multipart.add_field(field_name, file_value)
        if data is not None:
            # Fold any plain form fields into the same multipart body.
            for field_name, field_value in data.items():
                multipart.add_field(field_name, str(field_value))
        data = multipart

    req_args = {
        "headers": headers,
        "data": data,
        "params": params,
        "json": json,
        "ssl": self.ssl,
        "proxy": self.proxy,
    }

    if self._event_loop is None:
        self._set_event_loop()

    future = asyncio.ensure_future(
        self._send(http_verb=http_verb, api_url=api_url, req_args=req_args),
        loop=self._event_loop,
    )
    return future if self.run_async else self._event_loop.run_until_complete(future)
constant[Create a request and execute the API call to Slack.
Args:
api_method (str): The target Slack API method.
e.g. 'chat.postMessage'
http_verb (str): HTTP Verb. e.g. 'POST'
files (dict): Files to multipart upload.
e.g. {imageORfile: file_objectORfile_path}
data: The body to attach to the request. If a dictionary is
provided, form-encoding will take place.
e.g. {'key1': 'value1', 'key2': 'value2'}
params (dict): The URL parameters to append to the URL.
e.g. {'key1': 'value1', 'key2': 'value2'}
json (dict): JSON for the body to attach to the request
(if files or data is not specified).
e.g. {'key1': 'value1', 'key2': 'value2'}
Returns:
(SlackResponse)
The server's response to an HTTP request. Data
from the response can be accessed like a dict.
If the response included 'next_cursor' it can
be iterated on to execute subsequent requests.
Raises:
SlackApiError: The following Slack API call failed:
'chat.postMessage'.
SlackRequestError: Json data can only be submitted as
POST requests.
]
if <ast.BoolOp object at 0x7da1b1babc10> begin[:]
variable[msg] assign[=] constant[Json data can only be submitted as POST requests. GET requests should use the 'params' argument.]
<ast.Raise object at 0x7da1b1bab730>
variable[api_url] assign[=] call[name[self]._get_url, parameter[name[api_method]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b1baae30>, <ast.Constant object at 0x7da1b1bab280>], [<ast.Call object at 0x7da1b1bab2e0>, <ast.Call object at 0x7da1b1baaf50>]]
if compare[name[files] is_not constant[None]] begin[:]
variable[form_data] assign[=] call[name[aiohttp].FormData, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1bc23e0>, <ast.Name object at 0x7da1b1bc2bc0>]]] in starred[call[name[files].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[v], name[str]]] begin[:]
call[name[form_data].add_field, parameter[name[k], call[name[open], parameter[name[v], constant[rb]]]]]
if compare[name[data] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1bc1d50>, <ast.Name object at 0x7da1b1bc1150>]]] in starred[call[name[data].items, parameter[]]] begin[:]
call[name[form_data].add_field, parameter[name[k], call[name[str], parameter[name[v]]]]]
variable[data] assign[=] name[form_data]
variable[req_args] assign[=] dictionary[[<ast.Constant object at 0x7da1b1bc3010>, <ast.Constant object at 0x7da1b1bc2110>, <ast.Constant object at 0x7da1b1bc0ca0>, <ast.Constant object at 0x7da1b1bc3fd0>, <ast.Constant object at 0x7da1b1bc0a90>, <ast.Constant object at 0x7da1b1bc18a0>], [<ast.Name object at 0x7da1b1bc1780>, <ast.Name object at 0x7da1b1bc0640>, <ast.Name object at 0x7da1b1bc1810>, <ast.Name object at 0x7da1b1bc0cd0>, <ast.Attribute object at 0x7da1b1bc2a70>, <ast.Attribute object at 0x7da1b1bc2f20>]]
if compare[name[self]._event_loop is constant[None]] begin[:]
call[name[self]._set_event_loop, parameter[]]
variable[future] assign[=] call[name[asyncio].ensure_future, parameter[call[name[self]._send, parameter[]]]]
if name[self].run_async begin[:]
return[name[future]]
return[call[name[self]._event_loop.run_until_complete, parameter[name[future]]]] | keyword[def] identifier[api_call] (
identifier[self] ,
identifier[api_method] : identifier[str] ,
*,
identifier[http_verb] : identifier[str] = literal[string] ,
identifier[files] : identifier[dict] = keyword[None] ,
identifier[data] : identifier[dict] = keyword[None] ,
identifier[params] : identifier[dict] = keyword[None] ,
identifier[json] : identifier[dict] = keyword[None] ,
):
literal[string]
keyword[if] identifier[json] keyword[is] keyword[not] keyword[None] keyword[and] identifier[http_verb] != literal[string] :
identifier[msg] = literal[string]
keyword[raise] identifier[err] . identifier[SlackRequestError] ( identifier[msg] )
identifier[api_url] = identifier[self] . identifier[_get_url] ( identifier[api_method] )
identifier[headers] ={
literal[string] : identifier[self] . identifier[_get_user_agent] (),
literal[string] : literal[string] . identifier[format] ( identifier[self] . identifier[token] ),
}
keyword[if] identifier[files] keyword[is] keyword[not] keyword[None] :
identifier[form_data] = identifier[aiohttp] . identifier[FormData] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[files] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[v] , identifier[str] ):
identifier[form_data] . identifier[add_field] ( identifier[k] , identifier[open] ( identifier[v] , literal[string] ))
keyword[else] :
identifier[form_data] . identifier[add_field] ( identifier[k] , identifier[v] )
keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[data] . identifier[items] ():
identifier[form_data] . identifier[add_field] ( identifier[k] , identifier[str] ( identifier[v] ))
identifier[data] = identifier[form_data]
identifier[req_args] ={
literal[string] : identifier[headers] ,
literal[string] : identifier[data] ,
literal[string] : identifier[params] ,
literal[string] : identifier[json] ,
literal[string] : identifier[self] . identifier[ssl] ,
literal[string] : identifier[self] . identifier[proxy] ,
}
keyword[if] identifier[self] . identifier[_event_loop] keyword[is] keyword[None] :
identifier[self] . identifier[_set_event_loop] ()
identifier[future] = identifier[asyncio] . identifier[ensure_future] (
identifier[self] . identifier[_send] ( identifier[http_verb] = identifier[http_verb] , identifier[api_url] = identifier[api_url] , identifier[req_args] = identifier[req_args] ),
identifier[loop] = identifier[self] . identifier[_event_loop] ,
)
keyword[if] identifier[self] . identifier[run_async] :
keyword[return] identifier[future]
keyword[return] identifier[self] . identifier[_event_loop] . identifier[run_until_complete] ( identifier[future] ) | def api_call(self, api_method: str, *, http_verb: str='POST', files: dict=None, data: dict=None, params: dict=None, json: dict=None):
"""Create a request and execute the API call to Slack.
Args:
api_method (str): The target Slack API method.
e.g. 'chat.postMessage'
http_verb (str): HTTP Verb. e.g. 'POST'
files (dict): Files to multipart upload.
e.g. {imageORfile: file_objectORfile_path}
data: The body to attach to the request. If a dictionary is
provided, form-encoding will take place.
e.g. {'key1': 'value1', 'key2': 'value2'}
params (dict): The URL parameters to append to the URL.
e.g. {'key1': 'value1', 'key2': 'value2'}
json (dict): JSON for the body to attach to the request
(if files or data is not specified).
e.g. {'key1': 'value1', 'key2': 'value2'}
Returns:
(SlackResponse)
The server's response to an HTTP request. Data
from the response can be accessed like a dict.
If the response included 'next_cursor' it can
be iterated on to execute subsequent requests.
Raises:
SlackApiError: The following Slack API call failed:
'chat.postMessage'.
SlackRequestError: Json data can only be submitted as
POST requests.
"""
if json is not None and http_verb != 'POST':
msg = "Json data can only be submitted as POST requests. GET requests should use the 'params' argument."
raise err.SlackRequestError(msg) # depends on [control=['if'], data=[]]
api_url = self._get_url(api_method)
headers = {'User-Agent': self._get_user_agent(), 'Authorization': 'Bearer {}'.format(self.token)}
if files is not None:
form_data = aiohttp.FormData()
for (k, v) in files.items():
if isinstance(v, str):
form_data.add_field(k, open(v, 'rb')) # depends on [control=['if'], data=[]]
else:
form_data.add_field(k, v) # depends on [control=['for'], data=[]]
if data is not None:
for (k, v) in data.items():
form_data.add_field(k, str(v)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['data']]
data = form_data # depends on [control=['if'], data=['files']]
req_args = {'headers': headers, 'data': data, 'params': params, 'json': json, 'ssl': self.ssl, 'proxy': self.proxy}
if self._event_loop is None:
self._set_event_loop() # depends on [control=['if'], data=[]]
future = asyncio.ensure_future(self._send(http_verb=http_verb, api_url=api_url, req_args=req_args), loop=self._event_loop)
if self.run_async:
return future # depends on [control=['if'], data=[]]
return self._event_loop.run_until_complete(future) |
def _init_pval_obj(self):
"""Returns a Fisher object based on user-input."""
if self.pval_fnc_name in self.options.keys():
try:
fisher_obj = self.options[self.pval_fnc_name](self.pval_fnc_name, self.log)
except ImportError:
print("fisher module not installed. Falling back on scipy.stats.fisher_exact")
fisher_obj = self.options['fisher_scipy_stats']('fisher_scipy_stats', self.log)
return fisher_obj
raise Exception("PVALUE FUNCTION({FNC}) NOT FOUND".format(FNC=self.pval_fnc_name)) | def function[_init_pval_obj, parameter[self]]:
constant[Returns a Fisher object based on user-input.]
if compare[name[self].pval_fnc_name in call[name[self].options.keys, parameter[]]] begin[:]
<ast.Try object at 0x7da18bc705e0>
return[name[fisher_obj]]
<ast.Raise object at 0x7da18bc71900> | keyword[def] identifier[_init_pval_obj] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[pval_fnc_name] keyword[in] identifier[self] . identifier[options] . identifier[keys] ():
keyword[try] :
identifier[fisher_obj] = identifier[self] . identifier[options] [ identifier[self] . identifier[pval_fnc_name] ]( identifier[self] . identifier[pval_fnc_name] , identifier[self] . identifier[log] )
keyword[except] identifier[ImportError] :
identifier[print] ( literal[string] )
identifier[fisher_obj] = identifier[self] . identifier[options] [ literal[string] ]( literal[string] , identifier[self] . identifier[log] )
keyword[return] identifier[fisher_obj]
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[FNC] = identifier[self] . identifier[pval_fnc_name] )) | def _init_pval_obj(self):
"""Returns a Fisher object based on user-input."""
if self.pval_fnc_name in self.options.keys():
try:
fisher_obj = self.options[self.pval_fnc_name](self.pval_fnc_name, self.log) # depends on [control=['try'], data=[]]
except ImportError:
print('fisher module not installed. Falling back on scipy.stats.fisher_exact')
fisher_obj = self.options['fisher_scipy_stats']('fisher_scipy_stats', self.log) # depends on [control=['except'], data=[]]
return fisher_obj # depends on [control=['if'], data=[]]
raise Exception('PVALUE FUNCTION({FNC}) NOT FOUND'.format(FNC=self.pval_fnc_name)) |
def energy_data():
    """Serve the ``Readings`` table as a day-normalized JSON time series.

    NOTE(review): the original docstring said "loads Readings for device 8",
    but the SQL below selects every row of ``Readings`` with no device
    filter -- confirm against the schema/callers.
    """
    rows = db.cursor().execute("""SELECT timestamp, current FROM Readings""")
    series = TimeSeries()
    series.initialize_from_sql_cursor(rows)
    # Collapse raw readings into one summed value per day.
    series.normalize("day", fusionMethod="sum")
    body = json.dumps(series, cls=PycastEncoder)
    return itty.Response(body, content_type='application/json')
constant[
Connects to the database and loads Readings for device 8.
]
variable[cur] assign[=] call[call[name[db].cursor, parameter[]].execute, parameter[constant[SELECT timestamp, current FROM Readings]]]
variable[original] assign[=] call[name[TimeSeries], parameter[]]
call[name[original].initialize_from_sql_cursor, parameter[name[cur]]]
call[name[original].normalize, parameter[constant[day]]]
return[call[name[itty].Response, parameter[call[name[json].dumps, parameter[name[original]]]]]] | keyword[def] identifier[energy_data] ():
literal[string]
identifier[cur] = identifier[db] . identifier[cursor] (). identifier[execute] ( literal[string] )
identifier[original] = identifier[TimeSeries] ()
identifier[original] . identifier[initialize_from_sql_cursor] ( identifier[cur] )
identifier[original] . identifier[normalize] ( literal[string] , identifier[fusionMethod] = literal[string] )
keyword[return] identifier[itty] . identifier[Response] ( identifier[json] . identifier[dumps] ( identifier[original] , identifier[cls] = identifier[PycastEncoder] ), identifier[content_type] = literal[string] ) | def energy_data():
"""
Connects to the database and loads Readings for device 8.
"""
cur = db.cursor().execute('SELECT timestamp, current FROM Readings')
original = TimeSeries()
original.initialize_from_sql_cursor(cur)
original.normalize('day', fusionMethod='sum')
return itty.Response(json.dumps(original, cls=PycastEncoder), content_type='application/json') |
def error_catcher(self, extra_info: Optional[str] = None):
"""
Context manager to catch, print and record InstaloaderExceptions.
:param extra_info: String to prefix error message with."""
try:
yield
except InstaloaderException as err:
if extra_info:
self.error('{}: {}'.format(extra_info, err))
else:
self.error('{}'.format(err))
if self.raise_all_errors:
raise | def function[error_catcher, parameter[self, extra_info]]:
constant[
Context manager to catch, print and record InstaloaderExceptions.
:param extra_info: String to prefix error message with.]
<ast.Try object at 0x7da204565ab0> | keyword[def] identifier[error_catcher] ( identifier[self] , identifier[extra_info] : identifier[Optional] [ identifier[str] ]= keyword[None] ):
literal[string]
keyword[try] :
keyword[yield]
keyword[except] identifier[InstaloaderException] keyword[as] identifier[err] :
keyword[if] identifier[extra_info] :
identifier[self] . identifier[error] ( literal[string] . identifier[format] ( identifier[extra_info] , identifier[err] ))
keyword[else] :
identifier[self] . identifier[error] ( literal[string] . identifier[format] ( identifier[err] ))
keyword[if] identifier[self] . identifier[raise_all_errors] :
keyword[raise] | def error_catcher(self, extra_info: Optional[str]=None):
"""
Context manager to catch, print and record InstaloaderExceptions.
:param extra_info: String to prefix error message with."""
try:
yield # depends on [control=['try'], data=[]]
except InstaloaderException as err:
if extra_info:
self.error('{}: {}'.format(extra_info, err)) # depends on [control=['if'], data=[]]
else:
self.error('{}'.format(err))
if self.raise_all_errors:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['err']] |
    def get_mean(self, grp=None):
        """
        Compute the mean curve as a ProbabilityMap
        :param grp:
            if not None must be a string of the form "grp-XX"; in that case
            returns the mean considering only the contribution for group XX
        """
        self.init()
        if len(self.weights) == 1:  # one realization
            # the standard deviation is zero
            pmap = self.get(0, grp)
            for sid, pcurve in pmap.items():
                # Widen each curve to two stat columns: column 0 keeps the
                # single realization's values (the mean), column 1 stays at
                # its zero initialization (the std-dev of one sample).
                array = numpy.zeros(pcurve.array.shape[:-1] + (2,))
                array[:, 0] = pcurve.array[:, 0]
                pcurve.array = array
            return pmap
        else:  # multiple realizations
            # Collect per-group PoE datasets from the datastore under
            # 'poes/<grp>'; when grp is given, restrict to that one group.
            dic = ({g: self.dstore['poes/' + g] for g in self.dstore['poes']}
                   if grp is None else {grp: self.dstore['poes/' + grp]})
            pmaps = self.rlzs_assoc.combine_pmaps(dic)
            # Weighted mean and standard deviation across realizations.
            return stats.compute_pmap_stats(
                pmaps, [stats.mean_curve, stats.std_curve],
                self.weights, self.imtls)
constant[
Compute the mean curve as a ProbabilityMap
:param grp:
if not None must be a string of the form "grp-XX"; in that case
returns the mean considering only the contribution for group XX
]
call[name[self].init, parameter[]]
if compare[call[name[len], parameter[name[self].weights]] equal[==] constant[1]] begin[:]
variable[pmap] assign[=] call[name[self].get, parameter[constant[0], name[grp]]]
for taget[tuple[[<ast.Name object at 0x7da18bcc9390>, <ast.Name object at 0x7da18bcc81f0>]]] in starred[call[name[pmap].items, parameter[]]] begin[:]
variable[array] assign[=] call[name[numpy].zeros, parameter[binary_operation[call[name[pcurve].array.shape][<ast.Slice object at 0x7da18bcc8d60>] + tuple[[<ast.Constant object at 0x7da18bccb970>]]]]]
call[name[array]][tuple[[<ast.Slice object at 0x7da18bcc8070>, <ast.Constant object at 0x7da18bccb2b0>]]] assign[=] call[name[pcurve].array][tuple[[<ast.Slice object at 0x7da18bcc9810>, <ast.Constant object at 0x7da18bcc8a90>]]]
name[pcurve].array assign[=] name[array]
return[name[pmap]] | keyword[def] identifier[get_mean] ( identifier[self] , identifier[grp] = keyword[None] ):
literal[string]
identifier[self] . identifier[init] ()
keyword[if] identifier[len] ( identifier[self] . identifier[weights] )== literal[int] :
identifier[pmap] = identifier[self] . identifier[get] ( literal[int] , identifier[grp] )
keyword[for] identifier[sid] , identifier[pcurve] keyword[in] identifier[pmap] . identifier[items] ():
identifier[array] = identifier[numpy] . identifier[zeros] ( identifier[pcurve] . identifier[array] . identifier[shape] [:- literal[int] ]+( literal[int] ,))
identifier[array] [:, literal[int] ]= identifier[pcurve] . identifier[array] [:, literal[int] ]
identifier[pcurve] . identifier[array] = identifier[array]
keyword[return] identifier[pmap]
keyword[else] :
identifier[dic] =({ identifier[g] : identifier[self] . identifier[dstore] [ literal[string] + identifier[g] ] keyword[for] identifier[g] keyword[in] identifier[self] . identifier[dstore] [ literal[string] ]}
keyword[if] identifier[grp] keyword[is] keyword[None] keyword[else] { identifier[grp] : identifier[self] . identifier[dstore] [ literal[string] + identifier[grp] ]})
identifier[pmaps] = identifier[self] . identifier[rlzs_assoc] . identifier[combine_pmaps] ( identifier[dic] )
keyword[return] identifier[stats] . identifier[compute_pmap_stats] (
identifier[pmaps] ,[ identifier[stats] . identifier[mean_curve] , identifier[stats] . identifier[std_curve] ],
identifier[self] . identifier[weights] , identifier[self] . identifier[imtls] ) | def get_mean(self, grp=None):
"""
Compute the mean curve as a ProbabilityMap
:param grp:
if not None must be a string of the form "grp-XX"; in that case
returns the mean considering only the contribution for group XX
"""
self.init()
if len(self.weights) == 1: # one realization
# the standard deviation is zero
pmap = self.get(0, grp)
for (sid, pcurve) in pmap.items():
array = numpy.zeros(pcurve.array.shape[:-1] + (2,))
array[:, 0] = pcurve.array[:, 0]
pcurve.array = array # depends on [control=['for'], data=[]]
return pmap # depends on [control=['if'], data=[]]
else: # multiple realizations
dic = {g: self.dstore['poes/' + g] for g in self.dstore['poes']} if grp is None else {grp: self.dstore['poes/' + grp]}
pmaps = self.rlzs_assoc.combine_pmaps(dic)
return stats.compute_pmap_stats(pmaps, [stats.mean_curve, stats.std_curve], self.weights, self.imtls) |
def modify(LowLayerCompatibility_presence=0,
           HighLayerCompatibility_presence=0,
           ReverseCallSetupDirection_presence=0):
    """MODIFY Section 9.3.13

    Build a call-control MODIFY message as a layer stack. Each *_presence
    flag, when set to 1, appends the corresponding optional information
    element after the mandatory TpPd / MessageType / BearerCapability
    layers.

    :param LowLayerCompatibility_presence: 1 to append the LLC IE (IEI 0x7C)
    :param HighLayerCompatibility_presence: 1 to append the HLC IE (IEI 0x7D)
    :param ReverseCallSetupDirection_presence: 1 to append the RCSD IE (IEI 0xA3)
    :return: the assembled packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x17)  # 00010111
    c = BearerCapability()
    packet = a / b / c
    # Fixed: the original used "is 1", which tests object identity rather
    # than value and only works by a CPython small-int caching accident
    # (it raises SyntaxWarning on modern Python). "== 1" is the correct,
    # backward-compatible comparison.
    if LowLayerCompatibility_presence == 1:
        d = LowLayerCompatibilityHdr(ieiLLC=0x7C, eightBitLLC=0x0)
        packet = packet / d
    if HighLayerCompatibility_presence == 1:
        e = HighLayerCompatibilityHdr(ieiHLC=0x7D, eightBitHLC=0x0)
        packet = packet / e
    if ReverseCallSetupDirection_presence == 1:
        f = ReverseCallSetupDirectionHdr(ieiRCSD=0xA3)
        packet = packet / f
    return packet
constant[MODIFY Section 9.3.13]
variable[a] assign[=] call[name[TpPd], parameter[]]
variable[b] assign[=] call[name[MessageType], parameter[]]
variable[c] assign[=] call[name[BearerCapability], parameter[]]
variable[packet] assign[=] binary_operation[binary_operation[name[a] / name[b]] / name[c]]
if compare[name[LowLayerCompatibility_presence] is constant[1]] begin[:]
variable[d] assign[=] call[name[LowLayerCompatibilityHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[d]]
if compare[name[HighLayerCompatibility_presence] is constant[1]] begin[:]
variable[e] assign[=] call[name[HighLayerCompatibilityHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[e]]
if compare[name[ReverseCallSetupDirection_presence] is constant[1]] begin[:]
variable[f] assign[=] call[name[ReverseCallSetupDirectionHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[f]]
return[name[packet]] | keyword[def] identifier[modify] ( identifier[LowLayerCompatibility_presence] = literal[int] ,
identifier[HighLayerCompatibility_presence] = literal[int] ,
identifier[ReverseCallSetupDirection_presence] = literal[int] ):
literal[string]
identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] )
identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] )
identifier[c] = identifier[BearerCapability] ()
identifier[packet] = identifier[a] / identifier[b] / identifier[c]
keyword[if] identifier[LowLayerCompatibility_presence] keyword[is] literal[int] :
identifier[d] = identifier[LowLayerCompatibilityHdr] ( identifier[ieiLLC] = literal[int] , identifier[eightBitLLC] = literal[int] )
identifier[packet] = identifier[packet] / identifier[d]
keyword[if] identifier[HighLayerCompatibility_presence] keyword[is] literal[int] :
identifier[e] = identifier[HighLayerCompatibilityHdr] ( identifier[ieiHLC] = literal[int] , identifier[eightBitHLC] = literal[int] )
identifier[packet] = identifier[packet] / identifier[e]
keyword[if] identifier[ReverseCallSetupDirection_presence] keyword[is] literal[int] :
identifier[f] = identifier[ReverseCallSetupDirectionHdr] ( identifier[ieiRCSD] = literal[int] )
identifier[packet] = identifier[packet] / identifier[f]
keyword[return] identifier[packet] | def modify(LowLayerCompatibility_presence=0, HighLayerCompatibility_presence=0, ReverseCallSetupDirection_presence=0):
"""MODIFY Section 9.3.13"""
a = TpPd(pd=3)
b = MessageType(mesType=23) # 00010111
c = BearerCapability()
packet = a / b / c
if LowLayerCompatibility_presence is 1:
d = LowLayerCompatibilityHdr(ieiLLC=124, eightBitLLC=0)
packet = packet / d # depends on [control=['if'], data=[]]
if HighLayerCompatibility_presence is 1:
e = HighLayerCompatibilityHdr(ieiHLC=125, eightBitHLC=0)
packet = packet / e # depends on [control=['if'], data=[]]
if ReverseCallSetupDirection_presence is 1:
f = ReverseCallSetupDirectionHdr(ieiRCSD=163)
packet = packet / f # depends on [control=['if'], data=[]]
return packet |
def set_webconfiguration_settings(name, settings, location=''):
    r'''
    Set the value of the setting for an IIS container.
    Args:
        name (str): The PSPath of the IIS webconfiguration settings.
        settings (list): A list of dictionaries containing setting name, filter and value.
        location (str): The location of the settings (optional)
    Returns:
        bool: True if successful, otherwise False
    CLI Example:
    .. code-block:: bash
        salt '*' win_iis.set_webconfiguration_settings name='IIS:\' settings="[{'name': 'enabled', 'filter': 'system.webServer/security/authentication/anonymousAuthentication', 'value': False}]"
    '''
    # PowerShell tokens accumulated below: one Set-WebConfigurationProperty
    # invocation is appended per setting and the whole list is run in one go.
    ps_cmd = []
    if not settings:
        log.warning('No settings provided')
        return False
    settings = _prepare_settings(name, settings)
    # Treat all values as strings for the purpose of comparing them to existing values.
    for idx, setting in enumerate(settings):
        if setting['name'].split('.')[-1] != 'Collection':
            settings[idx]['value'] = six.text_type(setting['value'])
    # Short-circuit when IIS already matches the requested state.
    current_settings = get_webconfiguration_settings(
        name=name, settings=settings, location=location)
    if settings == current_settings:
        log.debug('Settings already contain the provided values.')
        return True
    for setting in settings:
        # If the value is numeric, don't treat it as a string in PowerShell.
        if setting['name'].split('.')[-1] != 'Collection':
            try:
                complex(setting['value'])
                value = setting['value']
            except ValueError:
                value = "'{0}'".format(setting['value'])
        else:
            # Collection-valued settings are rendered as a comma-separated
            # list of PowerShell hashtable literals, e.g. @{k='v';k2='v2'}.
            configelement_list = []
            for value_item in setting['value']:
                configelement_construct = []
                for key, value in value_item.items():
                    configelement_construct.append("{0}='{1}'".format(key, value))
                configelement_list.append('@{' + ';'.join(configelement_construct) + '}')
            value = ','.join(configelement_list)
        ps_cmd.extend(['Set-WebConfigurationProperty',
                       '-PSPath', "'{0}'".format(name),
                       '-Filter', "'{0}'".format(setting['filter']),
                       '-Name', "'{0}'".format(setting['name']),
                       '-Location', "'{0}'".format(location),
                       '-Value', '{0};'.format(value)])
    # Execute the accumulated PowerShell; a non-zero retcode is fatal.
    cmd_ret = _srvmgr(ps_cmd)
    if cmd_ret['retcode'] != 0:
        msg = 'Unable to set settings for {0}'.format(name)
        raise CommandExecutionError(msg)
    # Get the fields post-change so that we can verify tht all values
    # were modified successfully. Track the ones that weren't.
    new_settings = get_webconfiguration_settings(
        name=name, settings=settings, location=location)
    failed_settings = []
    for idx, setting in enumerate(settings):
        is_collection = setting['name'].split('.')[-1] == 'Collection'
        # Scalars compare as strings; collections compare as lists of dicts
        # (order-sensitive), ignoring any extra mapping subclass identity.
        if ((not is_collection and six.text_type(setting['value']) != six.text_type(new_settings[idx]['value']))
            or (is_collection and list(map(dict, setting['value'])) != list(map(dict, new_settings[idx]['value'])))):
            failed_settings.append(setting)
    if failed_settings:
        log.error('Failed to change settings: %s', failed_settings)
        return False
    log.debug('Settings configured successfully: %s', settings)
    return True
constant[
Set the value of the setting for an IIS container.
Args:
name (str): The PSPath of the IIS webconfiguration settings.
settings (list): A list of dictionaries containing setting name, filter and value.
location (str): The location of the settings (optional)
Returns:
bool: True if successful, otherwise False
CLI Example:
.. code-block:: bash
salt '*' win_iis.set_webconfiguration_settings name='IIS:\' settings="[{'name': 'enabled', 'filter': 'system.webServer/security/authentication/anonymousAuthentication', 'value': False}]"
]
variable[ps_cmd] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da18f813fa0> begin[:]
call[name[log].warning, parameter[constant[No settings provided]]]
return[constant[False]]
variable[settings] assign[=] call[name[_prepare_settings], parameter[name[name], name[settings]]]
for taget[tuple[[<ast.Name object at 0x7da18f812890>, <ast.Name object at 0x7da18f811ea0>]]] in starred[call[name[enumerate], parameter[name[settings]]]] begin[:]
if compare[call[call[call[name[setting]][constant[name]].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da18f8128f0>] not_equal[!=] constant[Collection]] begin[:]
call[call[name[settings]][name[idx]]][constant[value]] assign[=] call[name[six].text_type, parameter[call[name[setting]][constant[value]]]]
variable[current_settings] assign[=] call[name[get_webconfiguration_settings], parameter[]]
if compare[name[settings] equal[==] name[current_settings]] begin[:]
call[name[log].debug, parameter[constant[Settings already contain the provided values.]]]
return[constant[True]]
for taget[name[setting]] in starred[name[settings]] begin[:]
if compare[call[call[call[name[setting]][constant[name]].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da18f810340>] not_equal[!=] constant[Collection]] begin[:]
<ast.Try object at 0x7da18f811180>
call[name[ps_cmd].extend, parameter[list[[<ast.Constant object at 0x7da18f813310>, <ast.Constant object at 0x7da18f813490>, <ast.Call object at 0x7da18f811900>, <ast.Constant object at 0x7da18f811870>, <ast.Call object at 0x7da18f811d50>, <ast.Constant object at 0x7da18f813c10>, <ast.Call object at 0x7da18f8139d0>, <ast.Constant object at 0x7da18f8112a0>, <ast.Call object at 0x7da18f813730>, <ast.Constant object at 0x7da18f813c70>, <ast.Call object at 0x7da18f813670>]]]]
variable[cmd_ret] assign[=] call[name[_srvmgr], parameter[name[ps_cmd]]]
if compare[call[name[cmd_ret]][constant[retcode]] not_equal[!=] constant[0]] begin[:]
variable[msg] assign[=] call[constant[Unable to set settings for {0}].format, parameter[name[name]]]
<ast.Raise object at 0x7da18f810760>
variable[new_settings] assign[=] call[name[get_webconfiguration_settings], parameter[]]
variable[failed_settings] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b26ac910>, <ast.Name object at 0x7da1b26af400>]]] in starred[call[name[enumerate], parameter[name[settings]]]] begin[:]
variable[is_collection] assign[=] compare[call[call[call[name[setting]][constant[name]].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da1b26af9a0>] equal[==] constant[Collection]]
if <ast.BoolOp object at 0x7da1b26adbd0> begin[:]
call[name[failed_settings].append, parameter[name[setting]]]
if name[failed_settings] begin[:]
call[name[log].error, parameter[constant[Failed to change settings: %s], name[failed_settings]]]
return[constant[False]]
call[name[log].debug, parameter[constant[Settings configured successfully: %s], name[settings]]]
return[constant[True]] | keyword[def] identifier[set_webconfiguration_settings] ( identifier[name] , identifier[settings] , identifier[location] = literal[string] ):
literal[string]
identifier[ps_cmd] =[]
keyword[if] keyword[not] identifier[settings] :
identifier[log] . identifier[warning] ( literal[string] )
keyword[return] keyword[False]
identifier[settings] = identifier[_prepare_settings] ( identifier[name] , identifier[settings] )
keyword[for] identifier[idx] , identifier[setting] keyword[in] identifier[enumerate] ( identifier[settings] ):
keyword[if] identifier[setting] [ literal[string] ]. identifier[split] ( literal[string] )[- literal[int] ]!= literal[string] :
identifier[settings] [ identifier[idx] ][ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[setting] [ literal[string] ])
identifier[current_settings] = identifier[get_webconfiguration_settings] (
identifier[name] = identifier[name] , identifier[settings] = identifier[settings] , identifier[location] = identifier[location] )
keyword[if] identifier[settings] == identifier[current_settings] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[return] keyword[True]
keyword[for] identifier[setting] keyword[in] identifier[settings] :
keyword[if] identifier[setting] [ literal[string] ]. identifier[split] ( literal[string] )[- literal[int] ]!= literal[string] :
keyword[try] :
identifier[complex] ( identifier[setting] [ literal[string] ])
identifier[value] = identifier[setting] [ literal[string] ]
keyword[except] identifier[ValueError] :
identifier[value] = literal[string] . identifier[format] ( identifier[setting] [ literal[string] ])
keyword[else] :
identifier[configelement_list] =[]
keyword[for] identifier[value_item] keyword[in] identifier[setting] [ literal[string] ]:
identifier[configelement_construct] =[]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[value_item] . identifier[items] ():
identifier[configelement_construct] . identifier[append] ( literal[string] . identifier[format] ( identifier[key] , identifier[value] ))
identifier[configelement_list] . identifier[append] ( literal[string] + literal[string] . identifier[join] ( identifier[configelement_construct] )+ literal[string] )
identifier[value] = literal[string] . identifier[join] ( identifier[configelement_list] )
identifier[ps_cmd] . identifier[extend] ([ literal[string] ,
literal[string] , literal[string] . identifier[format] ( identifier[name] ),
literal[string] , literal[string] . identifier[format] ( identifier[setting] [ literal[string] ]),
literal[string] , literal[string] . identifier[format] ( identifier[setting] [ literal[string] ]),
literal[string] , literal[string] . identifier[format] ( identifier[location] ),
literal[string] , literal[string] . identifier[format] ( identifier[value] )])
identifier[cmd_ret] = identifier[_srvmgr] ( identifier[ps_cmd] )
keyword[if] identifier[cmd_ret] [ literal[string] ]!= literal[int] :
identifier[msg] = literal[string] . identifier[format] ( identifier[name] )
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
identifier[new_settings] = identifier[get_webconfiguration_settings] (
identifier[name] = identifier[name] , identifier[settings] = identifier[settings] , identifier[location] = identifier[location] )
identifier[failed_settings] =[]
keyword[for] identifier[idx] , identifier[setting] keyword[in] identifier[enumerate] ( identifier[settings] ):
identifier[is_collection] = identifier[setting] [ literal[string] ]. identifier[split] ( literal[string] )[- literal[int] ]== literal[string]
keyword[if] (( keyword[not] identifier[is_collection] keyword[and] identifier[six] . identifier[text_type] ( identifier[setting] [ literal[string] ])!= identifier[six] . identifier[text_type] ( identifier[new_settings] [ identifier[idx] ][ literal[string] ]))
keyword[or] ( identifier[is_collection] keyword[and] identifier[list] ( identifier[map] ( identifier[dict] , identifier[setting] [ literal[string] ]))!= identifier[list] ( identifier[map] ( identifier[dict] , identifier[new_settings] [ identifier[idx] ][ literal[string] ])))):
identifier[failed_settings] . identifier[append] ( identifier[setting] )
keyword[if] identifier[failed_settings] :
identifier[log] . identifier[error] ( literal[string] , identifier[failed_settings] )
keyword[return] keyword[False]
identifier[log] . identifier[debug] ( literal[string] , identifier[settings] )
keyword[return] keyword[True] | def set_webconfiguration_settings(name, settings, location=''):
"""
Set the value of the setting for an IIS container.
Args:
name (str): The PSPath of the IIS webconfiguration settings.
settings (list): A list of dictionaries containing setting name, filter and value.
location (str): The location of the settings (optional)
Returns:
bool: True if successful, otherwise False
CLI Example:
.. code-block:: bash
salt '*' win_iis.set_webconfiguration_settings name='IIS:\\' settings="[{'name': 'enabled', 'filter': 'system.webServer/security/authentication/anonymousAuthentication', 'value': False}]"
"""
ps_cmd = []
if not settings:
log.warning('No settings provided')
return False # depends on [control=['if'], data=[]]
settings = _prepare_settings(name, settings)
# Treat all values as strings for the purpose of comparing them to existing values.
for (idx, setting) in enumerate(settings):
if setting['name'].split('.')[-1] != 'Collection':
settings[idx]['value'] = six.text_type(setting['value']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
current_settings = get_webconfiguration_settings(name=name, settings=settings, location=location)
if settings == current_settings:
log.debug('Settings already contain the provided values.')
return True # depends on [control=['if'], data=[]]
for setting in settings:
# If the value is numeric, don't treat it as a string in PowerShell.
if setting['name'].split('.')[-1] != 'Collection':
try:
complex(setting['value'])
value = setting['value'] # depends on [control=['try'], data=[]]
except ValueError:
value = "'{0}'".format(setting['value']) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
configelement_list = []
for value_item in setting['value']:
configelement_construct = []
for (key, value) in value_item.items():
configelement_construct.append("{0}='{1}'".format(key, value)) # depends on [control=['for'], data=[]]
configelement_list.append('@{' + ';'.join(configelement_construct) + '}') # depends on [control=['for'], data=['value_item']]
value = ','.join(configelement_list)
ps_cmd.extend(['Set-WebConfigurationProperty', '-PSPath', "'{0}'".format(name), '-Filter', "'{0}'".format(setting['filter']), '-Name', "'{0}'".format(setting['name']), '-Location', "'{0}'".format(location), '-Value', '{0};'.format(value)]) # depends on [control=['for'], data=['setting']]
cmd_ret = _srvmgr(ps_cmd)
if cmd_ret['retcode'] != 0:
msg = 'Unable to set settings for {0}'.format(name)
raise CommandExecutionError(msg) # depends on [control=['if'], data=[]]
# Get the fields post-change so that we can verify tht all values
# were modified successfully. Track the ones that weren't.
new_settings = get_webconfiguration_settings(name=name, settings=settings, location=location)
failed_settings = []
for (idx, setting) in enumerate(settings):
is_collection = setting['name'].split('.')[-1] == 'Collection'
if not is_collection and six.text_type(setting['value']) != six.text_type(new_settings[idx]['value']) or (is_collection and list(map(dict, setting['value'])) != list(map(dict, new_settings[idx]['value']))):
failed_settings.append(setting) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if failed_settings:
log.error('Failed to change settings: %s', failed_settings)
return False # depends on [control=['if'], data=[]]
log.debug('Settings configured successfully: %s', settings)
return True |
def libvlc_audio_output_device_enum(mp):
    '''Gets a list of potential audio output devices,
    See L{libvlc_audio_output_device_set}().
    @note: Not all audio outputs support enumerating devices.
    The audio output may be functional even if the list is empty (NULL).
    @note: The list may not be exhaustive.
    @warning: Some audio output devices in the list might not actually work in
    some circumstances. By default, it is recommended to not specify any
    explicit audio device.
    @param mp: media player.
    @return: A NULL-terminated linked list of potential audio output devices. It must be freed it with L{libvlc_audio_output_device_list_release}().
    @version: LibVLC 2.2.0 or later.
    '''
    # Reuse the cached ctypes binding when present; otherwise build (and
    # cache) it with one in-parameter and a device-list-pointer return type.
    f = _Cfunctions.get('libvlc_audio_output_device_enum', None)
    if not f:
        f = _Cfunction('libvlc_audio_output_device_enum', ((1,),), None,
                       ctypes.POINTER(AudioOutputDevice), MediaPlayer)
    return f(mp)
constant[Gets a list of potential audio output devices,
See L{libvlc_audio_output_device_set}().
@note: Not all audio outputs support enumerating devices.
The audio output may be functional even if the list is empty (NULL).
@note: The list may not be exhaustive.
@warning: Some audio output devices in the list might not actually work in
some circumstances. By default, it is recommended to not specify any
explicit audio device.
@param mp: media player.
@return: A NULL-terminated linked list of potential audio output devices. It must be freed it with L{libvlc_audio_output_device_list_release}().
@version: LibVLC 2.2.0 or later.
]
variable[f] assign[=] <ast.BoolOp object at 0x7da1b2345d20>
return[call[name[f], parameter[name[mp]]]] | keyword[def] identifier[libvlc_audio_output_device_enum] ( identifier[mp] ):
literal[string]
identifier[f] = identifier[_Cfunctions] . identifier[get] ( literal[string] , keyword[None] ) keyword[or] identifier[_Cfunction] ( literal[string] ,(( literal[int] ,),), keyword[None] ,
identifier[ctypes] . identifier[POINTER] ( identifier[AudioOutputDevice] ), identifier[MediaPlayer] )
keyword[return] identifier[f] ( identifier[mp] ) | def libvlc_audio_output_device_enum(mp):
"""Gets a list of potential audio output devices,
See L{libvlc_audio_output_device_set}().
@note: Not all audio outputs support enumerating devices.
The audio output may be functional even if the list is empty (NULL).
@note: The list may not be exhaustive.
@warning: Some audio output devices in the list might not actually work in
some circumstances. By default, it is recommended to not specify any
explicit audio device.
@param mp: media player.
@return: A NULL-terminated linked list of potential audio output devices. It must be freed it with L{libvlc_audio_output_device_list_release}().
@version: LibVLC 2.2.0 or later.
"""
f = _Cfunctions.get('libvlc_audio_output_device_enum', None) or _Cfunction('libvlc_audio_output_device_enum', ((1,),), None, ctypes.POINTER(AudioOutputDevice), MediaPlayer)
return f(mp) |
def t_NUMBER(self, t):
r'\d+\.?\d*'
if t.value.find(".") != -1:
t.value = float(t.value)
else:
t.value = int(t.value)
return t | def function[t_NUMBER, parameter[self, t]]:
constant[\d+\.?\d*]
if compare[call[name[t].value.find, parameter[constant[.]]] not_equal[!=] <ast.UnaryOp object at 0x7da1b0a64160>] begin[:]
name[t].value assign[=] call[name[float], parameter[name[t].value]]
return[name[t]] | keyword[def] identifier[t_NUMBER] ( identifier[self] , identifier[t] ):
literal[string]
keyword[if] identifier[t] . identifier[value] . identifier[find] ( literal[string] )!=- literal[int] :
identifier[t] . identifier[value] = identifier[float] ( identifier[t] . identifier[value] )
keyword[else] :
identifier[t] . identifier[value] = identifier[int] ( identifier[t] . identifier[value] )
keyword[return] identifier[t] | def t_NUMBER(self, t):
"""\\d+\\.?\\d*"""
if t.value.find('.') != -1:
t.value = float(t.value) # depends on [control=['if'], data=[]]
else:
t.value = int(t.value)
return t |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.