code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _parse_settings_vlan(opts, iface):
'''
Filters given options and outputs valid settings for a vlan
'''
vlan = {}
if 'reorder_hdr' in opts:
if opts['reorder_hdr'] in _CONFIG_TRUE + _CONFIG_FALSE:
vlan.update({'reorder_hdr': opts['reorder_hdr']})
else:
valid = _CONFIG_TRUE + _CONFIG_FALSE
_raise_error_iface(iface, 'reorder_hdr', valid)
if 'vlan_id' in opts:
if opts['vlan_id'] > 0:
vlan.update({'vlan_id': opts['vlan_id']})
else:
_raise_error_iface(iface, 'vlan_id', 'Positive integer')
if 'phys_dev' in opts:
if opts['phys_dev']:
vlan.update({'phys_dev': opts['phys_dev']})
else:
_raise_error_iface(iface, 'phys_dev', 'Non-empty string')
return vlan | def function[_parse_settings_vlan, parameter[opts, iface]]:
constant[
Filters given options and outputs valid settings for a vlan
]
variable[vlan] assign[=] dictionary[[], []]
if compare[constant[reorder_hdr] in name[opts]] begin[:]
if compare[call[name[opts]][constant[reorder_hdr]] in binary_operation[name[_CONFIG_TRUE] + name[_CONFIG_FALSE]]] begin[:]
call[name[vlan].update, parameter[dictionary[[<ast.Constant object at 0x7da1b21aa500>], [<ast.Subscript object at 0x7da1b21aa050>]]]]
if compare[constant[vlan_id] in name[opts]] begin[:]
if compare[call[name[opts]][constant[vlan_id]] greater[>] constant[0]] begin[:]
call[name[vlan].update, parameter[dictionary[[<ast.Constant object at 0x7da1b21aa410>], [<ast.Subscript object at 0x7da1b21aa140>]]]]
if compare[constant[phys_dev] in name[opts]] begin[:]
if call[name[opts]][constant[phys_dev]] begin[:]
call[name[vlan].update, parameter[dictionary[[<ast.Constant object at 0x7da1b21a9c60>], [<ast.Subscript object at 0x7da1b21a84f0>]]]]
return[name[vlan]] | keyword[def] identifier[_parse_settings_vlan] ( identifier[opts] , identifier[iface] ):
literal[string]
identifier[vlan] ={}
keyword[if] literal[string] keyword[in] identifier[opts] :
keyword[if] identifier[opts] [ literal[string] ] keyword[in] identifier[_CONFIG_TRUE] + identifier[_CONFIG_FALSE] :
identifier[vlan] . identifier[update] ({ literal[string] : identifier[opts] [ literal[string] ]})
keyword[else] :
identifier[valid] = identifier[_CONFIG_TRUE] + identifier[_CONFIG_FALSE]
identifier[_raise_error_iface] ( identifier[iface] , literal[string] , identifier[valid] )
keyword[if] literal[string] keyword[in] identifier[opts] :
keyword[if] identifier[opts] [ literal[string] ]> literal[int] :
identifier[vlan] . identifier[update] ({ literal[string] : identifier[opts] [ literal[string] ]})
keyword[else] :
identifier[_raise_error_iface] ( identifier[iface] , literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[opts] :
keyword[if] identifier[opts] [ literal[string] ]:
identifier[vlan] . identifier[update] ({ literal[string] : identifier[opts] [ literal[string] ]})
keyword[else] :
identifier[_raise_error_iface] ( identifier[iface] , literal[string] , literal[string] )
keyword[return] identifier[vlan] | def _parse_settings_vlan(opts, iface):
"""
Filters given options and outputs valid settings for a vlan
"""
vlan = {}
if 'reorder_hdr' in opts:
if opts['reorder_hdr'] in _CONFIG_TRUE + _CONFIG_FALSE:
vlan.update({'reorder_hdr': opts['reorder_hdr']}) # depends on [control=['if'], data=[]]
else:
valid = _CONFIG_TRUE + _CONFIG_FALSE
_raise_error_iface(iface, 'reorder_hdr', valid) # depends on [control=['if'], data=['opts']]
if 'vlan_id' in opts:
if opts['vlan_id'] > 0:
vlan.update({'vlan_id': opts['vlan_id']}) # depends on [control=['if'], data=[]]
else:
_raise_error_iface(iface, 'vlan_id', 'Positive integer') # depends on [control=['if'], data=['opts']]
if 'phys_dev' in opts:
if opts['phys_dev']:
vlan.update({'phys_dev': opts['phys_dev']}) # depends on [control=['if'], data=[]]
else:
_raise_error_iface(iface, 'phys_dev', 'Non-empty string') # depends on [control=['if'], data=['opts']]
return vlan |
def find(pattern, path='.', exclude=None, recursive=True):
"""Find files that match *pattern* in *path*"""
import fnmatch
import os
if recursive:
for root, dirnames, filenames in os.walk(path):
for pat in _to_list(pattern):
for filename in fnmatch.filter(filenames, pat):
filepath = join(abspath(root), filename)
for excl in _to_list(exclude):
if excl and fnmatch.fnmatch(filepath, excl):
break
else:
yield filepath
else:
for pat in _to_list(pattern):
for filename in fnmatch.filter(list(path), pat):
filepath = join(abspath(path), filename)
for excl in _to_list(exclude):
if excl and fnmatch.fnmatch(filepath, excl):
break
else:
yield filepath | def function[find, parameter[pattern, path, exclude, recursive]]:
constant[Find files that match *pattern* in *path*]
import module[fnmatch]
import module[os]
if name[recursive] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0432f80>, <ast.Name object at 0x7da1b0432bc0>, <ast.Name object at 0x7da1b0432a10>]]] in starred[call[name[os].walk, parameter[name[path]]]] begin[:]
for taget[name[pat]] in starred[call[name[_to_list], parameter[name[pattern]]]] begin[:]
for taget[name[filename]] in starred[call[name[fnmatch].filter, parameter[name[filenames], name[pat]]]] begin[:]
variable[filepath] assign[=] call[name[join], parameter[call[name[abspath], parameter[name[root]]], name[filename]]]
for taget[name[excl]] in starred[call[name[_to_list], parameter[name[exclude]]]] begin[:]
if <ast.BoolOp object at 0x7da1b050be20> begin[:]
break | keyword[def] identifier[find] ( identifier[pattern] , identifier[path] = literal[string] , identifier[exclude] = keyword[None] , identifier[recursive] = keyword[True] ):
literal[string]
keyword[import] identifier[fnmatch]
keyword[import] identifier[os]
keyword[if] identifier[recursive] :
keyword[for] identifier[root] , identifier[dirnames] , identifier[filenames] keyword[in] identifier[os] . identifier[walk] ( identifier[path] ):
keyword[for] identifier[pat] keyword[in] identifier[_to_list] ( identifier[pattern] ):
keyword[for] identifier[filename] keyword[in] identifier[fnmatch] . identifier[filter] ( identifier[filenames] , identifier[pat] ):
identifier[filepath] = identifier[join] ( identifier[abspath] ( identifier[root] ), identifier[filename] )
keyword[for] identifier[excl] keyword[in] identifier[_to_list] ( identifier[exclude] ):
keyword[if] identifier[excl] keyword[and] identifier[fnmatch] . identifier[fnmatch] ( identifier[filepath] , identifier[excl] ):
keyword[break]
keyword[else] :
keyword[yield] identifier[filepath]
keyword[else] :
keyword[for] identifier[pat] keyword[in] identifier[_to_list] ( identifier[pattern] ):
keyword[for] identifier[filename] keyword[in] identifier[fnmatch] . identifier[filter] ( identifier[list] ( identifier[path] ), identifier[pat] ):
identifier[filepath] = identifier[join] ( identifier[abspath] ( identifier[path] ), identifier[filename] )
keyword[for] identifier[excl] keyword[in] identifier[_to_list] ( identifier[exclude] ):
keyword[if] identifier[excl] keyword[and] identifier[fnmatch] . identifier[fnmatch] ( identifier[filepath] , identifier[excl] ):
keyword[break]
keyword[else] :
keyword[yield] identifier[filepath] | def find(pattern, path='.', exclude=None, recursive=True):
"""Find files that match *pattern* in *path*"""
import fnmatch
import os
if recursive:
for (root, dirnames, filenames) in os.walk(path):
for pat in _to_list(pattern):
for filename in fnmatch.filter(filenames, pat):
filepath = join(abspath(root), filename)
for excl in _to_list(exclude):
if excl and fnmatch.fnmatch(filepath, excl):
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['excl']]
else:
yield filepath # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=['pat']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
for pat in _to_list(pattern):
for filename in fnmatch.filter(list(path), pat):
filepath = join(abspath(path), filename)
for excl in _to_list(exclude):
if excl and fnmatch.fnmatch(filepath, excl):
break # depends on [control=['if'], data=[]]
else:
yield filepath # depends on [control=['for'], data=['excl']] # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=['pat']] |
def get_current_platform():
"""
Look in /sys/class/board-info/ to determine the platform type.
This can return 'ev3', 'evb', 'pistorms', 'brickpi', 'brickpi3' or 'fake'.
"""
board_info_dir = '/sys/class/board-info/'
if not os.path.exists(board_info_dir) or os.environ.get("FAKE_SYS"):
return 'fake'
for board in os.listdir(board_info_dir):
uevent_filename = os.path.join(board_info_dir, board, 'uevent')
if os.path.exists(uevent_filename):
with open(uevent_filename, 'r') as fh:
for line in fh.readlines():
(key, value) = line.strip().split('=')
if key == 'BOARD_INFO_MODEL':
if value == 'LEGO MINDSTORMS EV3':
return 'ev3'
elif value in ('FatcatLab EVB', 'QuestCape'):
return 'evb'
elif value == 'PiStorms':
return 'pistorms'
# This is the same for both BrickPi and BrickPi+.
# There is not a way to tell the difference.
elif value == 'Dexter Industries BrickPi':
return 'brickpi'
elif value == 'Dexter Industries BrickPi3':
return 'brickpi3'
elif value == 'FAKE-SYS':
return 'fake'
return None | def function[get_current_platform, parameter[]]:
constant[
Look in /sys/class/board-info/ to determine the platform type.
This can return 'ev3', 'evb', 'pistorms', 'brickpi', 'brickpi3' or 'fake'.
]
variable[board_info_dir] assign[=] constant[/sys/class/board-info/]
if <ast.BoolOp object at 0x7da18dc98250> begin[:]
return[constant[fake]]
for taget[name[board]] in starred[call[name[os].listdir, parameter[name[board_info_dir]]]] begin[:]
variable[uevent_filename] assign[=] call[name[os].path.join, parameter[name[board_info_dir], name[board], constant[uevent]]]
if call[name[os].path.exists, parameter[name[uevent_filename]]] begin[:]
with call[name[open], parameter[name[uevent_filename], constant[r]]] begin[:]
for taget[name[line]] in starred[call[name[fh].readlines, parameter[]]] begin[:]
<ast.Tuple object at 0x7da1b17ed6f0> assign[=] call[call[name[line].strip, parameter[]].split, parameter[constant[=]]]
if compare[name[key] equal[==] constant[BOARD_INFO_MODEL]] begin[:]
if compare[name[value] equal[==] constant[LEGO MINDSTORMS EV3]] begin[:]
return[constant[ev3]]
return[constant[None]] | keyword[def] identifier[get_current_platform] ():
literal[string]
identifier[board_info_dir] = literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[board_info_dir] ) keyword[or] identifier[os] . identifier[environ] . identifier[get] ( literal[string] ):
keyword[return] literal[string]
keyword[for] identifier[board] keyword[in] identifier[os] . identifier[listdir] ( identifier[board_info_dir] ):
identifier[uevent_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[board_info_dir] , identifier[board] , literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[uevent_filename] ):
keyword[with] identifier[open] ( identifier[uevent_filename] , literal[string] ) keyword[as] identifier[fh] :
keyword[for] identifier[line] keyword[in] identifier[fh] . identifier[readlines] ():
( identifier[key] , identifier[value] )= identifier[line] . identifier[strip] (). identifier[split] ( literal[string] )
keyword[if] identifier[key] == literal[string] :
keyword[if] identifier[value] == literal[string] :
keyword[return] literal[string]
keyword[elif] identifier[value] keyword[in] ( literal[string] , literal[string] ):
keyword[return] literal[string]
keyword[elif] identifier[value] == literal[string] :
keyword[return] literal[string]
keyword[elif] identifier[value] == literal[string] :
keyword[return] literal[string]
keyword[elif] identifier[value] == literal[string] :
keyword[return] literal[string]
keyword[elif] identifier[value] == literal[string] :
keyword[return] literal[string]
keyword[return] keyword[None] | def get_current_platform():
"""
Look in /sys/class/board-info/ to determine the platform type.
This can return 'ev3', 'evb', 'pistorms', 'brickpi', 'brickpi3' or 'fake'.
"""
board_info_dir = '/sys/class/board-info/'
if not os.path.exists(board_info_dir) or os.environ.get('FAKE_SYS'):
return 'fake' # depends on [control=['if'], data=[]]
for board in os.listdir(board_info_dir):
uevent_filename = os.path.join(board_info_dir, board, 'uevent')
if os.path.exists(uevent_filename):
with open(uevent_filename, 'r') as fh:
for line in fh.readlines():
(key, value) = line.strip().split('=')
if key == 'BOARD_INFO_MODEL':
if value == 'LEGO MINDSTORMS EV3':
return 'ev3' # depends on [control=['if'], data=[]]
elif value in ('FatcatLab EVB', 'QuestCape'):
return 'evb' # depends on [control=['if'], data=[]]
elif value == 'PiStorms':
return 'pistorms' # depends on [control=['if'], data=[]]
# This is the same for both BrickPi and BrickPi+.
# There is not a way to tell the difference.
elif value == 'Dexter Industries BrickPi':
return 'brickpi' # depends on [control=['if'], data=[]]
elif value == 'Dexter Industries BrickPi3':
return 'brickpi3' # depends on [control=['if'], data=[]]
elif value == 'FAKE-SYS':
return 'fake' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['fh']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['board']]
return None |
def make_utool_json_encoder(allow_pickle=False):
"""
References:
http://stackoverflow.com/questions/8230315/python-sets-are
http://stackoverflow.com/questions/11561932/why-does-json
https://github.com/jsonpickle/jsonpickle
http://stackoverflow.com/questions/24369666/typeerror-b1
http://stackoverflow.com/questions/30469575/how-to-pickle
"""
import utool as ut
PYOBJECT_TAG = '__PYTHON_OBJECT__'
UUID_TAG = '__UUID__'
SLICE_TAG = '__SLICE__'
def decode_pickle(text):
obj = pickle.loads(codecs.decode(text.encode(), 'base64'))
return obj
def encode_pickle(obj):
try:
# Use protocol 2 to support both python2.7 and python3
COMPATIBLE_PROTOCOL = 2
pickle_bytes = pickle.dumps(obj, protocol=COMPATIBLE_PROTOCOL)
except Exception:
raise
text = codecs.encode(pickle_bytes, 'base64').decode()
return text
type_to_tag = collections.OrderedDict([
(slice, SLICE_TAG),
(uuid.UUID, UUID_TAG),
(object, PYOBJECT_TAG),
])
tag_to_type = {tag: type_ for type_, tag in type_to_tag.items()}
def slice_part(c):
return '' if c is None else str(c)
def encode_slice(s):
parts = [slice_part(s.start), slice_part(s.stop), slice_part(s.step)]
return ':'.join(parts)
def decode_slice(x):
return ut.smart_cast(x, slice)
encoders = {
UUID_TAG: str,
SLICE_TAG: encode_slice,
PYOBJECT_TAG: encode_pickle,
}
decoders = {
UUID_TAG: uuid.UUID,
SLICE_TAG: decode_slice,
PYOBJECT_TAG: decode_pickle,
}
if not allow_pickle:
del encoders[PYOBJECT_TAG]
del decoders[PYOBJECT_TAG]
type_ = tag_to_type[PYOBJECT_TAG]
del tag_to_type[PYOBJECT_TAG]
del type_to_tag[type_]
class UtoolJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, util_type.NUMPY_TYPE_TUPLE):
return obj.tolist()
elif six.PY3 and isinstance(obj, bytes):
return obj.decode('utf-8')
elif isinstance(obj, (set, frozenset)):
return list(obj)
# return json.JSONEncoder.default(self, list(obj))
# return [json.JSONEncoder.default(o) for o in obj]
elif isinstance(obj, util_type.PRIMATIVE_TYPES):
return json.JSONEncoder.default(self, obj)
elif hasattr(obj, '__getstate__'):
return obj.__getstate__()
else:
for type_, tag in type_to_tag.items():
if isinstance(obj, type_):
#print('----')
#print('encoder obj = %r' % (obj,))
#print('encoder type_ = %r' % (type_,))
func = encoders[tag]
text = func(obj)
return {tag: text}
raise TypeError('Invalid serialization type=%r' % (type(obj)))
@classmethod
def _json_object_hook(cls, value, verbose=False, **kwargs):
if len(value) == 1:
tag, text = list(value.items())[0]
if tag in decoders:
#print('----')
#print('decoder tag = %r' % (tag,))
func = decoders[tag]
obj = func(text)
#print('decoder obj = %r' % (obj,))
return obj
else:
return value
return value
return UtoolJSONEncoder | def function[make_utool_json_encoder, parameter[allow_pickle]]:
constant[
References:
http://stackoverflow.com/questions/8230315/python-sets-are
http://stackoverflow.com/questions/11561932/why-does-json
https://github.com/jsonpickle/jsonpickle
http://stackoverflow.com/questions/24369666/typeerror-b1
http://stackoverflow.com/questions/30469575/how-to-pickle
]
import module[utool] as alias[ut]
variable[PYOBJECT_TAG] assign[=] constant[__PYTHON_OBJECT__]
variable[UUID_TAG] assign[=] constant[__UUID__]
variable[SLICE_TAG] assign[=] constant[__SLICE__]
def function[decode_pickle, parameter[text]]:
variable[obj] assign[=] call[name[pickle].loads, parameter[call[name[codecs].decode, parameter[call[name[text].encode, parameter[]], constant[base64]]]]]
return[name[obj]]
def function[encode_pickle, parameter[obj]]:
<ast.Try object at 0x7da1b245e050>
variable[text] assign[=] call[call[name[codecs].encode, parameter[name[pickle_bytes], constant[base64]]].decode, parameter[]]
return[name[text]]
variable[type_to_tag] assign[=] call[name[collections].OrderedDict, parameter[list[[<ast.Tuple object at 0x7da1b245e170>, <ast.Tuple object at 0x7da1b245cdc0>, <ast.Tuple object at 0x7da1b245e350>]]]]
variable[tag_to_type] assign[=] <ast.DictComp object at 0x7da1b245c310>
def function[slice_part, parameter[c]]:
return[<ast.IfExp object at 0x7da1b245d690>]
def function[encode_slice, parameter[s]]:
variable[parts] assign[=] list[[<ast.Call object at 0x7da1b245ed40>, <ast.Call object at 0x7da1b245c490>, <ast.Call object at 0x7da1b245c3a0>]]
return[call[constant[:].join, parameter[name[parts]]]]
def function[decode_slice, parameter[x]]:
return[call[name[ut].smart_cast, parameter[name[x], name[slice]]]]
variable[encoders] assign[=] dictionary[[<ast.Name object at 0x7da1b245fd60>, <ast.Name object at 0x7da1b245c220>, <ast.Name object at 0x7da1b245e0e0>], [<ast.Name object at 0x7da1b245c1f0>, <ast.Name object at 0x7da1b245e0b0>, <ast.Name object at 0x7da1b245d150>]]
variable[decoders] assign[=] dictionary[[<ast.Name object at 0x7da1b245f010>, <ast.Name object at 0x7da1b245e770>, <ast.Name object at 0x7da1b245c370>], [<ast.Attribute object at 0x7da1b245e6e0>, <ast.Name object at 0x7da1b245dff0>, <ast.Name object at 0x7da1b245e410>]]
if <ast.UnaryOp object at 0x7da1b245cd30> begin[:]
<ast.Delete object at 0x7da1b245fa00>
<ast.Delete object at 0x7da1b245f6d0>
variable[type_] assign[=] call[name[tag_to_type]][name[PYOBJECT_TAG]]
<ast.Delete object at 0x7da1b245e4d0>
<ast.Delete object at 0x7da1b245d210>
class class[UtoolJSONEncoder, parameter[]] begin[:]
def function[default, parameter[self, obj]]:
if call[name[isinstance], parameter[name[obj], name[util_type].NUMPY_TYPE_TUPLE]] begin[:]
return[call[name[obj].tolist, parameter[]]]
def function[_json_object_hook, parameter[cls, value, verbose]]:
if compare[call[name[len], parameter[name[value]]] equal[==] constant[1]] begin[:]
<ast.Tuple object at 0x7da18f811270> assign[=] call[call[name[list], parameter[call[name[value].items, parameter[]]]]][constant[0]]
if compare[name[tag] in name[decoders]] begin[:]
variable[func] assign[=] call[name[decoders]][name[tag]]
variable[obj] assign[=] call[name[func], parameter[name[text]]]
return[name[obj]]
return[name[value]]
return[name[UtoolJSONEncoder]] | keyword[def] identifier[make_utool_json_encoder] ( identifier[allow_pickle] = keyword[False] ):
literal[string]
keyword[import] identifier[utool] keyword[as] identifier[ut]
identifier[PYOBJECT_TAG] = literal[string]
identifier[UUID_TAG] = literal[string]
identifier[SLICE_TAG] = literal[string]
keyword[def] identifier[decode_pickle] ( identifier[text] ):
identifier[obj] = identifier[pickle] . identifier[loads] ( identifier[codecs] . identifier[decode] ( identifier[text] . identifier[encode] (), literal[string] ))
keyword[return] identifier[obj]
keyword[def] identifier[encode_pickle] ( identifier[obj] ):
keyword[try] :
identifier[COMPATIBLE_PROTOCOL] = literal[int]
identifier[pickle_bytes] = identifier[pickle] . identifier[dumps] ( identifier[obj] , identifier[protocol] = identifier[COMPATIBLE_PROTOCOL] )
keyword[except] identifier[Exception] :
keyword[raise]
identifier[text] = identifier[codecs] . identifier[encode] ( identifier[pickle_bytes] , literal[string] ). identifier[decode] ()
keyword[return] identifier[text]
identifier[type_to_tag] = identifier[collections] . identifier[OrderedDict] ([
( identifier[slice] , identifier[SLICE_TAG] ),
( identifier[uuid] . identifier[UUID] , identifier[UUID_TAG] ),
( identifier[object] , identifier[PYOBJECT_TAG] ),
])
identifier[tag_to_type] ={ identifier[tag] : identifier[type_] keyword[for] identifier[type_] , identifier[tag] keyword[in] identifier[type_to_tag] . identifier[items] ()}
keyword[def] identifier[slice_part] ( identifier[c] ):
keyword[return] literal[string] keyword[if] identifier[c] keyword[is] keyword[None] keyword[else] identifier[str] ( identifier[c] )
keyword[def] identifier[encode_slice] ( identifier[s] ):
identifier[parts] =[ identifier[slice_part] ( identifier[s] . identifier[start] ), identifier[slice_part] ( identifier[s] . identifier[stop] ), identifier[slice_part] ( identifier[s] . identifier[step] )]
keyword[return] literal[string] . identifier[join] ( identifier[parts] )
keyword[def] identifier[decode_slice] ( identifier[x] ):
keyword[return] identifier[ut] . identifier[smart_cast] ( identifier[x] , identifier[slice] )
identifier[encoders] ={
identifier[UUID_TAG] : identifier[str] ,
identifier[SLICE_TAG] : identifier[encode_slice] ,
identifier[PYOBJECT_TAG] : identifier[encode_pickle] ,
}
identifier[decoders] ={
identifier[UUID_TAG] : identifier[uuid] . identifier[UUID] ,
identifier[SLICE_TAG] : identifier[decode_slice] ,
identifier[PYOBJECT_TAG] : identifier[decode_pickle] ,
}
keyword[if] keyword[not] identifier[allow_pickle] :
keyword[del] identifier[encoders] [ identifier[PYOBJECT_TAG] ]
keyword[del] identifier[decoders] [ identifier[PYOBJECT_TAG] ]
identifier[type_] = identifier[tag_to_type] [ identifier[PYOBJECT_TAG] ]
keyword[del] identifier[tag_to_type] [ identifier[PYOBJECT_TAG] ]
keyword[del] identifier[type_to_tag] [ identifier[type_] ]
keyword[class] identifier[UtoolJSONEncoder] ( identifier[json] . identifier[JSONEncoder] ):
keyword[def] identifier[default] ( identifier[self] , identifier[obj] ):
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[util_type] . identifier[NUMPY_TYPE_TUPLE] ):
keyword[return] identifier[obj] . identifier[tolist] ()
keyword[elif] identifier[six] . identifier[PY3] keyword[and] identifier[isinstance] ( identifier[obj] , identifier[bytes] ):
keyword[return] identifier[obj] . identifier[decode] ( literal[string] )
keyword[elif] identifier[isinstance] ( identifier[obj] ,( identifier[set] , identifier[frozenset] )):
keyword[return] identifier[list] ( identifier[obj] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[util_type] . identifier[PRIMATIVE_TYPES] ):
keyword[return] identifier[json] . identifier[JSONEncoder] . identifier[default] ( identifier[self] , identifier[obj] )
keyword[elif] identifier[hasattr] ( identifier[obj] , literal[string] ):
keyword[return] identifier[obj] . identifier[__getstate__] ()
keyword[else] :
keyword[for] identifier[type_] , identifier[tag] keyword[in] identifier[type_to_tag] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[type_] ):
identifier[func] = identifier[encoders] [ identifier[tag] ]
identifier[text] = identifier[func] ( identifier[obj] )
keyword[return] { identifier[tag] : identifier[text] }
keyword[raise] identifier[TypeError] ( literal[string] %( identifier[type] ( identifier[obj] )))
@ identifier[classmethod]
keyword[def] identifier[_json_object_hook] ( identifier[cls] , identifier[value] , identifier[verbose] = keyword[False] ,** identifier[kwargs] ):
keyword[if] identifier[len] ( identifier[value] )== literal[int] :
identifier[tag] , identifier[text] = identifier[list] ( identifier[value] . identifier[items] ())[ literal[int] ]
keyword[if] identifier[tag] keyword[in] identifier[decoders] :
identifier[func] = identifier[decoders] [ identifier[tag] ]
identifier[obj] = identifier[func] ( identifier[text] )
keyword[return] identifier[obj]
keyword[else] :
keyword[return] identifier[value]
keyword[return] identifier[value]
keyword[return] identifier[UtoolJSONEncoder] | def make_utool_json_encoder(allow_pickle=False):
"""
References:
http://stackoverflow.com/questions/8230315/python-sets-are
http://stackoverflow.com/questions/11561932/why-does-json
https://github.com/jsonpickle/jsonpickle
http://stackoverflow.com/questions/24369666/typeerror-b1
http://stackoverflow.com/questions/30469575/how-to-pickle
"""
import utool as ut
PYOBJECT_TAG = '__PYTHON_OBJECT__'
UUID_TAG = '__UUID__'
SLICE_TAG = '__SLICE__'
def decode_pickle(text):
obj = pickle.loads(codecs.decode(text.encode(), 'base64'))
return obj
def encode_pickle(obj):
try:
# Use protocol 2 to support both python2.7 and python3
COMPATIBLE_PROTOCOL = 2
pickle_bytes = pickle.dumps(obj, protocol=COMPATIBLE_PROTOCOL) # depends on [control=['try'], data=[]]
except Exception:
raise # depends on [control=['except'], data=[]]
text = codecs.encode(pickle_bytes, 'base64').decode()
return text
type_to_tag = collections.OrderedDict([(slice, SLICE_TAG), (uuid.UUID, UUID_TAG), (object, PYOBJECT_TAG)])
tag_to_type = {tag: type_ for (type_, tag) in type_to_tag.items()}
def slice_part(c):
return '' if c is None else str(c)
def encode_slice(s):
parts = [slice_part(s.start), slice_part(s.stop), slice_part(s.step)]
return ':'.join(parts)
def decode_slice(x):
return ut.smart_cast(x, slice)
encoders = {UUID_TAG: str, SLICE_TAG: encode_slice, PYOBJECT_TAG: encode_pickle}
decoders = {UUID_TAG: uuid.UUID, SLICE_TAG: decode_slice, PYOBJECT_TAG: decode_pickle}
if not allow_pickle:
del encoders[PYOBJECT_TAG]
del decoders[PYOBJECT_TAG]
type_ = tag_to_type[PYOBJECT_TAG]
del tag_to_type[PYOBJECT_TAG]
del type_to_tag[type_] # depends on [control=['if'], data=[]]
class UtoolJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, util_type.NUMPY_TYPE_TUPLE):
return obj.tolist() # depends on [control=['if'], data=[]]
elif six.PY3 and isinstance(obj, bytes):
return obj.decode('utf-8') # depends on [control=['if'], data=[]]
elif isinstance(obj, (set, frozenset)):
return list(obj) # depends on [control=['if'], data=[]]
# return json.JSONEncoder.default(self, list(obj))
# return [json.JSONEncoder.default(o) for o in obj]
elif isinstance(obj, util_type.PRIMATIVE_TYPES):
return json.JSONEncoder.default(self, obj) # depends on [control=['if'], data=[]]
elif hasattr(obj, '__getstate__'):
return obj.__getstate__() # depends on [control=['if'], data=[]]
else:
for (type_, tag) in type_to_tag.items():
if isinstance(obj, type_):
#print('----')
#print('encoder obj = %r' % (obj,))
#print('encoder type_ = %r' % (type_,))
func = encoders[tag]
text = func(obj)
return {tag: text} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise TypeError('Invalid serialization type=%r' % type(obj))
@classmethod
def _json_object_hook(cls, value, verbose=False, **kwargs):
if len(value) == 1:
(tag, text) = list(value.items())[0]
if tag in decoders:
#print('----')
#print('decoder tag = %r' % (tag,))
func = decoders[tag]
obj = func(text)
#print('decoder obj = %r' % (obj,))
return obj # depends on [control=['if'], data=['tag', 'decoders']] # depends on [control=['if'], data=[]]
else:
return value
return value
return UtoolJSONEncoder |
def run_mainloop_with(self, target):
"""Start the OS's main loop to process asyncronous BLE events and then
run the specified target function in a background thread. Target
function should be a function that takes no parameters and optionally
return an integer response code. When the target function stops
executing or returns with value then the main loop will be stopped and
the program will exit with the returned code.
Note that an OS main loop is required to process asyncronous BLE events
and this function is provided as a convenience for writing simple tools
and scripts that don't need to be full-blown GUI applications. If you
are writing a GUI application that has a main loop (a GTK glib main loop
on Linux, or a Cocoa main loop on OSX) then you don't need to call this
function.
"""
# Spin up a background thread to run the target code.
self._user_thread = threading.Thread(target=self._user_thread_main, args=(target,))
self._user_thread.daemon = True # Don't let the user thread block exit.
self._user_thread.start()
# Spin up a GLib main loop in the main thread to process async BLE events.
self._gobject_mainloop = GObject.MainLoop()
try:
self._gobject_mainloop.run() # Doesn't return until the mainloop ends.
except KeyboardInterrupt:
self._gobject_mainloop.quit()
sys.exit(0)
# Main loop finished. Check if an exception occured and throw it,
# otherwise return the status code from the user code.
if self._exception is not None:
# Rethrow exception with its original stack trace following advice from:
# http://nedbatchelder.com/blog/200711/rethrowing_exceptions_in_python.html
raise_(self._exception[1], None, self._exception[2])
else:
sys.exit(self._return_code) | def function[run_mainloop_with, parameter[self, target]]:
constant[Start the OS's main loop to process asyncronous BLE events and then
run the specified target function in a background thread. Target
function should be a function that takes no parameters and optionally
return an integer response code. When the target function stops
executing or returns with value then the main loop will be stopped and
the program will exit with the returned code.
Note that an OS main loop is required to process asyncronous BLE events
and this function is provided as a convenience for writing simple tools
and scripts that don't need to be full-blown GUI applications. If you
are writing a GUI application that has a main loop (a GTK glib main loop
on Linux, or a Cocoa main loop on OSX) then you don't need to call this
function.
]
name[self]._user_thread assign[=] call[name[threading].Thread, parameter[]]
name[self]._user_thread.daemon assign[=] constant[True]
call[name[self]._user_thread.start, parameter[]]
name[self]._gobject_mainloop assign[=] call[name[GObject].MainLoop, parameter[]]
<ast.Try object at 0x7da20c6e4be0>
if compare[name[self]._exception is_not constant[None]] begin[:]
call[name[raise_], parameter[call[name[self]._exception][constant[1]], constant[None], call[name[self]._exception][constant[2]]]] | keyword[def] identifier[run_mainloop_with] ( identifier[self] , identifier[target] ):
literal[string]
identifier[self] . identifier[_user_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[_user_thread_main] , identifier[args] =( identifier[target] ,))
identifier[self] . identifier[_user_thread] . identifier[daemon] = keyword[True]
identifier[self] . identifier[_user_thread] . identifier[start] ()
identifier[self] . identifier[_gobject_mainloop] = identifier[GObject] . identifier[MainLoop] ()
keyword[try] :
identifier[self] . identifier[_gobject_mainloop] . identifier[run] ()
keyword[except] identifier[KeyboardInterrupt] :
identifier[self] . identifier[_gobject_mainloop] . identifier[quit] ()
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] identifier[self] . identifier[_exception] keyword[is] keyword[not] keyword[None] :
identifier[raise_] ( identifier[self] . identifier[_exception] [ literal[int] ], keyword[None] , identifier[self] . identifier[_exception] [ literal[int] ])
keyword[else] :
identifier[sys] . identifier[exit] ( identifier[self] . identifier[_return_code] ) | def run_mainloop_with(self, target):
"""Start the OS's main loop to process asyncronous BLE events and then
run the specified target function in a background thread. Target
function should be a function that takes no parameters and optionally
return an integer response code. When the target function stops
executing or returns with value then the main loop will be stopped and
the program will exit with the returned code.
Note that an OS main loop is required to process asyncronous BLE events
and this function is provided as a convenience for writing simple tools
and scripts that don't need to be full-blown GUI applications. If you
are writing a GUI application that has a main loop (a GTK glib main loop
on Linux, or a Cocoa main loop on OSX) then you don't need to call this
function.
"""
# Spin up a background thread to run the target code.
self._user_thread = threading.Thread(target=self._user_thread_main, args=(target,))
self._user_thread.daemon = True # Don't let the user thread block exit.
self._user_thread.start()
# Spin up a GLib main loop in the main thread to process async BLE events.
self._gobject_mainloop = GObject.MainLoop()
try:
self._gobject_mainloop.run() # Doesn't return until the mainloop ends. # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
self._gobject_mainloop.quit()
sys.exit(0) # depends on [control=['except'], data=[]]
# Main loop finished. Check if an exception occured and throw it,
# otherwise return the status code from the user code.
if self._exception is not None:
# Rethrow exception with its original stack trace following advice from:
# http://nedbatchelder.com/blog/200711/rethrowing_exceptions_in_python.html
raise_(self._exception[1], None, self._exception[2]) # depends on [control=['if'], data=[]]
else:
sys.exit(self._return_code) |
def remove_from_master_node(self, wait_for_finish=False, timeout=20, **kw):
"""
Remove this VSS Context from it's parent VSS Container.
This is required before calling VSSContext.delete(). It preps
the engine for removal.
:param bool wait_for_finish: wait for the task to finish
:param int timeout: how long to wait if delay
:type: TaskOperationPoller
"""
return Task.execute(self, 'remove_from_master_node',
timeout=timeout, wait_for_finish=wait_for_finish, **kw) | def function[remove_from_master_node, parameter[self, wait_for_finish, timeout]]:
constant[
Remove this VSS Context from it's parent VSS Container.
This is required before calling VSSContext.delete(). It preps
the engine for removal.
:param bool wait_for_finish: wait for the task to finish
:param int timeout: how long to wait if delay
:type: TaskOperationPoller
]
return[call[name[Task].execute, parameter[name[self], constant[remove_from_master_node]]]] | keyword[def] identifier[remove_from_master_node] ( identifier[self] , identifier[wait_for_finish] = keyword[False] , identifier[timeout] = literal[int] ,** identifier[kw] ):
literal[string]
keyword[return] identifier[Task] . identifier[execute] ( identifier[self] , literal[string] ,
identifier[timeout] = identifier[timeout] , identifier[wait_for_finish] = identifier[wait_for_finish] ,** identifier[kw] ) | def remove_from_master_node(self, wait_for_finish=False, timeout=20, **kw):
"""
Remove this VSS Context from it's parent VSS Container.
This is required before calling VSSContext.delete(). It preps
the engine for removal.
:param bool wait_for_finish: wait for the task to finish
:param int timeout: how long to wait if delay
:type: TaskOperationPoller
"""
return Task.execute(self, 'remove_from_master_node', timeout=timeout, wait_for_finish=wait_for_finish, **kw) |
def venv():
"""Install venv + deps."""
try:
import virtualenv # NOQA
except ImportError:
sh("%s -m pip install virtualenv" % PYTHON)
if not os.path.isdir("venv"):
sh("%s -m virtualenv venv" % PYTHON)
sh("venv\\Scripts\\pip install -r %s" % (REQUIREMENTS_TXT)) | def function[venv, parameter[]]:
constant[Install venv + deps.]
<ast.Try object at 0x7da18f09da50>
if <ast.UnaryOp object at 0x7da2041d86a0> begin[:]
call[name[sh], parameter[binary_operation[constant[%s -m virtualenv venv] <ast.Mod object at 0x7da2590d6920> name[PYTHON]]]]
call[name[sh], parameter[binary_operation[constant[venv\Scripts\pip install -r %s] <ast.Mod object at 0x7da2590d6920> name[REQUIREMENTS_TXT]]]] | keyword[def] identifier[venv] ():
literal[string]
keyword[try] :
keyword[import] identifier[virtualenv]
keyword[except] identifier[ImportError] :
identifier[sh] ( literal[string] % identifier[PYTHON] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( literal[string] ):
identifier[sh] ( literal[string] % identifier[PYTHON] )
identifier[sh] ( literal[string] %( identifier[REQUIREMENTS_TXT] )) | def venv():
"""Install venv + deps."""
try:
import virtualenv # NOQA # depends on [control=['try'], data=[]]
except ImportError:
sh('%s -m pip install virtualenv' % PYTHON) # depends on [control=['except'], data=[]]
if not os.path.isdir('venv'):
sh('%s -m virtualenv venv' % PYTHON) # depends on [control=['if'], data=[]]
sh('venv\\Scripts\\pip install -r %s' % REQUIREMENTS_TXT) |
def main():
'''Main routine.'''
# Load Azure app defaults
try:
with open('azurermconfig.json') as config_file:
config_data = json.load(config_file)
except FileNotFoundError:
sys.exit("Error: Expecting azurermconfig.json in current folder")
tenant_id = config_data['tenantId']
app_id = config_data['appId']
app_secret = config_data['appSecret']
subscription_id = config_data['subscriptionId']
access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)
# list locations
locations = azurerm.list_locations(access_token, subscription_id)
for location in locations['value']:
print(location['name']
+ ', Display Name: ' + location['displayName']
+ ', Coords: ' + location['latitude']
+ ', ' + location['longitude']) | def function[main, parameter[]]:
constant[Main routine.]
<ast.Try object at 0x7da1b04d8a90>
variable[tenant_id] assign[=] call[name[config_data]][constant[tenantId]]
variable[app_id] assign[=] call[name[config_data]][constant[appId]]
variable[app_secret] assign[=] call[name[config_data]][constant[appSecret]]
variable[subscription_id] assign[=] call[name[config_data]][constant[subscriptionId]]
variable[access_token] assign[=] call[name[azurerm].get_access_token, parameter[name[tenant_id], name[app_id], name[app_secret]]]
variable[locations] assign[=] call[name[azurerm].list_locations, parameter[name[access_token], name[subscription_id]]]
for taget[name[location]] in starred[call[name[locations]][constant[value]]] begin[:]
call[name[print], parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[location]][constant[name]] + constant[, Display Name: ]] + call[name[location]][constant[displayName]]] + constant[, Coords: ]] + call[name[location]][constant[latitude]]] + constant[, ]] + call[name[location]][constant[longitude]]]]] | keyword[def] identifier[main] ():
literal[string]
keyword[try] :
keyword[with] identifier[open] ( literal[string] ) keyword[as] identifier[config_file] :
identifier[config_data] = identifier[json] . identifier[load] ( identifier[config_file] )
keyword[except] identifier[FileNotFoundError] :
identifier[sys] . identifier[exit] ( literal[string] )
identifier[tenant_id] = identifier[config_data] [ literal[string] ]
identifier[app_id] = identifier[config_data] [ literal[string] ]
identifier[app_secret] = identifier[config_data] [ literal[string] ]
identifier[subscription_id] = identifier[config_data] [ literal[string] ]
identifier[access_token] = identifier[azurerm] . identifier[get_access_token] ( identifier[tenant_id] , identifier[app_id] , identifier[app_secret] )
identifier[locations] = identifier[azurerm] . identifier[list_locations] ( identifier[access_token] , identifier[subscription_id] )
keyword[for] identifier[location] keyword[in] identifier[locations] [ literal[string] ]:
identifier[print] ( identifier[location] [ literal[string] ]
+ literal[string] + identifier[location] [ literal[string] ]
+ literal[string] + identifier[location] [ literal[string] ]
+ literal[string] + identifier[location] [ literal[string] ]) | def main():
"""Main routine."""
# Load Azure app defaults
try:
with open('azurermconfig.json') as config_file:
config_data = json.load(config_file) # depends on [control=['with'], data=['config_file']] # depends on [control=['try'], data=[]]
except FileNotFoundError:
sys.exit('Error: Expecting azurermconfig.json in current folder') # depends on [control=['except'], data=[]]
tenant_id = config_data['tenantId']
app_id = config_data['appId']
app_secret = config_data['appSecret']
subscription_id = config_data['subscriptionId']
access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)
# list locations
locations = azurerm.list_locations(access_token, subscription_id)
for location in locations['value']:
print(location['name'] + ', Display Name: ' + location['displayName'] + ', Coords: ' + location['latitude'] + ', ' + location['longitude']) # depends on [control=['for'], data=['location']] |
def remove_user(self, username):
""" Remove user from the SQLite database.
* `username` [string]
Username of user to remove.
"""
sql = '''DELETE FROM user WHERE username = ?'''
try:
self._db_curs.execute(sql, (username, ))
self._db_conn.commit()
except (sqlite3.OperationalError, sqlite3.IntegrityError) as error:
raise AuthError(error)
return self._db_curs.rowcount | def function[remove_user, parameter[self, username]]:
constant[ Remove user from the SQLite database.
* `username` [string]
Username of user to remove.
]
variable[sql] assign[=] constant[DELETE FROM user WHERE username = ?]
<ast.Try object at 0x7da2046239a0>
return[name[self]._db_curs.rowcount] | keyword[def] identifier[remove_user] ( identifier[self] , identifier[username] ):
literal[string]
identifier[sql] = literal[string]
keyword[try] :
identifier[self] . identifier[_db_curs] . identifier[execute] ( identifier[sql] ,( identifier[username] ,))
identifier[self] . identifier[_db_conn] . identifier[commit] ()
keyword[except] ( identifier[sqlite3] . identifier[OperationalError] , identifier[sqlite3] . identifier[IntegrityError] ) keyword[as] identifier[error] :
keyword[raise] identifier[AuthError] ( identifier[error] )
keyword[return] identifier[self] . identifier[_db_curs] . identifier[rowcount] | def remove_user(self, username):
""" Remove user from the SQLite database.
* `username` [string]
Username of user to remove.
"""
sql = 'DELETE FROM user WHERE username = ?'
try:
self._db_curs.execute(sql, (username,))
self._db_conn.commit() # depends on [control=['try'], data=[]]
except (sqlite3.OperationalError, sqlite3.IntegrityError) as error:
raise AuthError(error) # depends on [control=['except'], data=['error']]
return self._db_curs.rowcount |
def Register(self, a, b, migrated_entity):
"""Registers a merge mapping.
If a and b are both not None, this means that entities a and b were merged
to produce migrated_entity. If one of a or b are not None, then it means
it was not merged but simply migrated.
The effect of a call to register is to update a_merge_map and b_merge_map
according to the merge. Also the private attributes _migrated_entity of a
and b are set to migrated_entity.
Args:
a: The entity from the old feed or None.
b: The entity from the new feed or None.
migrated_entity: The migrated entity.
"""
# There are a few places where code needs to find the corresponding
# migrated entity of an object without knowing in which original schedule
# the entity started. With a_merge_map and b_merge_map both have to be
# checked. Use of the _migrated_entity attribute allows the migrated entity
# to be directly found without the schedule. The merge maps also require
# that all objects be hashable. GenericGTFSObject is at the moment, but
# this is a bug. See comment in transitfeed.GenericGTFSObject.
if a is not None:
self.a_merge_map[a] = migrated_entity
a._migrated_entity = migrated_entity
if b is not None:
self.b_merge_map[b] = migrated_entity
b._migrated_entity = migrated_entity | def function[Register, parameter[self, a, b, migrated_entity]]:
constant[Registers a merge mapping.
If a and b are both not None, this means that entities a and b were merged
to produce migrated_entity. If one of a or b are not None, then it means
it was not merged but simply migrated.
The effect of a call to register is to update a_merge_map and b_merge_map
according to the merge. Also the private attributes _migrated_entity of a
and b are set to migrated_entity.
Args:
a: The entity from the old feed or None.
b: The entity from the new feed or None.
migrated_entity: The migrated entity.
]
if compare[name[a] is_not constant[None]] begin[:]
call[name[self].a_merge_map][name[a]] assign[=] name[migrated_entity]
name[a]._migrated_entity assign[=] name[migrated_entity]
if compare[name[b] is_not constant[None]] begin[:]
call[name[self].b_merge_map][name[b]] assign[=] name[migrated_entity]
name[b]._migrated_entity assign[=] name[migrated_entity] | keyword[def] identifier[Register] ( identifier[self] , identifier[a] , identifier[b] , identifier[migrated_entity] ):
literal[string]
keyword[if] identifier[a] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[a_merge_map] [ identifier[a] ]= identifier[migrated_entity]
identifier[a] . identifier[_migrated_entity] = identifier[migrated_entity]
keyword[if] identifier[b] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[b_merge_map] [ identifier[b] ]= identifier[migrated_entity]
identifier[b] . identifier[_migrated_entity] = identifier[migrated_entity] | def Register(self, a, b, migrated_entity):
"""Registers a merge mapping.
If a and b are both not None, this means that entities a and b were merged
to produce migrated_entity. If one of a or b are not None, then it means
it was not merged but simply migrated.
The effect of a call to register is to update a_merge_map and b_merge_map
according to the merge. Also the private attributes _migrated_entity of a
and b are set to migrated_entity.
Args:
a: The entity from the old feed or None.
b: The entity from the new feed or None.
migrated_entity: The migrated entity.
"""
# There are a few places where code needs to find the corresponding
# migrated entity of an object without knowing in which original schedule
# the entity started. With a_merge_map and b_merge_map both have to be
# checked. Use of the _migrated_entity attribute allows the migrated entity
# to be directly found without the schedule. The merge maps also require
# that all objects be hashable. GenericGTFSObject is at the moment, but
# this is a bug. See comment in transitfeed.GenericGTFSObject.
if a is not None:
self.a_merge_map[a] = migrated_entity
a._migrated_entity = migrated_entity # depends on [control=['if'], data=['a']]
if b is not None:
self.b_merge_map[b] = migrated_entity
b._migrated_entity = migrated_entity # depends on [control=['if'], data=['b']] |
def todict(self):
'''Convert to python dictionary.
@return dict suitable for initializing another IntelHex object.
'''
r = {}
r.update(self._buf)
if self.start_addr:
r['start_addr'] = self.start_addr
return r | def function[todict, parameter[self]]:
constant[Convert to python dictionary.
@return dict suitable for initializing another IntelHex object.
]
variable[r] assign[=] dictionary[[], []]
call[name[r].update, parameter[name[self]._buf]]
if name[self].start_addr begin[:]
call[name[r]][constant[start_addr]] assign[=] name[self].start_addr
return[name[r]] | keyword[def] identifier[todict] ( identifier[self] ):
literal[string]
identifier[r] ={}
identifier[r] . identifier[update] ( identifier[self] . identifier[_buf] )
keyword[if] identifier[self] . identifier[start_addr] :
identifier[r] [ literal[string] ]= identifier[self] . identifier[start_addr]
keyword[return] identifier[r] | def todict(self):
"""Convert to python dictionary.
@return dict suitable for initializing another IntelHex object.
"""
r = {}
r.update(self._buf)
if self.start_addr:
r['start_addr'] = self.start_addr # depends on [control=['if'], data=[]]
return r |
def coordinate_grad_semi_dual(b, M, reg, beta, i):
'''
Compute the coordinate gradient update for regularized discrete distributions for (i, :)
The function computes the gradient of the semi dual problem:
.. math::
\max_v \sum_i (\sum_j v_j * b_j - reg * log(\sum_j exp((v_j - M_{i,j})/reg) * b_j)) * a_i
Where :
- M is the (ns,nt) metric cost matrix
- v is a dual variable in R^J
- reg is the regularization term
- a and b are source and target weights (sum to 1)
The algorithm used for solving the problem is the ASGD & SAG algorithms
as proposed in [18]_ [alg.1 & alg.2]
Parameters
----------
b : np.ndarray(nt,)
target measure
M : np.ndarray(ns, nt)
cost matrix
reg : float nu
Regularization term > 0
v : np.ndarray(nt,)
dual variable
i : number int
picked number i
Returns
-------
coordinate gradient : np.ndarray(nt,)
Examples
--------
>>> n_source = 7
>>> n_target = 4
>>> reg = 1
>>> numItermax = 300000
>>> a = ot.utils.unif(n_source)
>>> b = ot.utils.unif(n_target)
>>> rng = np.random.RandomState(0)
>>> X_source = rng.randn(n_source, 2)
>>> Y_target = rng.randn(n_target, 2)
>>> M = ot.dist(X_source, Y_target)
>>> method = "ASGD"
>>> asgd_pi = stochastic.solve_semi_dual_entropic(a, b, M, reg,
method, numItermax)
>>> print(asgd_pi)
References
----------
[Genevay et al., 2016] :
Stochastic Optimization for Large-scale Optimal Transport,
Advances in Neural Information Processing Systems (2016),
arXiv preprint arxiv:1605.08527.
'''
r = M[i, :] - beta
exp_beta = np.exp(-r / reg) * b
khi = exp_beta / (np.sum(exp_beta))
return b - khi | def function[coordinate_grad_semi_dual, parameter[b, M, reg, beta, i]]:
constant[
Compute the coordinate gradient update for regularized discrete distributions for (i, :)
The function computes the gradient of the semi dual problem:
.. math::
\max_v \sum_i (\sum_j v_j * b_j - reg * log(\sum_j exp((v_j - M_{i,j})/reg) * b_j)) * a_i
Where :
- M is the (ns,nt) metric cost matrix
- v is a dual variable in R^J
- reg is the regularization term
- a and b are source and target weights (sum to 1)
The algorithm used for solving the problem is the ASGD & SAG algorithms
as proposed in [18]_ [alg.1 & alg.2]
Parameters
----------
b : np.ndarray(nt,)
target measure
M : np.ndarray(ns, nt)
cost matrix
reg : float nu
Regularization term > 0
v : np.ndarray(nt,)
dual variable
i : number int
picked number i
Returns
-------
coordinate gradient : np.ndarray(nt,)
Examples
--------
>>> n_source = 7
>>> n_target = 4
>>> reg = 1
>>> numItermax = 300000
>>> a = ot.utils.unif(n_source)
>>> b = ot.utils.unif(n_target)
>>> rng = np.random.RandomState(0)
>>> X_source = rng.randn(n_source, 2)
>>> Y_target = rng.randn(n_target, 2)
>>> M = ot.dist(X_source, Y_target)
>>> method = "ASGD"
>>> asgd_pi = stochastic.solve_semi_dual_entropic(a, b, M, reg,
method, numItermax)
>>> print(asgd_pi)
References
----------
[Genevay et al., 2016] :
Stochastic Optimization for Large-scale Optimal Transport,
Advances in Neural Information Processing Systems (2016),
arXiv preprint arxiv:1605.08527.
]
variable[r] assign[=] binary_operation[call[name[M]][tuple[[<ast.Name object at 0x7da1b18dd150>, <ast.Slice object at 0x7da1b18dfd60>]]] - name[beta]]
variable[exp_beta] assign[=] binary_operation[call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b18de920> / name[reg]]]] * name[b]]
variable[khi] assign[=] binary_operation[name[exp_beta] / call[name[np].sum, parameter[name[exp_beta]]]]
return[binary_operation[name[b] - name[khi]]] | keyword[def] identifier[coordinate_grad_semi_dual] ( identifier[b] , identifier[M] , identifier[reg] , identifier[beta] , identifier[i] ):
literal[string]
identifier[r] = identifier[M] [ identifier[i] ,:]- identifier[beta]
identifier[exp_beta] = identifier[np] . identifier[exp] (- identifier[r] / identifier[reg] )* identifier[b]
identifier[khi] = identifier[exp_beta] /( identifier[np] . identifier[sum] ( identifier[exp_beta] ))
keyword[return] identifier[b] - identifier[khi] | def coordinate_grad_semi_dual(b, M, reg, beta, i):
"""
Compute the coordinate gradient update for regularized discrete distributions for (i, :)
The function computes the gradient of the semi dual problem:
.. math::
\\max_v \\sum_i (\\sum_j v_j * b_j - reg * log(\\sum_j exp((v_j - M_{i,j})/reg) * b_j)) * a_i
Where :
- M is the (ns,nt) metric cost matrix
- v is a dual variable in R^J
- reg is the regularization term
- a and b are source and target weights (sum to 1)
The algorithm used for solving the problem is the ASGD & SAG algorithms
as proposed in [18]_ [alg.1 & alg.2]
Parameters
----------
b : np.ndarray(nt,)
target measure
M : np.ndarray(ns, nt)
cost matrix
reg : float nu
Regularization term > 0
v : np.ndarray(nt,)
dual variable
i : number int
picked number i
Returns
-------
coordinate gradient : np.ndarray(nt,)
Examples
--------
>>> n_source = 7
>>> n_target = 4
>>> reg = 1
>>> numItermax = 300000
>>> a = ot.utils.unif(n_source)
>>> b = ot.utils.unif(n_target)
>>> rng = np.random.RandomState(0)
>>> X_source = rng.randn(n_source, 2)
>>> Y_target = rng.randn(n_target, 2)
>>> M = ot.dist(X_source, Y_target)
>>> method = "ASGD"
>>> asgd_pi = stochastic.solve_semi_dual_entropic(a, b, M, reg,
method, numItermax)
>>> print(asgd_pi)
References
----------
[Genevay et al., 2016] :
Stochastic Optimization for Large-scale Optimal Transport,
Advances in Neural Information Processing Systems (2016),
arXiv preprint arxiv:1605.08527.
"""
r = M[i, :] - beta
exp_beta = np.exp(-r / reg) * b
khi = exp_beta / np.sum(exp_beta)
return b - khi |
def create_configfield_ref_target_node(target_id, env, lineno):
"""Create a ``target`` node that marks a configuration field.
Internally, this also adds to the ``lsst_configfields`` attribute of the
environment that is consumed by `documenteer.sphinxext.lssttasks.
crossrefs.process_pending_configfield_xref_nodes`.
See also
--------
`documenteer.sphinxext.lssttasks.crossrefs.process_pending_configfield_xref_nodes`
"""
target_node = nodes.target('', '', ids=[target_id])
# Store these task/configurable topic nodes in the environment for later
# cross referencing.
if not hasattr(env, 'lsst_configfields'):
env.lsst_configfields = {}
env.lsst_configfields[target_id] = {
'docname': env.docname,
'lineno': lineno,
'target': target_node,
}
return target_node | def function[create_configfield_ref_target_node, parameter[target_id, env, lineno]]:
constant[Create a ``target`` node that marks a configuration field.
Internally, this also adds to the ``lsst_configfields`` attribute of the
environment that is consumed by `documenteer.sphinxext.lssttasks.
crossrefs.process_pending_configfield_xref_nodes`.
See also
--------
`documenteer.sphinxext.lssttasks.crossrefs.process_pending_configfield_xref_nodes`
]
variable[target_node] assign[=] call[name[nodes].target, parameter[constant[], constant[]]]
if <ast.UnaryOp object at 0x7da1b2351540> begin[:]
name[env].lsst_configfields assign[=] dictionary[[], []]
call[name[env].lsst_configfields][name[target_id]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2352b00>, <ast.Constant object at 0x7da1b2353010>, <ast.Constant object at 0x7da1b2350fd0>], [<ast.Attribute object at 0x7da1b2353610>, <ast.Name object at 0x7da1b2351210>, <ast.Name object at 0x7da1b2350130>]]
return[name[target_node]] | keyword[def] identifier[create_configfield_ref_target_node] ( identifier[target_id] , identifier[env] , identifier[lineno] ):
literal[string]
identifier[target_node] = identifier[nodes] . identifier[target] ( literal[string] , literal[string] , identifier[ids] =[ identifier[target_id] ])
keyword[if] keyword[not] identifier[hasattr] ( identifier[env] , literal[string] ):
identifier[env] . identifier[lsst_configfields] ={}
identifier[env] . identifier[lsst_configfields] [ identifier[target_id] ]={
literal[string] : identifier[env] . identifier[docname] ,
literal[string] : identifier[lineno] ,
literal[string] : identifier[target_node] ,
}
keyword[return] identifier[target_node] | def create_configfield_ref_target_node(target_id, env, lineno):
"""Create a ``target`` node that marks a configuration field.
Internally, this also adds to the ``lsst_configfields`` attribute of the
environment that is consumed by `documenteer.sphinxext.lssttasks.
crossrefs.process_pending_configfield_xref_nodes`.
See also
--------
`documenteer.sphinxext.lssttasks.crossrefs.process_pending_configfield_xref_nodes`
"""
target_node = nodes.target('', '', ids=[target_id])
# Store these task/configurable topic nodes in the environment for later
# cross referencing.
if not hasattr(env, 'lsst_configfields'):
env.lsst_configfields = {} # depends on [control=['if'], data=[]]
env.lsst_configfields[target_id] = {'docname': env.docname, 'lineno': lineno, 'target': target_node}
return target_node |
def set_yticks(self, row, column, ticks):
"""Manually specify the y-axis tick values.
:param row,column: specify the subplot.
:param ticks: list of tick values.
"""
subplot = self.get_subplot_at(row, column)
subplot.set_yticks(ticks) | def function[set_yticks, parameter[self, row, column, ticks]]:
constant[Manually specify the y-axis tick values.
:param row,column: specify the subplot.
:param ticks: list of tick values.
]
variable[subplot] assign[=] call[name[self].get_subplot_at, parameter[name[row], name[column]]]
call[name[subplot].set_yticks, parameter[name[ticks]]] | keyword[def] identifier[set_yticks] ( identifier[self] , identifier[row] , identifier[column] , identifier[ticks] ):
literal[string]
identifier[subplot] = identifier[self] . identifier[get_subplot_at] ( identifier[row] , identifier[column] )
identifier[subplot] . identifier[set_yticks] ( identifier[ticks] ) | def set_yticks(self, row, column, ticks):
"""Manually specify the y-axis tick values.
:param row,column: specify the subplot.
:param ticks: list of tick values.
"""
subplot = self.get_subplot_at(row, column)
subplot.set_yticks(ticks) |
def _load(self, f, layer=None, source=None):
"""Load data from a yaml formatted file.
Parameters
----------
f : str or file like object
If f is a string then it is interpreted as a path to the file to load
If it is a file like object then data is read directly from it.
layer : str
layer to load data into. If none is supplied the outermost one is used
source : str
Source to attribute the values to
"""
if hasattr(f, 'read'):
self._loads(f.read(), layer=layer, source=source)
else:
with open(f) as f:
self._loads(f.read(), layer=layer, source=source) | def function[_load, parameter[self, f, layer, source]]:
constant[Load data from a yaml formatted file.
Parameters
----------
f : str or file like object
If f is a string then it is interpreted as a path to the file to load
If it is a file like object then data is read directly from it.
layer : str
layer to load data into. If none is supplied the outermost one is used
source : str
Source to attribute the values to
]
if call[name[hasattr], parameter[name[f], constant[read]]] begin[:]
call[name[self]._loads, parameter[call[name[f].read, parameter[]]]] | keyword[def] identifier[_load] ( identifier[self] , identifier[f] , identifier[layer] = keyword[None] , identifier[source] = keyword[None] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[f] , literal[string] ):
identifier[self] . identifier[_loads] ( identifier[f] . identifier[read] (), identifier[layer] = identifier[layer] , identifier[source] = identifier[source] )
keyword[else] :
keyword[with] identifier[open] ( identifier[f] ) keyword[as] identifier[f] :
identifier[self] . identifier[_loads] ( identifier[f] . identifier[read] (), identifier[layer] = identifier[layer] , identifier[source] = identifier[source] ) | def _load(self, f, layer=None, source=None):
"""Load data from a yaml formatted file.
Parameters
----------
f : str or file like object
If f is a string then it is interpreted as a path to the file to load
If it is a file like object then data is read directly from it.
layer : str
layer to load data into. If none is supplied the outermost one is used
source : str
Source to attribute the values to
"""
if hasattr(f, 'read'):
self._loads(f.read(), layer=layer, source=source) # depends on [control=['if'], data=[]]
else:
with open(f) as f:
self._loads(f.read(), layer=layer, source=source) # depends on [control=['with'], data=['f']] |
def log(self, txt: str) -> bool:
""" Log txt (if any) to the log file (if any). Return value indicates whether it is ok to terminate on the first
error or whether we need to continue processing.
:param txt: text to log.
:return: True if we aren't logging, False if we are.
"""
self.nerrors += 1
if self._logfile is not None:
print(txt, file=self._logfile)
return not self.logging | def function[log, parameter[self, txt]]:
constant[ Log txt (if any) to the log file (if any). Return value indicates whether it is ok to terminate on the first
error or whether we need to continue processing.
:param txt: text to log.
:return: True if we aren't logging, False if we are.
]
<ast.AugAssign object at 0x7da20c993850>
if compare[name[self]._logfile is_not constant[None]] begin[:]
call[name[print], parameter[name[txt]]]
return[<ast.UnaryOp object at 0x7da2041da830>] | keyword[def] identifier[log] ( identifier[self] , identifier[txt] : identifier[str] )-> identifier[bool] :
literal[string]
identifier[self] . identifier[nerrors] += literal[int]
keyword[if] identifier[self] . identifier[_logfile] keyword[is] keyword[not] keyword[None] :
identifier[print] ( identifier[txt] , identifier[file] = identifier[self] . identifier[_logfile] )
keyword[return] keyword[not] identifier[self] . identifier[logging] | def log(self, txt: str) -> bool:
""" Log txt (if any) to the log file (if any). Return value indicates whether it is ok to terminate on the first
error or whether we need to continue processing.
:param txt: text to log.
:return: True if we aren't logging, False if we are.
"""
self.nerrors += 1
if self._logfile is not None:
print(txt, file=self._logfile) # depends on [control=['if'], data=[]]
return not self.logging |
def delete_project(self, project_name):
"""
Delete a project with the specified name. Raises ItemNotFound if no such project exists
:param project_name: str: name of the project to delete
:return:
"""
project = self._get_project_for_name(project_name)
project.delete()
self.clear_project_cache() | def function[delete_project, parameter[self, project_name]]:
constant[
Delete a project with the specified name. Raises ItemNotFound if no such project exists
:param project_name: str: name of the project to delete
:return:
]
variable[project] assign[=] call[name[self]._get_project_for_name, parameter[name[project_name]]]
call[name[project].delete, parameter[]]
call[name[self].clear_project_cache, parameter[]] | keyword[def] identifier[delete_project] ( identifier[self] , identifier[project_name] ):
literal[string]
identifier[project] = identifier[self] . identifier[_get_project_for_name] ( identifier[project_name] )
identifier[project] . identifier[delete] ()
identifier[self] . identifier[clear_project_cache] () | def delete_project(self, project_name):
"""
Delete a project with the specified name. Raises ItemNotFound if no such project exists
:param project_name: str: name of the project to delete
:return:
"""
project = self._get_project_for_name(project_name)
project.delete()
self.clear_project_cache() |
def make_owner(user):
'''
Makes the given user a owner and tutor.
'''
tutor_group, owner_group = _get_user_groups()
user.is_staff = True
user.is_superuser = False
user.save()
owner_group.user_set.add(user)
owner_group.save()
tutor_group.user_set.add(user)
tutor_group.save() | def function[make_owner, parameter[user]]:
constant[
Makes the given user a owner and tutor.
]
<ast.Tuple object at 0x7da1b285a5f0> assign[=] call[name[_get_user_groups], parameter[]]
name[user].is_staff assign[=] constant[True]
name[user].is_superuser assign[=] constant[False]
call[name[user].save, parameter[]]
call[name[owner_group].user_set.add, parameter[name[user]]]
call[name[owner_group].save, parameter[]]
call[name[tutor_group].user_set.add, parameter[name[user]]]
call[name[tutor_group].save, parameter[]] | keyword[def] identifier[make_owner] ( identifier[user] ):
literal[string]
identifier[tutor_group] , identifier[owner_group] = identifier[_get_user_groups] ()
identifier[user] . identifier[is_staff] = keyword[True]
identifier[user] . identifier[is_superuser] = keyword[False]
identifier[user] . identifier[save] ()
identifier[owner_group] . identifier[user_set] . identifier[add] ( identifier[user] )
identifier[owner_group] . identifier[save] ()
identifier[tutor_group] . identifier[user_set] . identifier[add] ( identifier[user] )
identifier[tutor_group] . identifier[save] () | def make_owner(user):
"""
Makes the given user a owner and tutor.
"""
(tutor_group, owner_group) = _get_user_groups()
user.is_staff = True
user.is_superuser = False
user.save()
owner_group.user_set.add(user)
owner_group.save()
tutor_group.user_set.add(user)
tutor_group.save() |
async def evaluate_trained_model(state):
"""Evaluate the most recently trained model against the current best model.
Args:
state: the RL loop State instance.
"""
return await evaluate_model(
state.train_model_path, state.best_model_path,
os.path.join(fsdb.eval_dir(), state.train_model_name), state.seed) | <ast.AsyncFunctionDef object at 0x7da20cabd840> | keyword[async] keyword[def] identifier[evaluate_trained_model] ( identifier[state] ):
literal[string]
keyword[return] keyword[await] identifier[evaluate_model] (
identifier[state] . identifier[train_model_path] , identifier[state] . identifier[best_model_path] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[fsdb] . identifier[eval_dir] (), identifier[state] . identifier[train_model_name] ), identifier[state] . identifier[seed] ) | async def evaluate_trained_model(state):
"""Evaluate the most recently trained model against the current best model.
Args:
state: the RL loop State instance.
"""
return await evaluate_model(state.train_model_path, state.best_model_path, os.path.join(fsdb.eval_dir(), state.train_model_name), state.seed) |
def todataframe(table, index=None, exclude=None, columns=None,
coerce_float=False, nrows=None):
"""
Load data from the given `table` into a
`pandas <http://pandas.pydata.org/>`_ DataFrame. E.g.::
>>> import petl as etl
>>> table = [('foo', 'bar', 'baz'),
... ('apples', 1, 2.5),
... ('oranges', 3, 4.4),
... ('pears', 7, .1)]
>>> df = etl.todataframe(table)
>>> df
foo bar baz
0 apples 1 2.5
1 oranges 3 4.4
2 pears 7 0.1
"""
import pandas as pd
l = list(table)
data = l[1:]
if columns is None:
columns = l[0]
return pd.DataFrame.from_records(data, index=index, exclude=exclude,
columns=columns, coerce_float=coerce_float,
nrows=nrows) | def function[todataframe, parameter[table, index, exclude, columns, coerce_float, nrows]]:
constant[
Load data from the given `table` into a
`pandas <http://pandas.pydata.org/>`_ DataFrame. E.g.::
>>> import petl as etl
>>> table = [('foo', 'bar', 'baz'),
... ('apples', 1, 2.5),
... ('oranges', 3, 4.4),
... ('pears', 7, .1)]
>>> df = etl.todataframe(table)
>>> df
foo bar baz
0 apples 1 2.5
1 oranges 3 4.4
2 pears 7 0.1
]
import module[pandas] as alias[pd]
variable[l] assign[=] call[name[list], parameter[name[table]]]
variable[data] assign[=] call[name[l]][<ast.Slice object at 0x7da204347520>]
if compare[name[columns] is constant[None]] begin[:]
variable[columns] assign[=] call[name[l]][constant[0]]
return[call[name[pd].DataFrame.from_records, parameter[name[data]]]] | keyword[def] identifier[todataframe] ( identifier[table] , identifier[index] = keyword[None] , identifier[exclude] = keyword[None] , identifier[columns] = keyword[None] ,
identifier[coerce_float] = keyword[False] , identifier[nrows] = keyword[None] ):
literal[string]
keyword[import] identifier[pandas] keyword[as] identifier[pd]
identifier[l] = identifier[list] ( identifier[table] )
identifier[data] = identifier[l] [ literal[int] :]
keyword[if] identifier[columns] keyword[is] keyword[None] :
identifier[columns] = identifier[l] [ literal[int] ]
keyword[return] identifier[pd] . identifier[DataFrame] . identifier[from_records] ( identifier[data] , identifier[index] = identifier[index] , identifier[exclude] = identifier[exclude] ,
identifier[columns] = identifier[columns] , identifier[coerce_float] = identifier[coerce_float] ,
identifier[nrows] = identifier[nrows] ) | def todataframe(table, index=None, exclude=None, columns=None, coerce_float=False, nrows=None):
"""
Load data from the given `table` into a
`pandas <http://pandas.pydata.org/>`_ DataFrame. E.g.::
>>> import petl as etl
>>> table = [('foo', 'bar', 'baz'),
... ('apples', 1, 2.5),
... ('oranges', 3, 4.4),
... ('pears', 7, .1)]
>>> df = etl.todataframe(table)
>>> df
foo bar baz
0 apples 1 2.5
1 oranges 3 4.4
2 pears 7 0.1
"""
import pandas as pd
l = list(table)
data = l[1:]
if columns is None:
columns = l[0] # depends on [control=['if'], data=['columns']]
return pd.DataFrame.from_records(data, index=index, exclude=exclude, columns=columns, coerce_float=coerce_float, nrows=nrows) |
def _attrs_to_init_script(
attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc
):
"""
Return a script of an initializer for *attrs* and a dict of globals.
The globals are expected by the generated script.
If *frozen* is True, we cannot set the attributes directly so we use
a cached ``object.__setattr__``.
"""
lines = []
any_slot_ancestors = any(
_is_slot_attr(a.name, base_attr_map) for a in attrs
)
if frozen is True:
if slots is True:
lines.append(
# Circumvent the __setattr__ descriptor to save one lookup per
# assignment.
# Note _setattr will be used again below if cache_hash is True
"_setattr = _cached_setattr.__get__(self, self.__class__)"
)
def fmt_setter(attr_name, value_var):
return "_setattr('%(attr_name)s', %(value_var)s)" % {
"attr_name": attr_name,
"value_var": value_var,
}
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % {
"attr_name": attr_name,
"value_var": value_var,
"conv": conv_name,
}
else:
# Dict frozen classes assign directly to __dict__.
# But only if the attribute doesn't come from an ancestor slot
# class.
# Note _inst_dict will be used again below if cache_hash is True
lines.append("_inst_dict = self.__dict__")
if any_slot_ancestors:
lines.append(
# Circumvent the __setattr__ descriptor to save one lookup
# per assignment.
"_setattr = _cached_setattr.__get__(self, self.__class__)"
)
def fmt_setter(attr_name, value_var):
if _is_slot_attr(attr_name, base_attr_map):
res = "_setattr('%(attr_name)s', %(value_var)s)" % {
"attr_name": attr_name,
"value_var": value_var,
}
else:
res = "_inst_dict['%(attr_name)s'] = %(value_var)s" % {
"attr_name": attr_name,
"value_var": value_var,
}
return res
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
if _is_slot_attr(attr_name, base_attr_map):
tmpl = "_setattr('%(attr_name)s', %(c)s(%(value_var)s))"
else:
tmpl = "_inst_dict['%(attr_name)s'] = %(c)s(%(value_var)s)"
return tmpl % {
"attr_name": attr_name,
"value_var": value_var,
"c": conv_name,
}
else:
# Not frozen.
def fmt_setter(attr_name, value):
return "self.%(attr_name)s = %(value)s" % {
"attr_name": attr_name,
"value": value,
}
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
return "self.%(attr_name)s = %(conv)s(%(value_var)s)" % {
"attr_name": attr_name,
"value_var": value_var,
"conv": conv_name,
}
args = []
kw_only_args = []
attrs_to_validate = []
# This is a dictionary of names to validator and converter callables.
# Injecting this into __init__ globals lets us avoid lookups.
names_for_globals = {}
annotations = {"return": None}
for a in attrs:
if a.validator:
attrs_to_validate.append(a)
attr_name = a.name
arg_name = a.name.lstrip("_")
has_factory = isinstance(a.default, Factory)
if has_factory and a.default.takes_self:
maybe_self = "self"
else:
maybe_self = ""
if a.init is False:
if has_factory:
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
lines.append(
fmt_setter_with_converter(
attr_name,
init_factory_name + "({0})".format(maybe_self),
)
)
conv_name = _init_converter_pat.format(a.name)
names_for_globals[conv_name] = a.converter
else:
lines.append(
fmt_setter(
attr_name,
init_factory_name + "({0})".format(maybe_self),
)
)
names_for_globals[init_factory_name] = a.default.factory
else:
if a.converter is not None:
lines.append(
fmt_setter_with_converter(
attr_name,
"attr_dict['{attr_name}'].default".format(
attr_name=attr_name
),
)
)
conv_name = _init_converter_pat.format(a.name)
names_for_globals[conv_name] = a.converter
else:
lines.append(
fmt_setter(
attr_name,
"attr_dict['{attr_name}'].default".format(
attr_name=attr_name
),
)
)
elif a.default is not NOTHING and not has_factory:
arg = "{arg_name}=attr_dict['{attr_name}'].default".format(
arg_name=arg_name, attr_name=attr_name
)
if a.kw_only:
kw_only_args.append(arg)
else:
args.append(arg)
if a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, arg_name))
names_for_globals[
_init_converter_pat.format(a.name)
] = a.converter
else:
lines.append(fmt_setter(attr_name, arg_name))
elif has_factory:
arg = "{arg_name}=NOTHING".format(arg_name=arg_name)
if a.kw_only:
kw_only_args.append(arg)
else:
args.append(arg)
lines.append(
"if {arg_name} is not NOTHING:".format(arg_name=arg_name)
)
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
lines.append(
" " + fmt_setter_with_converter(attr_name, arg_name)
)
lines.append("else:")
lines.append(
" "
+ fmt_setter_with_converter(
attr_name,
init_factory_name + "({0})".format(maybe_self),
)
)
names_for_globals[
_init_converter_pat.format(a.name)
] = a.converter
else:
lines.append(" " + fmt_setter(attr_name, arg_name))
lines.append("else:")
lines.append(
" "
+ fmt_setter(
attr_name,
init_factory_name + "({0})".format(maybe_self),
)
)
names_for_globals[init_factory_name] = a.default.factory
else:
if a.kw_only:
kw_only_args.append(arg_name)
else:
args.append(arg_name)
if a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, arg_name))
names_for_globals[
_init_converter_pat.format(a.name)
] = a.converter
else:
lines.append(fmt_setter(attr_name, arg_name))
if a.init is True and a.converter is None and a.type is not None:
annotations[arg_name] = a.type
if attrs_to_validate: # we can skip this if there are no validators.
names_for_globals["_config"] = _config
lines.append("if _config._run_validators is True:")
for a in attrs_to_validate:
val_name = "__attr_validator_{}".format(a.name)
attr_name = "__attr_{}".format(a.name)
lines.append(
" {}(self, {}, self.{})".format(val_name, attr_name, a.name)
)
names_for_globals[val_name] = a.validator
names_for_globals[attr_name] = a
if post_init:
lines.append("self.__attrs_post_init__()")
# because this is set only after __attrs_post_init is called, a crash
# will result if post-init tries to access the hash code. This seemed
# preferable to setting this beforehand, in which case alteration to
# field values during post-init combined with post-init accessing the
# hash code would result in silent bugs.
if cache_hash:
if frozen:
if slots:
# if frozen and slots, then _setattr defined above
init_hash_cache = "_setattr('%s', %s)"
else:
# if frozen and not slots, then _inst_dict defined above
init_hash_cache = "_inst_dict['%s'] = %s"
else:
init_hash_cache = "self.%s = %s"
lines.append(init_hash_cache % (_hash_cache_field, "None"))
# For exceptions we rely on BaseException.__init__ for proper
# initialization.
if is_exc:
vals = ",".join("self." + a.name for a in attrs if a.init)
lines.append("BaseException.__init__(self, %s)" % (vals,))
args = ", ".join(args)
if kw_only_args:
if PY2:
raise PythonTooOldError(
"Keyword-only arguments only work on Python 3 and later."
)
args += "{leading_comma}*, {kw_only_args}".format(
leading_comma=", " if args else "",
kw_only_args=", ".join(kw_only_args),
)
return (
"""\
def __init__(self, {args}):
{lines}
""".format(
args=args, lines="\n ".join(lines) if lines else "pass"
),
names_for_globals,
annotations,
) | def function[_attrs_to_init_script, parameter[attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc]]:
constant[
Return a script of an initializer for *attrs* and a dict of globals.
The globals are expected by the generated script.
If *frozen* is True, we cannot set the attributes directly so we use
a cached ``object.__setattr__``.
]
variable[lines] assign[=] list[[]]
variable[any_slot_ancestors] assign[=] call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b1ebf760>]]
if compare[name[frozen] is constant[True]] begin[:]
if compare[name[slots] is constant[True]] begin[:]
call[name[lines].append, parameter[constant[_setattr = _cached_setattr.__get__(self, self.__class__)]]]
def function[fmt_setter, parameter[attr_name, value_var]]:
return[binary_operation[constant[_setattr('%(attr_name)s', %(value_var)s)] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b1ebf160>, <ast.Constant object at 0x7da1b1ebf130>], [<ast.Name object at 0x7da1b1ebf100>, <ast.Name object at 0x7da1b1ebf0d0>]]]]
def function[fmt_setter_with_converter, parameter[attr_name, value_var]]:
variable[conv_name] assign[=] call[name[_init_converter_pat].format, parameter[name[attr_name]]]
return[binary_operation[constant[_setattr('%(attr_name)s', %(conv)s(%(value_var)s))] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b1ebedd0>, <ast.Constant object at 0x7da1b1ebeda0>, <ast.Constant object at 0x7da1b1ebed70>], [<ast.Name object at 0x7da1b1ebed40>, <ast.Name object at 0x7da1b1ebed10>, <ast.Name object at 0x7da1b1ebece0>]]]]
variable[args] assign[=] list[[]]
variable[kw_only_args] assign[=] list[[]]
variable[attrs_to_validate] assign[=] list[[]]
variable[names_for_globals] assign[=] dictionary[[], []]
variable[annotations] assign[=] dictionary[[<ast.Constant object at 0x7da1b1ebd510>], [<ast.Constant object at 0x7da1b1ebd4e0>]]
for taget[name[a]] in starred[name[attrs]] begin[:]
if name[a].validator begin[:]
call[name[attrs_to_validate].append, parameter[name[a]]]
variable[attr_name] assign[=] name[a].name
variable[arg_name] assign[=] call[name[a].name.lstrip, parameter[constant[_]]]
variable[has_factory] assign[=] call[name[isinstance], parameter[name[a].default, name[Factory]]]
if <ast.BoolOp object at 0x7da1b1ebcee0> begin[:]
variable[maybe_self] assign[=] constant[self]
if compare[name[a].init is constant[False]] begin[:]
if name[has_factory] begin[:]
variable[init_factory_name] assign[=] call[name[_init_factory_pat].format, parameter[name[a].name]]
if compare[name[a].converter is_not constant[None]] begin[:]
call[name[lines].append, parameter[call[name[fmt_setter_with_converter], parameter[name[attr_name], binary_operation[name[init_factory_name] + call[constant[({0})].format, parameter[name[maybe_self]]]]]]]]
variable[conv_name] assign[=] call[name[_init_converter_pat].format, parameter[name[a].name]]
call[name[names_for_globals]][name[conv_name]] assign[=] name[a].converter
call[name[names_for_globals]][name[init_factory_name]] assign[=] name[a].default.factory
if <ast.BoolOp object at 0x7da18ede7790> begin[:]
call[name[annotations]][name[arg_name]] assign[=] name[a].type
if name[attrs_to_validate] begin[:]
call[name[names_for_globals]][constant[_config]] assign[=] name[_config]
call[name[lines].append, parameter[constant[if _config._run_validators is True:]]]
for taget[name[a]] in starred[name[attrs_to_validate]] begin[:]
variable[val_name] assign[=] call[constant[__attr_validator_{}].format, parameter[name[a].name]]
variable[attr_name] assign[=] call[constant[__attr_{}].format, parameter[name[a].name]]
call[name[lines].append, parameter[call[constant[ {}(self, {}, self.{})].format, parameter[name[val_name], name[attr_name], name[a].name]]]]
call[name[names_for_globals]][name[val_name]] assign[=] name[a].validator
call[name[names_for_globals]][name[attr_name]] assign[=] name[a]
if name[post_init] begin[:]
call[name[lines].append, parameter[constant[self.__attrs_post_init__()]]]
if name[cache_hash] begin[:]
if name[frozen] begin[:]
if name[slots] begin[:]
variable[init_hash_cache] assign[=] constant[_setattr('%s', %s)]
call[name[lines].append, parameter[binary_operation[name[init_hash_cache] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bcc8640>, <ast.Constant object at 0x7da18bcc8f70>]]]]]
if name[is_exc] begin[:]
variable[vals] assign[=] call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da18bcc8460>]]
call[name[lines].append, parameter[binary_operation[constant[BaseException.__init__(self, %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bcc9060>]]]]]
variable[args] assign[=] call[constant[, ].join, parameter[name[args]]]
if name[kw_only_args] begin[:]
if name[PY2] begin[:]
<ast.Raise object at 0x7da18bcc9960>
<ast.AugAssign object at 0x7da18bccb040>
return[tuple[[<ast.Call object at 0x7da18bccb100>, <ast.Name object at 0x7da18bcc93f0>, <ast.Name object at 0x7da18bccb6d0>]]] | keyword[def] identifier[_attrs_to_init_script] (
identifier[attrs] , identifier[frozen] , identifier[slots] , identifier[post_init] , identifier[cache_hash] , identifier[base_attr_map] , identifier[is_exc]
):
literal[string]
identifier[lines] =[]
identifier[any_slot_ancestors] = identifier[any] (
identifier[_is_slot_attr] ( identifier[a] . identifier[name] , identifier[base_attr_map] ) keyword[for] identifier[a] keyword[in] identifier[attrs]
)
keyword[if] identifier[frozen] keyword[is] keyword[True] :
keyword[if] identifier[slots] keyword[is] keyword[True] :
identifier[lines] . identifier[append] (
literal[string]
)
keyword[def] identifier[fmt_setter] ( identifier[attr_name] , identifier[value_var] ):
keyword[return] literal[string] %{
literal[string] : identifier[attr_name] ,
literal[string] : identifier[value_var] ,
}
keyword[def] identifier[fmt_setter_with_converter] ( identifier[attr_name] , identifier[value_var] ):
identifier[conv_name] = identifier[_init_converter_pat] . identifier[format] ( identifier[attr_name] )
keyword[return] literal[string] %{
literal[string] : identifier[attr_name] ,
literal[string] : identifier[value_var] ,
literal[string] : identifier[conv_name] ,
}
keyword[else] :
identifier[lines] . identifier[append] ( literal[string] )
keyword[if] identifier[any_slot_ancestors] :
identifier[lines] . identifier[append] (
literal[string]
)
keyword[def] identifier[fmt_setter] ( identifier[attr_name] , identifier[value_var] ):
keyword[if] identifier[_is_slot_attr] ( identifier[attr_name] , identifier[base_attr_map] ):
identifier[res] = literal[string] %{
literal[string] : identifier[attr_name] ,
literal[string] : identifier[value_var] ,
}
keyword[else] :
identifier[res] = literal[string] %{
literal[string] : identifier[attr_name] ,
literal[string] : identifier[value_var] ,
}
keyword[return] identifier[res]
keyword[def] identifier[fmt_setter_with_converter] ( identifier[attr_name] , identifier[value_var] ):
identifier[conv_name] = identifier[_init_converter_pat] . identifier[format] ( identifier[attr_name] )
keyword[if] identifier[_is_slot_attr] ( identifier[attr_name] , identifier[base_attr_map] ):
identifier[tmpl] = literal[string]
keyword[else] :
identifier[tmpl] = literal[string]
keyword[return] identifier[tmpl] %{
literal[string] : identifier[attr_name] ,
literal[string] : identifier[value_var] ,
literal[string] : identifier[conv_name] ,
}
keyword[else] :
keyword[def] identifier[fmt_setter] ( identifier[attr_name] , identifier[value] ):
keyword[return] literal[string] %{
literal[string] : identifier[attr_name] ,
literal[string] : identifier[value] ,
}
keyword[def] identifier[fmt_setter_with_converter] ( identifier[attr_name] , identifier[value_var] ):
identifier[conv_name] = identifier[_init_converter_pat] . identifier[format] ( identifier[attr_name] )
keyword[return] literal[string] %{
literal[string] : identifier[attr_name] ,
literal[string] : identifier[value_var] ,
literal[string] : identifier[conv_name] ,
}
identifier[args] =[]
identifier[kw_only_args] =[]
identifier[attrs_to_validate] =[]
identifier[names_for_globals] ={}
identifier[annotations] ={ literal[string] : keyword[None] }
keyword[for] identifier[a] keyword[in] identifier[attrs] :
keyword[if] identifier[a] . identifier[validator] :
identifier[attrs_to_validate] . identifier[append] ( identifier[a] )
identifier[attr_name] = identifier[a] . identifier[name]
identifier[arg_name] = identifier[a] . identifier[name] . identifier[lstrip] ( literal[string] )
identifier[has_factory] = identifier[isinstance] ( identifier[a] . identifier[default] , identifier[Factory] )
keyword[if] identifier[has_factory] keyword[and] identifier[a] . identifier[default] . identifier[takes_self] :
identifier[maybe_self] = literal[string]
keyword[else] :
identifier[maybe_self] = literal[string]
keyword[if] identifier[a] . identifier[init] keyword[is] keyword[False] :
keyword[if] identifier[has_factory] :
identifier[init_factory_name] = identifier[_init_factory_pat] . identifier[format] ( identifier[a] . identifier[name] )
keyword[if] identifier[a] . identifier[converter] keyword[is] keyword[not] keyword[None] :
identifier[lines] . identifier[append] (
identifier[fmt_setter_with_converter] (
identifier[attr_name] ,
identifier[init_factory_name] + literal[string] . identifier[format] ( identifier[maybe_self] ),
)
)
identifier[conv_name] = identifier[_init_converter_pat] . identifier[format] ( identifier[a] . identifier[name] )
identifier[names_for_globals] [ identifier[conv_name] ]= identifier[a] . identifier[converter]
keyword[else] :
identifier[lines] . identifier[append] (
identifier[fmt_setter] (
identifier[attr_name] ,
identifier[init_factory_name] + literal[string] . identifier[format] ( identifier[maybe_self] ),
)
)
identifier[names_for_globals] [ identifier[init_factory_name] ]= identifier[a] . identifier[default] . identifier[factory]
keyword[else] :
keyword[if] identifier[a] . identifier[converter] keyword[is] keyword[not] keyword[None] :
identifier[lines] . identifier[append] (
identifier[fmt_setter_with_converter] (
identifier[attr_name] ,
literal[string] . identifier[format] (
identifier[attr_name] = identifier[attr_name]
),
)
)
identifier[conv_name] = identifier[_init_converter_pat] . identifier[format] ( identifier[a] . identifier[name] )
identifier[names_for_globals] [ identifier[conv_name] ]= identifier[a] . identifier[converter]
keyword[else] :
identifier[lines] . identifier[append] (
identifier[fmt_setter] (
identifier[attr_name] ,
literal[string] . identifier[format] (
identifier[attr_name] = identifier[attr_name]
),
)
)
keyword[elif] identifier[a] . identifier[default] keyword[is] keyword[not] identifier[NOTHING] keyword[and] keyword[not] identifier[has_factory] :
identifier[arg] = literal[string] . identifier[format] (
identifier[arg_name] = identifier[arg_name] , identifier[attr_name] = identifier[attr_name]
)
keyword[if] identifier[a] . identifier[kw_only] :
identifier[kw_only_args] . identifier[append] ( identifier[arg] )
keyword[else] :
identifier[args] . identifier[append] ( identifier[arg] )
keyword[if] identifier[a] . identifier[converter] keyword[is] keyword[not] keyword[None] :
identifier[lines] . identifier[append] ( identifier[fmt_setter_with_converter] ( identifier[attr_name] , identifier[arg_name] ))
identifier[names_for_globals] [
identifier[_init_converter_pat] . identifier[format] ( identifier[a] . identifier[name] )
]= identifier[a] . identifier[converter]
keyword[else] :
identifier[lines] . identifier[append] ( identifier[fmt_setter] ( identifier[attr_name] , identifier[arg_name] ))
keyword[elif] identifier[has_factory] :
identifier[arg] = literal[string] . identifier[format] ( identifier[arg_name] = identifier[arg_name] )
keyword[if] identifier[a] . identifier[kw_only] :
identifier[kw_only_args] . identifier[append] ( identifier[arg] )
keyword[else] :
identifier[args] . identifier[append] ( identifier[arg] )
identifier[lines] . identifier[append] (
literal[string] . identifier[format] ( identifier[arg_name] = identifier[arg_name] )
)
identifier[init_factory_name] = identifier[_init_factory_pat] . identifier[format] ( identifier[a] . identifier[name] )
keyword[if] identifier[a] . identifier[converter] keyword[is] keyword[not] keyword[None] :
identifier[lines] . identifier[append] (
literal[string] + identifier[fmt_setter_with_converter] ( identifier[attr_name] , identifier[arg_name] )
)
identifier[lines] . identifier[append] ( literal[string] )
identifier[lines] . identifier[append] (
literal[string]
+ identifier[fmt_setter_with_converter] (
identifier[attr_name] ,
identifier[init_factory_name] + literal[string] . identifier[format] ( identifier[maybe_self] ),
)
)
identifier[names_for_globals] [
identifier[_init_converter_pat] . identifier[format] ( identifier[a] . identifier[name] )
]= identifier[a] . identifier[converter]
keyword[else] :
identifier[lines] . identifier[append] ( literal[string] + identifier[fmt_setter] ( identifier[attr_name] , identifier[arg_name] ))
identifier[lines] . identifier[append] ( literal[string] )
identifier[lines] . identifier[append] (
literal[string]
+ identifier[fmt_setter] (
identifier[attr_name] ,
identifier[init_factory_name] + literal[string] . identifier[format] ( identifier[maybe_self] ),
)
)
identifier[names_for_globals] [ identifier[init_factory_name] ]= identifier[a] . identifier[default] . identifier[factory]
keyword[else] :
keyword[if] identifier[a] . identifier[kw_only] :
identifier[kw_only_args] . identifier[append] ( identifier[arg_name] )
keyword[else] :
identifier[args] . identifier[append] ( identifier[arg_name] )
keyword[if] identifier[a] . identifier[converter] keyword[is] keyword[not] keyword[None] :
identifier[lines] . identifier[append] ( identifier[fmt_setter_with_converter] ( identifier[attr_name] , identifier[arg_name] ))
identifier[names_for_globals] [
identifier[_init_converter_pat] . identifier[format] ( identifier[a] . identifier[name] )
]= identifier[a] . identifier[converter]
keyword[else] :
identifier[lines] . identifier[append] ( identifier[fmt_setter] ( identifier[attr_name] , identifier[arg_name] ))
keyword[if] identifier[a] . identifier[init] keyword[is] keyword[True] keyword[and] identifier[a] . identifier[converter] keyword[is] keyword[None] keyword[and] identifier[a] . identifier[type] keyword[is] keyword[not] keyword[None] :
identifier[annotations] [ identifier[arg_name] ]= identifier[a] . identifier[type]
keyword[if] identifier[attrs_to_validate] :
identifier[names_for_globals] [ literal[string] ]= identifier[_config]
identifier[lines] . identifier[append] ( literal[string] )
keyword[for] identifier[a] keyword[in] identifier[attrs_to_validate] :
identifier[val_name] = literal[string] . identifier[format] ( identifier[a] . identifier[name] )
identifier[attr_name] = literal[string] . identifier[format] ( identifier[a] . identifier[name] )
identifier[lines] . identifier[append] (
literal[string] . identifier[format] ( identifier[val_name] , identifier[attr_name] , identifier[a] . identifier[name] )
)
identifier[names_for_globals] [ identifier[val_name] ]= identifier[a] . identifier[validator]
identifier[names_for_globals] [ identifier[attr_name] ]= identifier[a]
keyword[if] identifier[post_init] :
identifier[lines] . identifier[append] ( literal[string] )
keyword[if] identifier[cache_hash] :
keyword[if] identifier[frozen] :
keyword[if] identifier[slots] :
identifier[init_hash_cache] = literal[string]
keyword[else] :
identifier[init_hash_cache] = literal[string]
keyword[else] :
identifier[init_hash_cache] = literal[string]
identifier[lines] . identifier[append] ( identifier[init_hash_cache] %( identifier[_hash_cache_field] , literal[string] ))
keyword[if] identifier[is_exc] :
identifier[vals] = literal[string] . identifier[join] ( literal[string] + identifier[a] . identifier[name] keyword[for] identifier[a] keyword[in] identifier[attrs] keyword[if] identifier[a] . identifier[init] )
identifier[lines] . identifier[append] ( literal[string] %( identifier[vals] ,))
identifier[args] = literal[string] . identifier[join] ( identifier[args] )
keyword[if] identifier[kw_only_args] :
keyword[if] identifier[PY2] :
keyword[raise] identifier[PythonTooOldError] (
literal[string]
)
identifier[args] += literal[string] . identifier[format] (
identifier[leading_comma] = literal[string] keyword[if] identifier[args] keyword[else] literal[string] ,
identifier[kw_only_args] = literal[string] . identifier[join] ( identifier[kw_only_args] ),
)
keyword[return] (
literal[string] . identifier[format] (
identifier[args] = identifier[args] , identifier[lines] = literal[string] . identifier[join] ( identifier[lines] ) keyword[if] identifier[lines] keyword[else] literal[string]
),
identifier[names_for_globals] ,
identifier[annotations] ,
) | def _attrs_to_init_script(attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc):
"""
Return a script of an initializer for *attrs* and a dict of globals.
The globals are expected by the generated script.
If *frozen* is True, we cannot set the attributes directly so we use
a cached ``object.__setattr__``.
"""
lines = []
any_slot_ancestors = any((_is_slot_attr(a.name, base_attr_map) for a in attrs))
if frozen is True:
if slots is True:
# Circumvent the __setattr__ descriptor to save one lookup per
# assignment.
# Note _setattr will be used again below if cache_hash is True
lines.append('_setattr = _cached_setattr.__get__(self, self.__class__)')
def fmt_setter(attr_name, value_var):
return "_setattr('%(attr_name)s', %(value_var)s)" % {'attr_name': attr_name, 'value_var': value_var}
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % {'attr_name': attr_name, 'value_var': value_var, 'conv': conv_name} # depends on [control=['if'], data=[]]
else:
# Dict frozen classes assign directly to __dict__.
# But only if the attribute doesn't come from an ancestor slot
# class.
# Note _inst_dict will be used again below if cache_hash is True
lines.append('_inst_dict = self.__dict__')
if any_slot_ancestors:
# Circumvent the __setattr__ descriptor to save one lookup
# per assignment.
lines.append('_setattr = _cached_setattr.__get__(self, self.__class__)') # depends on [control=['if'], data=[]]
def fmt_setter(attr_name, value_var):
if _is_slot_attr(attr_name, base_attr_map):
res = "_setattr('%(attr_name)s', %(value_var)s)" % {'attr_name': attr_name, 'value_var': value_var} # depends on [control=['if'], data=[]]
else:
res = "_inst_dict['%(attr_name)s'] = %(value_var)s" % {'attr_name': attr_name, 'value_var': value_var}
return res
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
if _is_slot_attr(attr_name, base_attr_map):
tmpl = "_setattr('%(attr_name)s', %(c)s(%(value_var)s))" # depends on [control=['if'], data=[]]
else:
tmpl = "_inst_dict['%(attr_name)s'] = %(c)s(%(value_var)s)"
return tmpl % {'attr_name': attr_name, 'value_var': value_var, 'c': conv_name} # depends on [control=['if'], data=[]]
else:
# Not frozen.
def fmt_setter(attr_name, value):
return 'self.%(attr_name)s = %(value)s' % {'attr_name': attr_name, 'value': value}
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
return 'self.%(attr_name)s = %(conv)s(%(value_var)s)' % {'attr_name': attr_name, 'value_var': value_var, 'conv': conv_name}
args = []
kw_only_args = []
attrs_to_validate = []
# This is a dictionary of names to validator and converter callables.
# Injecting this into __init__ globals lets us avoid lookups.
names_for_globals = {}
annotations = {'return': None}
for a in attrs:
if a.validator:
attrs_to_validate.append(a) # depends on [control=['if'], data=[]]
attr_name = a.name
arg_name = a.name.lstrip('_')
has_factory = isinstance(a.default, Factory)
if has_factory and a.default.takes_self:
maybe_self = 'self' # depends on [control=['if'], data=[]]
else:
maybe_self = ''
if a.init is False:
if has_factory:
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, init_factory_name + '({0})'.format(maybe_self)))
conv_name = _init_converter_pat.format(a.name)
names_for_globals[conv_name] = a.converter # depends on [control=['if'], data=[]]
else:
lines.append(fmt_setter(attr_name, init_factory_name + '({0})'.format(maybe_self)))
names_for_globals[init_factory_name] = a.default.factory # depends on [control=['if'], data=[]]
elif a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, "attr_dict['{attr_name}'].default".format(attr_name=attr_name)))
conv_name = _init_converter_pat.format(a.name)
names_for_globals[conv_name] = a.converter # depends on [control=['if'], data=[]]
else:
lines.append(fmt_setter(attr_name, "attr_dict['{attr_name}'].default".format(attr_name=attr_name))) # depends on [control=['if'], data=[]]
elif a.default is not NOTHING and (not has_factory):
arg = "{arg_name}=attr_dict['{attr_name}'].default".format(arg_name=arg_name, attr_name=attr_name)
if a.kw_only:
kw_only_args.append(arg) # depends on [control=['if'], data=[]]
else:
args.append(arg)
if a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, arg_name))
names_for_globals[_init_converter_pat.format(a.name)] = a.converter # depends on [control=['if'], data=[]]
else:
lines.append(fmt_setter(attr_name, arg_name)) # depends on [control=['if'], data=[]]
elif has_factory:
arg = '{arg_name}=NOTHING'.format(arg_name=arg_name)
if a.kw_only:
kw_only_args.append(arg) # depends on [control=['if'], data=[]]
else:
args.append(arg)
lines.append('if {arg_name} is not NOTHING:'.format(arg_name=arg_name))
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
lines.append(' ' + fmt_setter_with_converter(attr_name, arg_name))
lines.append('else:')
lines.append(' ' + fmt_setter_with_converter(attr_name, init_factory_name + '({0})'.format(maybe_self)))
names_for_globals[_init_converter_pat.format(a.name)] = a.converter # depends on [control=['if'], data=[]]
else:
lines.append(' ' + fmt_setter(attr_name, arg_name))
lines.append('else:')
lines.append(' ' + fmt_setter(attr_name, init_factory_name + '({0})'.format(maybe_self)))
names_for_globals[init_factory_name] = a.default.factory # depends on [control=['if'], data=[]]
else:
if a.kw_only:
kw_only_args.append(arg_name) # depends on [control=['if'], data=[]]
else:
args.append(arg_name)
if a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, arg_name))
names_for_globals[_init_converter_pat.format(a.name)] = a.converter # depends on [control=['if'], data=[]]
else:
lines.append(fmt_setter(attr_name, arg_name))
if a.init is True and a.converter is None and (a.type is not None):
annotations[arg_name] = a.type # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']]
if attrs_to_validate: # we can skip this if there are no validators.
names_for_globals['_config'] = _config
lines.append('if _config._run_validators is True:')
for a in attrs_to_validate:
val_name = '__attr_validator_{}'.format(a.name)
attr_name = '__attr_{}'.format(a.name)
lines.append(' {}(self, {}, self.{})'.format(val_name, attr_name, a.name))
names_for_globals[val_name] = a.validator
names_for_globals[attr_name] = a # depends on [control=['for'], data=['a']] # depends on [control=['if'], data=[]]
if post_init:
lines.append('self.__attrs_post_init__()') # depends on [control=['if'], data=[]]
# because this is set only after __attrs_post_init is called, a crash
# will result if post-init tries to access the hash code. This seemed
# preferable to setting this beforehand, in which case alteration to
# field values during post-init combined with post-init accessing the
# hash code would result in silent bugs.
if cache_hash:
if frozen:
if slots:
# if frozen and slots, then _setattr defined above
init_hash_cache = "_setattr('%s', %s)" # depends on [control=['if'], data=[]]
else:
# if frozen and not slots, then _inst_dict defined above
init_hash_cache = "_inst_dict['%s'] = %s" # depends on [control=['if'], data=[]]
else:
init_hash_cache = 'self.%s = %s'
lines.append(init_hash_cache % (_hash_cache_field, 'None')) # depends on [control=['if'], data=[]]
# For exceptions we rely on BaseException.__init__ for proper
# initialization.
if is_exc:
vals = ','.join(('self.' + a.name for a in attrs if a.init))
lines.append('BaseException.__init__(self, %s)' % (vals,)) # depends on [control=['if'], data=[]]
args = ', '.join(args)
if kw_only_args:
if PY2:
raise PythonTooOldError('Keyword-only arguments only work on Python 3 and later.') # depends on [control=['if'], data=[]]
args += '{leading_comma}*, {kw_only_args}'.format(leading_comma=', ' if args else '', kw_only_args=', '.join(kw_only_args)) # depends on [control=['if'], data=[]]
return ('def __init__(self, {args}):\n {lines}\n'.format(args=args, lines='\n '.join(lines) if lines else 'pass'), names_for_globals, annotations) |
def get_ac_info_all(auth, url):
"""
function takes no input as input to RESTFUL call to HP IMC
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: list of dictionaries where each element of the list represents a single wireless controller which has been
discovered in the HPE IMC WSM module
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.wsm.acinfo import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> ac_info_all = get_ac_info_all(auth.creds, auth.url)
>>> assert type(ac_info_all) is list
>>> assert len(ac_info_all[0]) == 12
>>> assert 'hardwareVersion' in ac_info_all[0]
>>> assert 'ipAddress' in ac_info_all[0]
>>> assert 'label' in ac_info_all[0]
>>> assert 'macAddress' in ac_info_all[0]
>>> assert 'onlineApCount' in ac_info_all[0]
>>> assert 'onlineClientCount' in ac_info_all[0]
>>> assert 'pingStatus' in ac_info_all[0]
>>> assert 'serialId' in ac_info_all[0]
>>> assert 'softwareVersion' in ac_info_all[0]
>>> assert 'status' in ac_info_all[0]
>>> assert 'sysName' in ac_info_all[0]
>>> assert 'type' in ac_info_all[0]
"""
get_ac_info_all_url = "/imcrs/wlan/acInfo/queryAcBasicInfo"
f_url = url + get_ac_info_all_url
payload = None
r = requests.get(f_url, auth=auth,
headers=HEADERS) # creates the URL using the payload variable as the contents
# print(r.status_code)
try:
if r.status_code == 200:
if len(r.text) > 0:
return json.loads(r.text)['acBasicInfo']
except requests.exceptions.RequestException as e:
return "Error:\n" + str(e) + " get_ac_info_all: An Error has occured" | def function[get_ac_info_all, parameter[auth, url]]:
constant[
function takes no input as input to RESTFUL call to HP IMC
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: list of dictionaries where each element of the list represents a single wireless controller which has been
discovered in the HPE IMC WSM module
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.wsm.acinfo import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> ac_info_all = get_ac_info_all(auth.creds, auth.url)
>>> assert type(ac_info_all) is list
>>> assert len(ac_info_all[0]) == 12
>>> assert 'hardwareVersion' in ac_info_all[0]
>>> assert 'ipAddress' in ac_info_all[0]
>>> assert 'label' in ac_info_all[0]
>>> assert 'macAddress' in ac_info_all[0]
>>> assert 'onlineApCount' in ac_info_all[0]
>>> assert 'onlineClientCount' in ac_info_all[0]
>>> assert 'pingStatus' in ac_info_all[0]
>>> assert 'serialId' in ac_info_all[0]
>>> assert 'softwareVersion' in ac_info_all[0]
>>> assert 'status' in ac_info_all[0]
>>> assert 'sysName' in ac_info_all[0]
>>> assert 'type' in ac_info_all[0]
]
variable[get_ac_info_all_url] assign[=] constant[/imcrs/wlan/acInfo/queryAcBasicInfo]
variable[f_url] assign[=] binary_operation[name[url] + name[get_ac_info_all_url]]
variable[payload] assign[=] constant[None]
variable[r] assign[=] call[name[requests].get, parameter[name[f_url]]]
<ast.Try object at 0x7da1b26ad6f0> | keyword[def] identifier[get_ac_info_all] ( identifier[auth] , identifier[url] ):
literal[string]
identifier[get_ac_info_all_url] = literal[string]
identifier[f_url] = identifier[url] + identifier[get_ac_info_all_url]
identifier[payload] = keyword[None]
identifier[r] = identifier[requests] . identifier[get] ( identifier[f_url] , identifier[auth] = identifier[auth] ,
identifier[headers] = identifier[HEADERS] )
keyword[try] :
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
keyword[if] identifier[len] ( identifier[r] . identifier[text] )> literal[int] :
keyword[return] identifier[json] . identifier[loads] ( identifier[r] . identifier[text] )[ literal[string] ]
keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[e] :
keyword[return] literal[string] + identifier[str] ( identifier[e] )+ literal[string] | def get_ac_info_all(auth, url):
"""
function takes no input as input to RESTFUL call to HP IMC
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: list of dictionaries where each element of the list represents a single wireless controller which has been
discovered in the HPE IMC WSM module
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.wsm.acinfo import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> ac_info_all = get_ac_info_all(auth.creds, auth.url)
>>> assert type(ac_info_all) is list
>>> assert len(ac_info_all[0]) == 12
>>> assert 'hardwareVersion' in ac_info_all[0]
>>> assert 'ipAddress' in ac_info_all[0]
>>> assert 'label' in ac_info_all[0]
>>> assert 'macAddress' in ac_info_all[0]
>>> assert 'onlineApCount' in ac_info_all[0]
>>> assert 'onlineClientCount' in ac_info_all[0]
>>> assert 'pingStatus' in ac_info_all[0]
>>> assert 'serialId' in ac_info_all[0]
>>> assert 'softwareVersion' in ac_info_all[0]
>>> assert 'status' in ac_info_all[0]
>>> assert 'sysName' in ac_info_all[0]
>>> assert 'type' in ac_info_all[0]
"""
get_ac_info_all_url = '/imcrs/wlan/acInfo/queryAcBasicInfo'
f_url = url + get_ac_info_all_url
payload = None
r = requests.get(f_url, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents
# print(r.status_code)
try:
if r.status_code == 200:
if len(r.text) > 0:
return json.loads(r.text)['acBasicInfo'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except requests.exceptions.RequestException as e:
return 'Error:\n' + str(e) + ' get_ac_info_all: An Error has occured' # depends on [control=['except'], data=['e']] |
def post_request(self, container, resource=None, params=None, accept=None):
"""Send a POST request."""
url = self.make_url(container, resource)
headers = self._make_headers(accept)
try:
rsp = requests.post(url, data=params, headers=headers,
verify=self._verify, timeout=self._timeout)
except requests.exceptions.ConnectionError as e:
RestHttp._raise_conn_error(e)
if self._dbg_print:
self.__print_req('POST', rsp.url, headers, params)
return self._handle_response(rsp) | def function[post_request, parameter[self, container, resource, params, accept]]:
constant[Send a POST request.]
variable[url] assign[=] call[name[self].make_url, parameter[name[container], name[resource]]]
variable[headers] assign[=] call[name[self]._make_headers, parameter[name[accept]]]
<ast.Try object at 0x7da1b26af670>
if name[self]._dbg_print begin[:]
call[name[self].__print_req, parameter[constant[POST], name[rsp].url, name[headers], name[params]]]
return[call[name[self]._handle_response, parameter[name[rsp]]]] | keyword[def] identifier[post_request] ( identifier[self] , identifier[container] , identifier[resource] = keyword[None] , identifier[params] = keyword[None] , identifier[accept] = keyword[None] ):
literal[string]
identifier[url] = identifier[self] . identifier[make_url] ( identifier[container] , identifier[resource] )
identifier[headers] = identifier[self] . identifier[_make_headers] ( identifier[accept] )
keyword[try] :
identifier[rsp] = identifier[requests] . identifier[post] ( identifier[url] , identifier[data] = identifier[params] , identifier[headers] = identifier[headers] ,
identifier[verify] = identifier[self] . identifier[_verify] , identifier[timeout] = identifier[self] . identifier[_timeout] )
keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] keyword[as] identifier[e] :
identifier[RestHttp] . identifier[_raise_conn_error] ( identifier[e] )
keyword[if] identifier[self] . identifier[_dbg_print] :
identifier[self] . identifier[__print_req] ( literal[string] , identifier[rsp] . identifier[url] , identifier[headers] , identifier[params] )
keyword[return] identifier[self] . identifier[_handle_response] ( identifier[rsp] ) | def post_request(self, container, resource=None, params=None, accept=None):
"""Send a POST request."""
url = self.make_url(container, resource)
headers = self._make_headers(accept)
try:
rsp = requests.post(url, data=params, headers=headers, verify=self._verify, timeout=self._timeout) # depends on [control=['try'], data=[]]
except requests.exceptions.ConnectionError as e:
RestHttp._raise_conn_error(e) # depends on [control=['except'], data=['e']]
if self._dbg_print:
self.__print_req('POST', rsp.url, headers, params) # depends on [control=['if'], data=[]]
return self._handle_response(rsp) |
def _add_right(self, d):
'''
Adds the provided domino to the right end of the board.
:param Domino d: domino to add
:return: None
:raises EndsMismatchException: if the values do not match
'''
if not self:
self.board.append(d)
elif d.first == self.right_end():
self.board.append(d)
elif d.second == self.right_end():
self.board.append(d.inverted())
else:
raise dominoes.EndsMismatchException(
'{} cannot be added to the right of'
' the board - values do not match!'.format(d)
) | def function[_add_right, parameter[self, d]]:
constant[
Adds the provided domino to the right end of the board.
:param Domino d: domino to add
:return: None
:raises EndsMismatchException: if the values do not match
]
if <ast.UnaryOp object at 0x7da18bccada0> begin[:]
call[name[self].board.append, parameter[name[d]]] | keyword[def] identifier[_add_right] ( identifier[self] , identifier[d] ):
literal[string]
keyword[if] keyword[not] identifier[self] :
identifier[self] . identifier[board] . identifier[append] ( identifier[d] )
keyword[elif] identifier[d] . identifier[first] == identifier[self] . identifier[right_end] ():
identifier[self] . identifier[board] . identifier[append] ( identifier[d] )
keyword[elif] identifier[d] . identifier[second] == identifier[self] . identifier[right_end] ():
identifier[self] . identifier[board] . identifier[append] ( identifier[d] . identifier[inverted] ())
keyword[else] :
keyword[raise] identifier[dominoes] . identifier[EndsMismatchException] (
literal[string]
literal[string] . identifier[format] ( identifier[d] )
) | def _add_right(self, d):
"""
Adds the provided domino to the right end of the board.
:param Domino d: domino to add
:return: None
:raises EndsMismatchException: if the values do not match
"""
if not self:
self.board.append(d) # depends on [control=['if'], data=[]]
elif d.first == self.right_end():
self.board.append(d) # depends on [control=['if'], data=[]]
elif d.second == self.right_end():
self.board.append(d.inverted()) # depends on [control=['if'], data=[]]
else:
raise dominoes.EndsMismatchException('{} cannot be added to the right of the board - values do not match!'.format(d)) |
def _is_error(self):
'''
Is this is an error code?
:return:
'''
if self.exit_code:
msg = self.SUCCESS_EXIT_CODES.get(self.exit_code)
if msg:
log.info(msg)
msg = self.WARNING_EXIT_CODES.get(self.exit_code)
if msg:
log.warning(msg)
return self.exit_code not in self.SUCCESS_EXIT_CODES and self.exit_code not in self.WARNING_EXIT_CODES | def function[_is_error, parameter[self]]:
constant[
Is this is an error code?
:return:
]
if name[self].exit_code begin[:]
variable[msg] assign[=] call[name[self].SUCCESS_EXIT_CODES.get, parameter[name[self].exit_code]]
if name[msg] begin[:]
call[name[log].info, parameter[name[msg]]]
variable[msg] assign[=] call[name[self].WARNING_EXIT_CODES.get, parameter[name[self].exit_code]]
if name[msg] begin[:]
call[name[log].warning, parameter[name[msg]]]
return[<ast.BoolOp object at 0x7da1b1c17e20>] | keyword[def] identifier[_is_error] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[exit_code] :
identifier[msg] = identifier[self] . identifier[SUCCESS_EXIT_CODES] . identifier[get] ( identifier[self] . identifier[exit_code] )
keyword[if] identifier[msg] :
identifier[log] . identifier[info] ( identifier[msg] )
identifier[msg] = identifier[self] . identifier[WARNING_EXIT_CODES] . identifier[get] ( identifier[self] . identifier[exit_code] )
keyword[if] identifier[msg] :
identifier[log] . identifier[warning] ( identifier[msg] )
keyword[return] identifier[self] . identifier[exit_code] keyword[not] keyword[in] identifier[self] . identifier[SUCCESS_EXIT_CODES] keyword[and] identifier[self] . identifier[exit_code] keyword[not] keyword[in] identifier[self] . identifier[WARNING_EXIT_CODES] | def _is_error(self):
"""
Is this is an error code?
:return:
"""
if self.exit_code:
msg = self.SUCCESS_EXIT_CODES.get(self.exit_code)
if msg:
log.info(msg) # depends on [control=['if'], data=[]]
msg = self.WARNING_EXIT_CODES.get(self.exit_code)
if msg:
log.warning(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self.exit_code not in self.SUCCESS_EXIT_CODES and self.exit_code not in self.WARNING_EXIT_CODES |
def _broadcast_to(tensor_to_broadcast, target_tensors):
"""Helper to broadcast a tensor using a list of target tensors."""
output = tensor_to_broadcast
for tensor in target_tensors:
output += tf.zeros_like(tensor)
return output | def function[_broadcast_to, parameter[tensor_to_broadcast, target_tensors]]:
constant[Helper to broadcast a tensor using a list of target tensors.]
variable[output] assign[=] name[tensor_to_broadcast]
for taget[name[tensor]] in starred[name[target_tensors]] begin[:]
<ast.AugAssign object at 0x7da1b02d1480>
return[name[output]] | keyword[def] identifier[_broadcast_to] ( identifier[tensor_to_broadcast] , identifier[target_tensors] ):
literal[string]
identifier[output] = identifier[tensor_to_broadcast]
keyword[for] identifier[tensor] keyword[in] identifier[target_tensors] :
identifier[output] += identifier[tf] . identifier[zeros_like] ( identifier[tensor] )
keyword[return] identifier[output] | def _broadcast_to(tensor_to_broadcast, target_tensors):
"""Helper to broadcast a tensor using a list of target tensors."""
output = tensor_to_broadcast
for tensor in target_tensors:
output += tf.zeros_like(tensor) # depends on [control=['for'], data=['tensor']]
return output |
def unionfs(rw='rw', ro=None, union='union'):
"""
Decorator for the UnionFS feature.
This configures a unionfs for projects. The given base_dir and/or image_dir
are layered as follows:
image_dir=RW:base_dir=RO
All writes go to the image_dir, while base_dir delivers the (read-only)
versions of the rest of the filesystem.
The unified version will be provided in the project's builddir. Unmouting
is done as soon as the function completes.
Args:
rw: writeable storage area for the unified fuse filesystem.
ro: read-only storage area for the unified fuse filesystem.
union: mountpoint of the unified fuse filesystem.
"""
from functools import wraps
def wrap_in_union_fs(func):
"""
Function that wraps a given function inside the file system.
Args:
func: The function that needs to be wrapped inside the unions fs.
Return:
The file system with the function wrapped inside.
"""
@wraps(func)
def wrap_in_union_fs_func(project, *args, **kwargs):
"""
Wrap the func in the UnionFS mount stack.
We make sure that the mount points all exist and stack up the
directories for the unionfs. All directories outside of the default
build environment are tracked for deletion.
"""
container = project.container
if container is None or in_container():
return func(project, *args, **kwargs)
build_dir = local.path(project.builddir)
LOG.debug("UnionFS - Project builddir: %s", project.builddir)
if __unionfs_is_active(root=build_dir):
LOG.debug(
"UnionFS already active in %s, nesting not supported.",
build_dir)
return func(project, *args, **kwargs)
ro_dir = local.path(container.local)
rw_dir = build_dir / rw
un_dir = build_dir / union
LOG.debug("UnionFS - RW: %s", rw_dir)
unionfs_cmd = __unionfs_set_up(ro_dir, rw_dir, un_dir)
project_builddir_bak = project.builddir
project.builddir = un_dir
proc = unionfs_cmd.popen()
while (not __unionfs_is_active(root=un_dir)) and \
(proc.poll() is None):
pass
ret = None
if proc.poll() is None:
try:
with local.cwd(un_dir):
ret = func(project, *args, **kwargs)
finally:
project.builddir = project_builddir_bak
from signal import SIGINT
is_running = proc.poll() is None
while __unionfs_is_active(root=un_dir) and is_running:
try:
proc.send_signal(SIGINT)
proc.wait(timeout=3)
except subprocess.TimeoutExpired:
proc.kill()
is_running = False
LOG.debug("Unionfs shut down.")
if __unionfs_is_active(root=un_dir):
raise UnmountError()
return ret
return wrap_in_union_fs_func
return wrap_in_union_fs | def function[unionfs, parameter[rw, ro, union]]:
constant[
Decorator for the UnionFS feature.
This configures a unionfs for projects. The given base_dir and/or image_dir
are layered as follows:
image_dir=RW:base_dir=RO
All writes go to the image_dir, while base_dir delivers the (read-only)
versions of the rest of the filesystem.
The unified version will be provided in the project's builddir. Unmouting
is done as soon as the function completes.
Args:
rw: writeable storage area for the unified fuse filesystem.
ro: read-only storage area for the unified fuse filesystem.
union: mountpoint of the unified fuse filesystem.
]
from relative_module[functools] import module[wraps]
def function[wrap_in_union_fs, parameter[func]]:
constant[
Function that wraps a given function inside the file system.
Args:
func: The function that needs to be wrapped inside the unions fs.
Return:
The file system with the function wrapped inside.
]
def function[wrap_in_union_fs_func, parameter[project]]:
constant[
Wrap the func in the UnionFS mount stack.
We make sure that the mount points all exist and stack up the
directories for the unionfs. All directories outside of the default
build environment are tracked for deletion.
]
variable[container] assign[=] name[project].container
if <ast.BoolOp object at 0x7da2046223e0> begin[:]
return[call[name[func], parameter[name[project], <ast.Starred object at 0x7da204622350>]]]
variable[build_dir] assign[=] call[name[local].path, parameter[name[project].builddir]]
call[name[LOG].debug, parameter[constant[UnionFS - Project builddir: %s], name[project].builddir]]
if call[name[__unionfs_is_active], parameter[]] begin[:]
call[name[LOG].debug, parameter[constant[UnionFS already active in %s, nesting not supported.], name[build_dir]]]
return[call[name[func], parameter[name[project], <ast.Starred object at 0x7da2046202b0>]]]
variable[ro_dir] assign[=] call[name[local].path, parameter[name[container].local]]
variable[rw_dir] assign[=] binary_operation[name[build_dir] / name[rw]]
variable[un_dir] assign[=] binary_operation[name[build_dir] / name[union]]
call[name[LOG].debug, parameter[constant[UnionFS - RW: %s], name[rw_dir]]]
variable[unionfs_cmd] assign[=] call[name[__unionfs_set_up], parameter[name[ro_dir], name[rw_dir], name[un_dir]]]
variable[project_builddir_bak] assign[=] name[project].builddir
name[project].builddir assign[=] name[un_dir]
variable[proc] assign[=] call[name[unionfs_cmd].popen, parameter[]]
while <ast.BoolOp object at 0x7da204621960> begin[:]
pass
variable[ret] assign[=] constant[None]
if compare[call[name[proc].poll, parameter[]] is constant[None]] begin[:]
<ast.Try object at 0x7da204623430>
if call[name[__unionfs_is_active], parameter[]] begin[:]
<ast.Raise object at 0x7da20e962320>
return[name[ret]]
return[name[wrap_in_union_fs_func]]
return[name[wrap_in_union_fs]] | keyword[def] identifier[unionfs] ( identifier[rw] = literal[string] , identifier[ro] = keyword[None] , identifier[union] = literal[string] ):
literal[string]
keyword[from] identifier[functools] keyword[import] identifier[wraps]
keyword[def] identifier[wrap_in_union_fs] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrap_in_union_fs_func] ( identifier[project] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[container] = identifier[project] . identifier[container]
keyword[if] identifier[container] keyword[is] keyword[None] keyword[or] identifier[in_container] ():
keyword[return] identifier[func] ( identifier[project] ,* identifier[args] ,** identifier[kwargs] )
identifier[build_dir] = identifier[local] . identifier[path] ( identifier[project] . identifier[builddir] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[project] . identifier[builddir] )
keyword[if] identifier[__unionfs_is_active] ( identifier[root] = identifier[build_dir] ):
identifier[LOG] . identifier[debug] (
literal[string] ,
identifier[build_dir] )
keyword[return] identifier[func] ( identifier[project] ,* identifier[args] ,** identifier[kwargs] )
identifier[ro_dir] = identifier[local] . identifier[path] ( identifier[container] . identifier[local] )
identifier[rw_dir] = identifier[build_dir] / identifier[rw]
identifier[un_dir] = identifier[build_dir] / identifier[union]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[rw_dir] )
identifier[unionfs_cmd] = identifier[__unionfs_set_up] ( identifier[ro_dir] , identifier[rw_dir] , identifier[un_dir] )
identifier[project_builddir_bak] = identifier[project] . identifier[builddir]
identifier[project] . identifier[builddir] = identifier[un_dir]
identifier[proc] = identifier[unionfs_cmd] . identifier[popen] ()
keyword[while] ( keyword[not] identifier[__unionfs_is_active] ( identifier[root] = identifier[un_dir] )) keyword[and] ( identifier[proc] . identifier[poll] () keyword[is] keyword[None] ):
keyword[pass]
identifier[ret] = keyword[None]
keyword[if] identifier[proc] . identifier[poll] () keyword[is] keyword[None] :
keyword[try] :
keyword[with] identifier[local] . identifier[cwd] ( identifier[un_dir] ):
identifier[ret] = identifier[func] ( identifier[project] ,* identifier[args] ,** identifier[kwargs] )
keyword[finally] :
identifier[project] . identifier[builddir] = identifier[project_builddir_bak]
keyword[from] identifier[signal] keyword[import] identifier[SIGINT]
identifier[is_running] = identifier[proc] . identifier[poll] () keyword[is] keyword[None]
keyword[while] identifier[__unionfs_is_active] ( identifier[root] = identifier[un_dir] ) keyword[and] identifier[is_running] :
keyword[try] :
identifier[proc] . identifier[send_signal] ( identifier[SIGINT] )
identifier[proc] . identifier[wait] ( identifier[timeout] = literal[int] )
keyword[except] identifier[subprocess] . identifier[TimeoutExpired] :
identifier[proc] . identifier[kill] ()
identifier[is_running] = keyword[False]
identifier[LOG] . identifier[debug] ( literal[string] )
keyword[if] identifier[__unionfs_is_active] ( identifier[root] = identifier[un_dir] ):
keyword[raise] identifier[UnmountError] ()
keyword[return] identifier[ret]
keyword[return] identifier[wrap_in_union_fs_func]
keyword[return] identifier[wrap_in_union_fs] | def unionfs(rw='rw', ro=None, union='union'):
"""
Decorator for the UnionFS feature.
This configures a unionfs for projects. The given base_dir and/or image_dir
are layered as follows:
image_dir=RW:base_dir=RO
All writes go to the image_dir, while base_dir delivers the (read-only)
versions of the rest of the filesystem.
The unified version will be provided in the project's builddir. Unmouting
is done as soon as the function completes.
Args:
rw: writeable storage area for the unified fuse filesystem.
ro: read-only storage area for the unified fuse filesystem.
union: mountpoint of the unified fuse filesystem.
"""
from functools import wraps
def wrap_in_union_fs(func):
"""
Function that wraps a given function inside the file system.
Args:
func: The function that needs to be wrapped inside the unions fs.
Return:
The file system with the function wrapped inside.
"""
@wraps(func)
def wrap_in_union_fs_func(project, *args, **kwargs):
"""
Wrap the func in the UnionFS mount stack.
We make sure that the mount points all exist and stack up the
directories for the unionfs. All directories outside of the default
build environment are tracked for deletion.
"""
container = project.container
if container is None or in_container():
return func(project, *args, **kwargs) # depends on [control=['if'], data=[]]
build_dir = local.path(project.builddir)
LOG.debug('UnionFS - Project builddir: %s', project.builddir)
if __unionfs_is_active(root=build_dir):
LOG.debug('UnionFS already active in %s, nesting not supported.', build_dir)
return func(project, *args, **kwargs) # depends on [control=['if'], data=[]]
ro_dir = local.path(container.local)
rw_dir = build_dir / rw
un_dir = build_dir / union
LOG.debug('UnionFS - RW: %s', rw_dir)
unionfs_cmd = __unionfs_set_up(ro_dir, rw_dir, un_dir)
project_builddir_bak = project.builddir
project.builddir = un_dir
proc = unionfs_cmd.popen()
while not __unionfs_is_active(root=un_dir) and proc.poll() is None:
pass # depends on [control=['while'], data=[]]
ret = None
if proc.poll() is None:
try:
with local.cwd(un_dir):
ret = func(project, *args, **kwargs) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
finally:
project.builddir = project_builddir_bak
from signal import SIGINT
is_running = proc.poll() is None
while __unionfs_is_active(root=un_dir) and is_running:
try:
proc.send_signal(SIGINT)
proc.wait(timeout=3) # depends on [control=['try'], data=[]]
except subprocess.TimeoutExpired:
proc.kill()
is_running = False # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
LOG.debug('Unionfs shut down.') # depends on [control=['if'], data=[]]
if __unionfs_is_active(root=un_dir):
raise UnmountError() # depends on [control=['if'], data=[]]
return ret
return wrap_in_union_fs_func
return wrap_in_union_fs |
def log(verbose=False):
"""
print a log test
:param verbose: show more logs
"""
terminal.log.config(verbose=verbose)
terminal.log.info('this is a info message')
terminal.log.verbose.info('this is a verbose message') | def function[log, parameter[verbose]]:
constant[
print a log test
:param verbose: show more logs
]
call[name[terminal].log.config, parameter[]]
call[name[terminal].log.info, parameter[constant[this is a info message]]]
call[name[terminal].log.verbose.info, parameter[constant[this is a verbose message]]] | keyword[def] identifier[log] ( identifier[verbose] = keyword[False] ):
literal[string]
identifier[terminal] . identifier[log] . identifier[config] ( identifier[verbose] = identifier[verbose] )
identifier[terminal] . identifier[log] . identifier[info] ( literal[string] )
identifier[terminal] . identifier[log] . identifier[verbose] . identifier[info] ( literal[string] ) | def log(verbose=False):
"""
print a log test
:param verbose: show more logs
"""
terminal.log.config(verbose=verbose)
terminal.log.info('this is a info message')
terminal.log.verbose.info('this is a verbose message') |
def get_args():
"""get_args
This function extracts the script arguments within the arguments variable and
interprets their meaning before returning such content.
"""
expected = ['working_directory', 'operating_system', 'version']
possible = ['json'].extend(expected)
try:
opts, args = getopt.getopt(sys.argv[1:], '', map(lambda x: ('%s=' % x),
possible))
except getopt.GetoptError as err:
print(str(err))
exit_cleanly(error_number=errno.EINVAL)
arguments = dict()
for o, a in opts:
option = re.sub('^-*', '', o)
if 'json' in option:
arguments = _load_json(a)
break
if option in expected:
arguments[option] = a
error = 0
for item in expected:
if item not in arguments:
print("Missing: %s from arguments" % item)
error = errno.EINVAL
if error:
exit_cleanly(error_number=error)
return arguments | def function[get_args, parameter[]]:
constant[get_args
This function extracts the script arguments within the arguments variable and
interprets their meaning before returning such content.
]
variable[expected] assign[=] list[[<ast.Constant object at 0x7da1b17fbcd0>, <ast.Constant object at 0x7da1b17f9900>, <ast.Constant object at 0x7da1b17f9030>]]
variable[possible] assign[=] call[list[[<ast.Constant object at 0x7da1b17f9ff0>]].extend, parameter[name[expected]]]
<ast.Try object at 0x7da1b17f9690>
variable[arguments] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b17f99f0>, <ast.Name object at 0x7da1b17faef0>]]] in starred[name[opts]] begin[:]
variable[option] assign[=] call[name[re].sub, parameter[constant[^-*], constant[], name[o]]]
if compare[constant[json] in name[option]] begin[:]
variable[arguments] assign[=] call[name[_load_json], parameter[name[a]]]
break
if compare[name[option] in name[expected]] begin[:]
call[name[arguments]][name[option]] assign[=] name[a]
variable[error] assign[=] constant[0]
for taget[name[item]] in starred[name[expected]] begin[:]
if compare[name[item] <ast.NotIn object at 0x7da2590d7190> name[arguments]] begin[:]
call[name[print], parameter[binary_operation[constant[Missing: %s from arguments] <ast.Mod object at 0x7da2590d6920> name[item]]]]
variable[error] assign[=] name[errno].EINVAL
if name[error] begin[:]
call[name[exit_cleanly], parameter[]]
return[name[arguments]] | keyword[def] identifier[get_args] ():
literal[string]
identifier[expected] =[ literal[string] , literal[string] , literal[string] ]
identifier[possible] =[ literal[string] ]. identifier[extend] ( identifier[expected] )
keyword[try] :
identifier[opts] , identifier[args] = identifier[getopt] . identifier[getopt] ( identifier[sys] . identifier[argv] [ literal[int] :], literal[string] , identifier[map] ( keyword[lambda] identifier[x] :( literal[string] % identifier[x] ),
identifier[possible] ))
keyword[except] identifier[getopt] . identifier[GetoptError] keyword[as] identifier[err] :
identifier[print] ( identifier[str] ( identifier[err] ))
identifier[exit_cleanly] ( identifier[error_number] = identifier[errno] . identifier[EINVAL] )
identifier[arguments] = identifier[dict] ()
keyword[for] identifier[o] , identifier[a] keyword[in] identifier[opts] :
identifier[option] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[o] )
keyword[if] literal[string] keyword[in] identifier[option] :
identifier[arguments] = identifier[_load_json] ( identifier[a] )
keyword[break]
keyword[if] identifier[option] keyword[in] identifier[expected] :
identifier[arguments] [ identifier[option] ]= identifier[a]
identifier[error] = literal[int]
keyword[for] identifier[item] keyword[in] identifier[expected] :
keyword[if] identifier[item] keyword[not] keyword[in] identifier[arguments] :
identifier[print] ( literal[string] % identifier[item] )
identifier[error] = identifier[errno] . identifier[EINVAL]
keyword[if] identifier[error] :
identifier[exit_cleanly] ( identifier[error_number] = identifier[error] )
keyword[return] identifier[arguments] | def get_args():
"""get_args
This function extracts the script arguments within the arguments variable and
interprets their meaning before returning such content.
"""
expected = ['working_directory', 'operating_system', 'version']
possible = ['json'].extend(expected)
try:
(opts, args) = getopt.getopt(sys.argv[1:], '', map(lambda x: '%s=' % x, possible)) # depends on [control=['try'], data=[]]
except getopt.GetoptError as err:
print(str(err))
exit_cleanly(error_number=errno.EINVAL) # depends on [control=['except'], data=['err']]
arguments = dict()
for (o, a) in opts:
option = re.sub('^-*', '', o)
if 'json' in option:
arguments = _load_json(a)
break # depends on [control=['if'], data=[]]
if option in expected:
arguments[option] = a # depends on [control=['if'], data=['option']] # depends on [control=['for'], data=[]]
error = 0
for item in expected:
if item not in arguments:
print('Missing: %s from arguments' % item)
error = errno.EINVAL # depends on [control=['if'], data=['item']] # depends on [control=['for'], data=['item']]
if error:
exit_cleanly(error_number=error) # depends on [control=['if'], data=[]]
return arguments |
def _dict_increment(self, dictionary, key):
"""Increments the value of the dictionary at the specified key."""
if key in dictionary:
dictionary[key] += 1
else:
dictionary[key] = 1 | def function[_dict_increment, parameter[self, dictionary, key]]:
constant[Increments the value of the dictionary at the specified key.]
if compare[name[key] in name[dictionary]] begin[:]
<ast.AugAssign object at 0x7da204347760> | keyword[def] identifier[_dict_increment] ( identifier[self] , identifier[dictionary] , identifier[key] ):
literal[string]
keyword[if] identifier[key] keyword[in] identifier[dictionary] :
identifier[dictionary] [ identifier[key] ]+= literal[int]
keyword[else] :
identifier[dictionary] [ identifier[key] ]= literal[int] | def _dict_increment(self, dictionary, key):
"""Increments the value of the dictionary at the specified key."""
if key in dictionary:
dictionary[key] += 1 # depends on [control=['if'], data=['key', 'dictionary']]
else:
dictionary[key] = 1 |
def format_image(self, image, image_format, **kwargs):
"""Returns an image in the request format"""
if image_format in ('png', 'jpg', 'jpeg', 'gif', 'bmp', 'webp'):
if image_format != 'webp' and FORCE_WEBP:
# Always return WebP when supported by the browser
accept = self.request.META['HTTP_ACCEPT'].split(',')
if 'image/webp' in accept:
image = image.convert('RGBA')
image_format = 'webp'
kwargs = {'lossless': True}
if image_format == 'png':
kwargs['optimize'] = True
elif image_format == 'jpg':
image.convert('RGB')
kwargs['progressive'] = True
buffer = six.BytesIO()
image.save(buffer, image_format, **kwargs)
return buffer.getvalue(), "image/{}".format(image_format)
else:
raise ValueError('Unsupported format: {}'.format(image_format)) | def function[format_image, parameter[self, image, image_format]]:
constant[Returns an image in the request format]
if compare[name[image_format] in tuple[[<ast.Constant object at 0x7da18fe91a80>, <ast.Constant object at 0x7da18fe912d0>, <ast.Constant object at 0x7da18fe91c60>, <ast.Constant object at 0x7da18fe904f0>, <ast.Constant object at 0x7da18fe928f0>, <ast.Constant object at 0x7da18fe91e70>]]] begin[:]
if <ast.BoolOp object at 0x7da18fe934f0> begin[:]
variable[accept] assign[=] call[call[name[self].request.META][constant[HTTP_ACCEPT]].split, parameter[constant[,]]]
if compare[constant[image/webp] in name[accept]] begin[:]
variable[image] assign[=] call[name[image].convert, parameter[constant[RGBA]]]
variable[image_format] assign[=] constant[webp]
variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da18fe90f70>], [<ast.Constant object at 0x7da18fe937f0>]]
if compare[name[image_format] equal[==] constant[png]] begin[:]
call[name[kwargs]][constant[optimize]] assign[=] constant[True]
variable[buffer] assign[=] call[name[six].BytesIO, parameter[]]
call[name[image].save, parameter[name[buffer], name[image_format]]]
return[tuple[[<ast.Call object at 0x7da18fe93b20>, <ast.Call object at 0x7da18fe93970>]]] | keyword[def] identifier[format_image] ( identifier[self] , identifier[image] , identifier[image_format] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[image_format] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ):
keyword[if] identifier[image_format] != literal[string] keyword[and] identifier[FORCE_WEBP] :
identifier[accept] = identifier[self] . identifier[request] . identifier[META] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[accept] :
identifier[image] = identifier[image] . identifier[convert] ( literal[string] )
identifier[image_format] = literal[string]
identifier[kwargs] ={ literal[string] : keyword[True] }
keyword[if] identifier[image_format] == literal[string] :
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[elif] identifier[image_format] == literal[string] :
identifier[image] . identifier[convert] ( literal[string] )
identifier[kwargs] [ literal[string] ]= keyword[True]
identifier[buffer] = identifier[six] . identifier[BytesIO] ()
identifier[image] . identifier[save] ( identifier[buffer] , identifier[image_format] ,** identifier[kwargs] )
keyword[return] identifier[buffer] . identifier[getvalue] (), literal[string] . identifier[format] ( identifier[image_format] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[image_format] )) | def format_image(self, image, image_format, **kwargs):
"""Returns an image in the request format"""
if image_format in ('png', 'jpg', 'jpeg', 'gif', 'bmp', 'webp'):
if image_format != 'webp' and FORCE_WEBP:
# Always return WebP when supported by the browser
accept = self.request.META['HTTP_ACCEPT'].split(',')
if 'image/webp' in accept:
image = image.convert('RGBA')
image_format = 'webp'
kwargs = {'lossless': True} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if image_format == 'png':
kwargs['optimize'] = True # depends on [control=['if'], data=[]]
elif image_format == 'jpg':
image.convert('RGB')
kwargs['progressive'] = True # depends on [control=['if'], data=[]]
buffer = six.BytesIO()
image.save(buffer, image_format, **kwargs)
return (buffer.getvalue(), 'image/{}'.format(image_format)) # depends on [control=['if'], data=['image_format']]
else:
raise ValueError('Unsupported format: {}'.format(image_format)) |
def response_music(self, music_url, title=None, description=None, hq_music_url=None, thumb_media_id=None):
"""
将音乐信息组装为符合微信服务器要求的响应数据
:param music_url: 音乐链接
:param title: 音乐标题
:param description: 音乐描述
:param hq_music_url: 高质量音乐链接, WIFI环境优先使用该链接播放音乐
:param thumb_media_id: 缩略图的 MediaID
:return: 符合微信服务器要求的 XML 响应数据
"""
self._check_parse()
music_url = self._transcoding(music_url)
title = self._transcoding(title)
description = self._transcoding(description)
hq_music_url = self._transcoding(hq_music_url)
response = MusicReply(message=self.__message, title=title, description=description, music_url=music_url,
hq_music_url=hq_music_url, thumb_media_id=thumb_media_id).render()
return self._encrypt_response(response) | def function[response_music, parameter[self, music_url, title, description, hq_music_url, thumb_media_id]]:
constant[
将音乐信息组装为符合微信服务器要求的响应数据
:param music_url: 音乐链接
:param title: 音乐标题
:param description: 音乐描述
:param hq_music_url: 高质量音乐链接, WIFI环境优先使用该链接播放音乐
:param thumb_media_id: 缩略图的 MediaID
:return: 符合微信服务器要求的 XML 响应数据
]
call[name[self]._check_parse, parameter[]]
variable[music_url] assign[=] call[name[self]._transcoding, parameter[name[music_url]]]
variable[title] assign[=] call[name[self]._transcoding, parameter[name[title]]]
variable[description] assign[=] call[name[self]._transcoding, parameter[name[description]]]
variable[hq_music_url] assign[=] call[name[self]._transcoding, parameter[name[hq_music_url]]]
variable[response] assign[=] call[call[name[MusicReply], parameter[]].render, parameter[]]
return[call[name[self]._encrypt_response, parameter[name[response]]]] | keyword[def] identifier[response_music] ( identifier[self] , identifier[music_url] , identifier[title] = keyword[None] , identifier[description] = keyword[None] , identifier[hq_music_url] = keyword[None] , identifier[thumb_media_id] = keyword[None] ):
literal[string]
identifier[self] . identifier[_check_parse] ()
identifier[music_url] = identifier[self] . identifier[_transcoding] ( identifier[music_url] )
identifier[title] = identifier[self] . identifier[_transcoding] ( identifier[title] )
identifier[description] = identifier[self] . identifier[_transcoding] ( identifier[description] )
identifier[hq_music_url] = identifier[self] . identifier[_transcoding] ( identifier[hq_music_url] )
identifier[response] = identifier[MusicReply] ( identifier[message] = identifier[self] . identifier[__message] , identifier[title] = identifier[title] , identifier[description] = identifier[description] , identifier[music_url] = identifier[music_url] ,
identifier[hq_music_url] = identifier[hq_music_url] , identifier[thumb_media_id] = identifier[thumb_media_id] ). identifier[render] ()
keyword[return] identifier[self] . identifier[_encrypt_response] ( identifier[response] ) | def response_music(self, music_url, title=None, description=None, hq_music_url=None, thumb_media_id=None):
"""
将音乐信息组装为符合微信服务器要求的响应数据
:param music_url: 音乐链接
:param title: 音乐标题
:param description: 音乐描述
:param hq_music_url: 高质量音乐链接, WIFI环境优先使用该链接播放音乐
:param thumb_media_id: 缩略图的 MediaID
:return: 符合微信服务器要求的 XML 响应数据
"""
self._check_parse()
music_url = self._transcoding(music_url)
title = self._transcoding(title)
description = self._transcoding(description)
hq_music_url = self._transcoding(hq_music_url)
response = MusicReply(message=self.__message, title=title, description=description, music_url=music_url, hq_music_url=hq_music_url, thumb_media_id=thumb_media_id).render()
return self._encrypt_response(response) |
def update(self, new_games):
""" new_games is a list of .tfrecord.zz new game records. """
new_games.sort(key=os.path.basename)
first_new_game = None
for idx, game in enumerate(new_games):
timestamp = file_timestamp(game)
if timestamp <= self.examples[-1][0]:
continue
elif first_new_game is None:
first_new_game = idx
num_new_games = len(new_games) - idx
print("Found {}/{} new games".format(
num_new_games, len(new_games)))
self.total_updates += num_new_games
self.examples.extend(self.func(game))
if first_new_game is None:
print("No new games", file_timestamp(
new_games[-1]), self.examples[-1][0]) | def function[update, parameter[self, new_games]]:
constant[ new_games is a list of .tfrecord.zz new game records. ]
call[name[new_games].sort, parameter[]]
variable[first_new_game] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da18f00fca0>, <ast.Name object at 0x7da18f00ea70>]]] in starred[call[name[enumerate], parameter[name[new_games]]]] begin[:]
variable[timestamp] assign[=] call[name[file_timestamp], parameter[name[game]]]
if compare[name[timestamp] less_or_equal[<=] call[call[name[self].examples][<ast.UnaryOp object at 0x7da18f00f430>]][constant[0]]] begin[:]
continue
call[name[self].examples.extend, parameter[call[name[self].func, parameter[name[game]]]]]
if compare[name[first_new_game] is constant[None]] begin[:]
call[name[print], parameter[constant[No new games], call[name[file_timestamp], parameter[call[name[new_games]][<ast.UnaryOp object at 0x7da1b21d6e30>]]], call[call[name[self].examples][<ast.UnaryOp object at 0x7da1b21d5330>]][constant[0]]]] | keyword[def] identifier[update] ( identifier[self] , identifier[new_games] ):
literal[string]
identifier[new_games] . identifier[sort] ( identifier[key] = identifier[os] . identifier[path] . identifier[basename] )
identifier[first_new_game] = keyword[None]
keyword[for] identifier[idx] , identifier[game] keyword[in] identifier[enumerate] ( identifier[new_games] ):
identifier[timestamp] = identifier[file_timestamp] ( identifier[game] )
keyword[if] identifier[timestamp] <= identifier[self] . identifier[examples] [- literal[int] ][ literal[int] ]:
keyword[continue]
keyword[elif] identifier[first_new_game] keyword[is] keyword[None] :
identifier[first_new_game] = identifier[idx]
identifier[num_new_games] = identifier[len] ( identifier[new_games] )- identifier[idx]
identifier[print] ( literal[string] . identifier[format] (
identifier[num_new_games] , identifier[len] ( identifier[new_games] )))
identifier[self] . identifier[total_updates] += identifier[num_new_games]
identifier[self] . identifier[examples] . identifier[extend] ( identifier[self] . identifier[func] ( identifier[game] ))
keyword[if] identifier[first_new_game] keyword[is] keyword[None] :
identifier[print] ( literal[string] , identifier[file_timestamp] (
identifier[new_games] [- literal[int] ]), identifier[self] . identifier[examples] [- literal[int] ][ literal[int] ]) | def update(self, new_games):
""" new_games is a list of .tfrecord.zz new game records. """
new_games.sort(key=os.path.basename)
first_new_game = None
for (idx, game) in enumerate(new_games):
timestamp = file_timestamp(game)
if timestamp <= self.examples[-1][0]:
continue # depends on [control=['if'], data=[]]
elif first_new_game is None:
first_new_game = idx
num_new_games = len(new_games) - idx
print('Found {}/{} new games'.format(num_new_games, len(new_games)))
self.total_updates += num_new_games # depends on [control=['if'], data=['first_new_game']]
self.examples.extend(self.func(game)) # depends on [control=['for'], data=[]]
if first_new_game is None:
print('No new games', file_timestamp(new_games[-1]), self.examples[-1][0]) # depends on [control=['if'], data=[]] |
def pif_search(self, pif_system_returning_query):
"""
Run a PIF query against Citrination.
:param pif_system_returning_query: The PIF system query to execute.
:type pif_system_returning_query: :class:`PifSystemReturningQuery`
:return: :class:`PifSearchResult` object with the results of the query.
:rtype: :class:`PifSearchResult`
"""
self._validate_search_query(pif_system_returning_query)
return self._execute_search_query(
pif_system_returning_query,
PifSearchResult
) | def function[pif_search, parameter[self, pif_system_returning_query]]:
constant[
Run a PIF query against Citrination.
:param pif_system_returning_query: The PIF system query to execute.
:type pif_system_returning_query: :class:`PifSystemReturningQuery`
:return: :class:`PifSearchResult` object with the results of the query.
:rtype: :class:`PifSearchResult`
]
call[name[self]._validate_search_query, parameter[name[pif_system_returning_query]]]
return[call[name[self]._execute_search_query, parameter[name[pif_system_returning_query], name[PifSearchResult]]]] | keyword[def] identifier[pif_search] ( identifier[self] , identifier[pif_system_returning_query] ):
literal[string]
identifier[self] . identifier[_validate_search_query] ( identifier[pif_system_returning_query] )
keyword[return] identifier[self] . identifier[_execute_search_query] (
identifier[pif_system_returning_query] ,
identifier[PifSearchResult]
) | def pif_search(self, pif_system_returning_query):
"""
Run a PIF query against Citrination.
:param pif_system_returning_query: The PIF system query to execute.
:type pif_system_returning_query: :class:`PifSystemReturningQuery`
:return: :class:`PifSearchResult` object with the results of the query.
:rtype: :class:`PifSearchResult`
"""
self._validate_search_query(pif_system_returning_query)
return self._execute_search_query(pif_system_returning_query, PifSearchResult) |
def from_regression(cls, clock_model):
"""
Create the conversion object automatically from the tree
Parameters
----------
clock_model : dict
dictionary as returned from TreeRegression with fields intercept and slope
"""
dc = cls()
dc.clock_rate = clock_model['slope']
dc.intercept = clock_model['intercept']
dc.chisq = clock_model['chisq'] if 'chisq' in clock_model else None
dc.valid_confidence = clock_model['valid_confidence'] if 'valid_confidence' in clock_model else False
if 'cov' in clock_model and dc.valid_confidence:
dc.cov = clock_model['cov']
dc.r_val = clock_model['r_val']
return dc | def function[from_regression, parameter[cls, clock_model]]:
constant[
Create the conversion object automatically from the tree
Parameters
----------
clock_model : dict
dictionary as returned from TreeRegression with fields intercept and slope
]
variable[dc] assign[=] call[name[cls], parameter[]]
name[dc].clock_rate assign[=] call[name[clock_model]][constant[slope]]
name[dc].intercept assign[=] call[name[clock_model]][constant[intercept]]
name[dc].chisq assign[=] <ast.IfExp object at 0x7da20e954820>
name[dc].valid_confidence assign[=] <ast.IfExp object at 0x7da20e957fd0>
if <ast.BoolOp object at 0x7da20e954bb0> begin[:]
name[dc].cov assign[=] call[name[clock_model]][constant[cov]]
name[dc].r_val assign[=] call[name[clock_model]][constant[r_val]]
return[name[dc]] | keyword[def] identifier[from_regression] ( identifier[cls] , identifier[clock_model] ):
literal[string]
identifier[dc] = identifier[cls] ()
identifier[dc] . identifier[clock_rate] = identifier[clock_model] [ literal[string] ]
identifier[dc] . identifier[intercept] = identifier[clock_model] [ literal[string] ]
identifier[dc] . identifier[chisq] = identifier[clock_model] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[clock_model] keyword[else] keyword[None]
identifier[dc] . identifier[valid_confidence] = identifier[clock_model] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[clock_model] keyword[else] keyword[False]
keyword[if] literal[string] keyword[in] identifier[clock_model] keyword[and] identifier[dc] . identifier[valid_confidence] :
identifier[dc] . identifier[cov] = identifier[clock_model] [ literal[string] ]
identifier[dc] . identifier[r_val] = identifier[clock_model] [ literal[string] ]
keyword[return] identifier[dc] | def from_regression(cls, clock_model):
"""
Create the conversion object automatically from the tree
Parameters
----------
clock_model : dict
dictionary as returned from TreeRegression with fields intercept and slope
"""
dc = cls()
dc.clock_rate = clock_model['slope']
dc.intercept = clock_model['intercept']
dc.chisq = clock_model['chisq'] if 'chisq' in clock_model else None
dc.valid_confidence = clock_model['valid_confidence'] if 'valid_confidence' in clock_model else False
if 'cov' in clock_model and dc.valid_confidence:
dc.cov = clock_model['cov'] # depends on [control=['if'], data=[]]
dc.r_val = clock_model['r_val']
return dc |
def _get_event_stream(self):
"""Spawn a thread and monitor the Arlo Event Stream."""
self.__event_handle = threading.Event()
event_thread = threading.Thread(target=self.thread_function)
event_thread.start() | def function[_get_event_stream, parameter[self]]:
constant[Spawn a thread and monitor the Arlo Event Stream.]
name[self].__event_handle assign[=] call[name[threading].Event, parameter[]]
variable[event_thread] assign[=] call[name[threading].Thread, parameter[]]
call[name[event_thread].start, parameter[]] | keyword[def] identifier[_get_event_stream] ( identifier[self] ):
literal[string]
identifier[self] . identifier[__event_handle] = identifier[threading] . identifier[Event] ()
identifier[event_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[thread_function] )
identifier[event_thread] . identifier[start] () | def _get_event_stream(self):
"""Spawn a thread and monitor the Arlo Event Stream."""
self.__event_handle = threading.Event()
event_thread = threading.Thread(target=self.thread_function)
event_thread.start() |
def _get_internal_field_by_name(self, name):
"""Gets the field by name, or None if not found."""
field = self._all_fields.get(name, self._all_fields.get('%s.%s' % (self._full_name, name)))
if field is not None:
return field
for field_name in self._all_fields:
# Specific name
if field_name.endswith('.%s' % name):
return self._all_fields[field_name] | def function[_get_internal_field_by_name, parameter[self, name]]:
constant[Gets the field by name, or None if not found.]
variable[field] assign[=] call[name[self]._all_fields.get, parameter[name[name], call[name[self]._all_fields.get, parameter[binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b1b0d600>, <ast.Name object at 0x7da1b1b0f850>]]]]]]]
if compare[name[field] is_not constant[None]] begin[:]
return[name[field]]
for taget[name[field_name]] in starred[name[self]._all_fields] begin[:]
if call[name[field_name].endswith, parameter[binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> name[name]]]] begin[:]
return[call[name[self]._all_fields][name[field_name]]] | keyword[def] identifier[_get_internal_field_by_name] ( identifier[self] , identifier[name] ):
literal[string]
identifier[field] = identifier[self] . identifier[_all_fields] . identifier[get] ( identifier[name] , identifier[self] . identifier[_all_fields] . identifier[get] ( literal[string] %( identifier[self] . identifier[_full_name] , identifier[name] )))
keyword[if] identifier[field] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[field]
keyword[for] identifier[field_name] keyword[in] identifier[self] . identifier[_all_fields] :
keyword[if] identifier[field_name] . identifier[endswith] ( literal[string] % identifier[name] ):
keyword[return] identifier[self] . identifier[_all_fields] [ identifier[field_name] ] | def _get_internal_field_by_name(self, name):
"""Gets the field by name, or None if not found."""
field = self._all_fields.get(name, self._all_fields.get('%s.%s' % (self._full_name, name)))
if field is not None:
return field # depends on [control=['if'], data=['field']]
for field_name in self._all_fields:
# Specific name
if field_name.endswith('.%s' % name):
return self._all_fields[field_name] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field_name']] |
def _calculate_edges(self):
"""Calculate edges of the current label. Not intended for external use.
"""
# Calculate the left edge of the label.
left = self.specs.left_margin
left += (self.specs.label_width * (self._position[1] - 1))
if self.specs.column_gap:
left += (self.specs.column_gap * (self._position[1] - 1))
left *= mm
# And the bottom.
bottom = self.specs.sheet_height - self.specs.top_margin
bottom -= (self.specs.label_height * self._position[0])
if self.specs.row_gap:
bottom -= (self.specs.row_gap * (self._position[0] - 1))
bottom *= mm
# Done.
return float(left), float(bottom) | def function[_calculate_edges, parameter[self]]:
constant[Calculate edges of the current label. Not intended for external use.
]
variable[left] assign[=] name[self].specs.left_margin
<ast.AugAssign object at 0x7da20c6e6c80>
if name[self].specs.column_gap begin[:]
<ast.AugAssign object at 0x7da20c6e62f0>
<ast.AugAssign object at 0x7da20c6e6200>
variable[bottom] assign[=] binary_operation[name[self].specs.sheet_height - name[self].specs.top_margin]
<ast.AugAssign object at 0x7da20c6e60b0>
if name[self].specs.row_gap begin[:]
<ast.AugAssign object at 0x7da20c6e4580>
<ast.AugAssign object at 0x7da20c6e4af0>
return[tuple[[<ast.Call object at 0x7da20c6e40a0>, <ast.Call object at 0x7da20c6e4730>]]] | keyword[def] identifier[_calculate_edges] ( identifier[self] ):
literal[string]
identifier[left] = identifier[self] . identifier[specs] . identifier[left_margin]
identifier[left] +=( identifier[self] . identifier[specs] . identifier[label_width] *( identifier[self] . identifier[_position] [ literal[int] ]- literal[int] ))
keyword[if] identifier[self] . identifier[specs] . identifier[column_gap] :
identifier[left] +=( identifier[self] . identifier[specs] . identifier[column_gap] *( identifier[self] . identifier[_position] [ literal[int] ]- literal[int] ))
identifier[left] *= identifier[mm]
identifier[bottom] = identifier[self] . identifier[specs] . identifier[sheet_height] - identifier[self] . identifier[specs] . identifier[top_margin]
identifier[bottom] -=( identifier[self] . identifier[specs] . identifier[label_height] * identifier[self] . identifier[_position] [ literal[int] ])
keyword[if] identifier[self] . identifier[specs] . identifier[row_gap] :
identifier[bottom] -=( identifier[self] . identifier[specs] . identifier[row_gap] *( identifier[self] . identifier[_position] [ literal[int] ]- literal[int] ))
identifier[bottom] *= identifier[mm]
keyword[return] identifier[float] ( identifier[left] ), identifier[float] ( identifier[bottom] ) | def _calculate_edges(self):
"""Calculate edges of the current label. Not intended for external use.
"""
# Calculate the left edge of the label.
left = self.specs.left_margin
left += self.specs.label_width * (self._position[1] - 1)
if self.specs.column_gap:
left += self.specs.column_gap * (self._position[1] - 1) # depends on [control=['if'], data=[]]
left *= mm
# And the bottom.
bottom = self.specs.sheet_height - self.specs.top_margin
bottom -= self.specs.label_height * self._position[0]
if self.specs.row_gap:
bottom -= self.specs.row_gap * (self._position[0] - 1) # depends on [control=['if'], data=[]]
bottom *= mm
# Done.
return (float(left), float(bottom)) |
def fixcode(**kwargs):
"""
auto pep8 format all python file in ``source code`` and ``tests`` dir.
"""
# repository direcotry
repo_dir = Path(__file__).parent.absolute()
# source code directory
source_dir = Path(repo_dir, package.__name__)
if source_dir.exists():
print("Source code locate at: '%s'." % source_dir)
print("Auto pep8 all python file ...")
source_dir.autopep8(**kwargs)
else:
print("Source code directory not found!")
# unittest code directory
unittest_dir = Path(repo_dir, "tests")
if unittest_dir.exists():
print("Unittest code locate at: '%s'." % unittest_dir)
print("Auto pep8 all python file ...")
unittest_dir.autopep8(**kwargs)
else:
print("Unittest code directory not found!")
print("Complete!") | def function[fixcode, parameter[]]:
constant[
auto pep8 format all python file in ``source code`` and ``tests`` dir.
]
variable[repo_dir] assign[=] call[call[name[Path], parameter[name[__file__]]].parent.absolute, parameter[]]
variable[source_dir] assign[=] call[name[Path], parameter[name[repo_dir], name[package].__name__]]
if call[name[source_dir].exists, parameter[]] begin[:]
call[name[print], parameter[binary_operation[constant[Source code locate at: '%s'.] <ast.Mod object at 0x7da2590d6920> name[source_dir]]]]
call[name[print], parameter[constant[Auto pep8 all python file ...]]]
call[name[source_dir].autopep8, parameter[]]
variable[unittest_dir] assign[=] call[name[Path], parameter[name[repo_dir], constant[tests]]]
if call[name[unittest_dir].exists, parameter[]] begin[:]
call[name[print], parameter[binary_operation[constant[Unittest code locate at: '%s'.] <ast.Mod object at 0x7da2590d6920> name[unittest_dir]]]]
call[name[print], parameter[constant[Auto pep8 all python file ...]]]
call[name[unittest_dir].autopep8, parameter[]]
call[name[print], parameter[constant[Complete!]]] | keyword[def] identifier[fixcode] (** identifier[kwargs] ):
literal[string]
identifier[repo_dir] = identifier[Path] ( identifier[__file__] ). identifier[parent] . identifier[absolute] ()
identifier[source_dir] = identifier[Path] ( identifier[repo_dir] , identifier[package] . identifier[__name__] )
keyword[if] identifier[source_dir] . identifier[exists] ():
identifier[print] ( literal[string] % identifier[source_dir] )
identifier[print] ( literal[string] )
identifier[source_dir] . identifier[autopep8] (** identifier[kwargs] )
keyword[else] :
identifier[print] ( literal[string] )
identifier[unittest_dir] = identifier[Path] ( identifier[repo_dir] , literal[string] )
keyword[if] identifier[unittest_dir] . identifier[exists] ():
identifier[print] ( literal[string] % identifier[unittest_dir] )
identifier[print] ( literal[string] )
identifier[unittest_dir] . identifier[autopep8] (** identifier[kwargs] )
keyword[else] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] ) | def fixcode(**kwargs):
"""
auto pep8 format all python file in ``source code`` and ``tests`` dir.
"""
# repository direcotry
repo_dir = Path(__file__).parent.absolute()
# source code directory
source_dir = Path(repo_dir, package.__name__)
if source_dir.exists():
print("Source code locate at: '%s'." % source_dir)
print('Auto pep8 all python file ...')
source_dir.autopep8(**kwargs) # depends on [control=['if'], data=[]]
else:
print('Source code directory not found!')
# unittest code directory
unittest_dir = Path(repo_dir, 'tests')
if unittest_dir.exists():
print("Unittest code locate at: '%s'." % unittest_dir)
print('Auto pep8 all python file ...')
unittest_dir.autopep8(**kwargs) # depends on [control=['if'], data=[]]
else:
print('Unittest code directory not found!')
print('Complete!') |
def _populate_trie_reducer(self, trie_accumulator=CharTrie(), value="") -> CharTrie:
"""Adds value to trie accumulator"""
if self._case_sensitive:
key = self._joiner.join([x.orth_ if isinstance(x, Token) else x for x in
self._default_tokenizer.tokenize(value, disable=disable_spacy)])
else:
key = self._joiner.join([x.lower_ if isinstance(x, Token) else x.lower() for x in
self._default_tokenizer.tokenize(value, disable=disable_spacy)])
trie_accumulator[key] = value
return trie_accumulator | def function[_populate_trie_reducer, parameter[self, trie_accumulator, value]]:
constant[Adds value to trie accumulator]
if name[self]._case_sensitive begin[:]
variable[key] assign[=] call[name[self]._joiner.join, parameter[<ast.ListComp object at 0x7da1b0b58eb0>]]
call[name[trie_accumulator]][name[key]] assign[=] name[value]
return[name[trie_accumulator]] | keyword[def] identifier[_populate_trie_reducer] ( identifier[self] , identifier[trie_accumulator] = identifier[CharTrie] (), identifier[value] = literal[string] )-> identifier[CharTrie] :
literal[string]
keyword[if] identifier[self] . identifier[_case_sensitive] :
identifier[key] = identifier[self] . identifier[_joiner] . identifier[join] ([ identifier[x] . identifier[orth_] keyword[if] identifier[isinstance] ( identifier[x] , identifier[Token] ) keyword[else] identifier[x] keyword[for] identifier[x] keyword[in]
identifier[self] . identifier[_default_tokenizer] . identifier[tokenize] ( identifier[value] , identifier[disable] = identifier[disable_spacy] )])
keyword[else] :
identifier[key] = identifier[self] . identifier[_joiner] . identifier[join] ([ identifier[x] . identifier[lower_] keyword[if] identifier[isinstance] ( identifier[x] , identifier[Token] ) keyword[else] identifier[x] . identifier[lower] () keyword[for] identifier[x] keyword[in]
identifier[self] . identifier[_default_tokenizer] . identifier[tokenize] ( identifier[value] , identifier[disable] = identifier[disable_spacy] )])
identifier[trie_accumulator] [ identifier[key] ]= identifier[value]
keyword[return] identifier[trie_accumulator] | def _populate_trie_reducer(self, trie_accumulator=CharTrie(), value='') -> CharTrie:
"""Adds value to trie accumulator"""
if self._case_sensitive:
key = self._joiner.join([x.orth_ if isinstance(x, Token) else x for x in self._default_tokenizer.tokenize(value, disable=disable_spacy)]) # depends on [control=['if'], data=[]]
else:
key = self._joiner.join([x.lower_ if isinstance(x, Token) else x.lower() for x in self._default_tokenizer.tokenize(value, disable=disable_spacy)])
trie_accumulator[key] = value
return trie_accumulator |
def delete_tag(self, tag):
"""
::
DELETE /:login/machines/:id/tags/:tag
Delete a tag and its corresponding value on the machine.
"""
j, r = self.datacenter.request('DELETE', self.path + '/tags/' + tag)
r.raise_for_status() | def function[delete_tag, parameter[self, tag]]:
constant[
::
DELETE /:login/machines/:id/tags/:tag
Delete a tag and its corresponding value on the machine.
]
<ast.Tuple object at 0x7da212db4cd0> assign[=] call[name[self].datacenter.request, parameter[constant[DELETE], binary_operation[binary_operation[name[self].path + constant[/tags/]] + name[tag]]]]
call[name[r].raise_for_status, parameter[]] | keyword[def] identifier[delete_tag] ( identifier[self] , identifier[tag] ):
literal[string]
identifier[j] , identifier[r] = identifier[self] . identifier[datacenter] . identifier[request] ( literal[string] , identifier[self] . identifier[path] + literal[string] + identifier[tag] )
identifier[r] . identifier[raise_for_status] () | def delete_tag(self, tag):
"""
::
DELETE /:login/machines/:id/tags/:tag
Delete a tag and its corresponding value on the machine.
"""
(j, r) = self.datacenter.request('DELETE', self.path + '/tags/' + tag)
r.raise_for_status() |
def cross(*sequences):
"""
From: http://book.opensourceproject.org.cn/lamp/python/pythoncook2/opensource/0596007973/pythoncook2-chp-19-sect-9.html
"""
# visualize an odometer, with "wheels" displaying "digits"...:
wheels = map(iter, sequences)
digits = [it.next( ) for it in wheels]
while True:
yield tuple(digits)
for i in range(len(digits)-1, -1, -1):
try:
digits[i] = wheels[i].next( )
break
except StopIteration:
wheels[i] = iter(sequences[i])
digits[i] = wheels[i].next( )
else:
break | def function[cross, parameter[]]:
constant[
From: http://book.opensourceproject.org.cn/lamp/python/pythoncook2/opensource/0596007973/pythoncook2-chp-19-sect-9.html
]
variable[wheels] assign[=] call[name[map], parameter[name[iter], name[sequences]]]
variable[digits] assign[=] <ast.ListComp object at 0x7da18c4ce0e0>
while constant[True] begin[:]
<ast.Yield object at 0x7da18c4cfee0>
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[digits]]] - constant[1]], <ast.UnaryOp object at 0x7da18c4ccb50>, <ast.UnaryOp object at 0x7da18c4ce890>]]] begin[:]
<ast.Try object at 0x7da18c4ce860> | keyword[def] identifier[cross] (* identifier[sequences] ):
literal[string]
identifier[wheels] = identifier[map] ( identifier[iter] , identifier[sequences] )
identifier[digits] =[ identifier[it] . identifier[next] () keyword[for] identifier[it] keyword[in] identifier[wheels] ]
keyword[while] keyword[True] :
keyword[yield] identifier[tuple] ( identifier[digits] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[digits] )- literal[int] ,- literal[int] ,- literal[int] ):
keyword[try] :
identifier[digits] [ identifier[i] ]= identifier[wheels] [ identifier[i] ]. identifier[next] ()
keyword[break]
keyword[except] identifier[StopIteration] :
identifier[wheels] [ identifier[i] ]= identifier[iter] ( identifier[sequences] [ identifier[i] ])
identifier[digits] [ identifier[i] ]= identifier[wheels] [ identifier[i] ]. identifier[next] ()
keyword[else] :
keyword[break] | def cross(*sequences):
"""
From: http://book.opensourceproject.org.cn/lamp/python/pythoncook2/opensource/0596007973/pythoncook2-chp-19-sect-9.html
"""
# visualize an odometer, with "wheels" displaying "digits"...:
wheels = map(iter, sequences)
digits = [it.next() for it in wheels]
while True:
yield tuple(digits)
for i in range(len(digits) - 1, -1, -1):
try:
digits[i] = wheels[i].next()
break # depends on [control=['try'], data=[]]
except StopIteration:
wheels[i] = iter(sequences[i])
digits[i] = wheels[i].next() # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']]
else:
break # depends on [control=['while'], data=[]] |
def analyze_dists(dists, cutoff=1000, alpha=.1):
"""
The dists can show bimodal distribution if they come from a mate-pair
library. Assume bimodal distribution and then separate the two peaks. Based
on the percentage in each peak, we can decide if it is indeed one peak or
two peaks, and report the median respectively.
"""
peak0 = [d for d in dists if d < cutoff]
peak1 = [d for d in dists if d >= cutoff]
c0, c1 = len(peak0), len(peak1)
logging.debug("Component counts: {0} {1}".format(c0, c1))
if c0 == 0 or c1 == 0 or float(c1) / len(dists) < alpha:
logging.debug("Single peak identified ({0} / {1} < {2})".\
format(c1, len(dists), alpha))
return np.median(dists)
peak0_median = np.median(peak0)
peak1_median = np.median(peak1)
logging.debug("Dual peaks identified: {0}bp ({1}), {2}bp ({3}) (selected)".\
format(int(peak0_median), c0, int(peak1_median), c1))
return peak1_median | def function[analyze_dists, parameter[dists, cutoff, alpha]]:
constant[
The dists can show bimodal distribution if they come from a mate-pair
library. Assume bimodal distribution and then separate the two peaks. Based
on the percentage in each peak, we can decide if it is indeed one peak or
two peaks, and report the median respectively.
]
variable[peak0] assign[=] <ast.ListComp object at 0x7da18c4cf0a0>
variable[peak1] assign[=] <ast.ListComp object at 0x7da18c4ccee0>
<ast.Tuple object at 0x7da18c4cd8d0> assign[=] tuple[[<ast.Call object at 0x7da18c4cd690>, <ast.Call object at 0x7da18fe937c0>]]
call[name[logging].debug, parameter[call[constant[Component counts: {0} {1}].format, parameter[name[c0], name[c1]]]]]
if <ast.BoolOp object at 0x7da18fe90e50> begin[:]
call[name[logging].debug, parameter[call[constant[Single peak identified ({0} / {1} < {2})].format, parameter[name[c1], call[name[len], parameter[name[dists]]], name[alpha]]]]]
return[call[name[np].median, parameter[name[dists]]]]
variable[peak0_median] assign[=] call[name[np].median, parameter[name[peak0]]]
variable[peak1_median] assign[=] call[name[np].median, parameter[name[peak1]]]
call[name[logging].debug, parameter[call[constant[Dual peaks identified: {0}bp ({1}), {2}bp ({3}) (selected)].format, parameter[call[name[int], parameter[name[peak0_median]]], name[c0], call[name[int], parameter[name[peak1_median]]], name[c1]]]]]
return[name[peak1_median]] | keyword[def] identifier[analyze_dists] ( identifier[dists] , identifier[cutoff] = literal[int] , identifier[alpha] = literal[int] ):
literal[string]
identifier[peak0] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[dists] keyword[if] identifier[d] < identifier[cutoff] ]
identifier[peak1] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[dists] keyword[if] identifier[d] >= identifier[cutoff] ]
identifier[c0] , identifier[c1] = identifier[len] ( identifier[peak0] ), identifier[len] ( identifier[peak1] )
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[c0] , identifier[c1] ))
keyword[if] identifier[c0] == literal[int] keyword[or] identifier[c1] == literal[int] keyword[or] identifier[float] ( identifier[c1] )/ identifier[len] ( identifier[dists] )< identifier[alpha] :
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[c1] , identifier[len] ( identifier[dists] ), identifier[alpha] ))
keyword[return] identifier[np] . identifier[median] ( identifier[dists] )
identifier[peak0_median] = identifier[np] . identifier[median] ( identifier[peak0] )
identifier[peak1_median] = identifier[np] . identifier[median] ( identifier[peak1] )
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[int] ( identifier[peak0_median] ), identifier[c0] , identifier[int] ( identifier[peak1_median] ), identifier[c1] ))
keyword[return] identifier[peak1_median] | def analyze_dists(dists, cutoff=1000, alpha=0.1):
"""
The dists can show bimodal distribution if they come from a mate-pair
library. Assume bimodal distribution and then separate the two peaks. Based
on the percentage in each peak, we can decide if it is indeed one peak or
two peaks, and report the median respectively.
"""
peak0 = [d for d in dists if d < cutoff]
peak1 = [d for d in dists if d >= cutoff]
(c0, c1) = (len(peak0), len(peak1))
logging.debug('Component counts: {0} {1}'.format(c0, c1))
if c0 == 0 or c1 == 0 or float(c1) / len(dists) < alpha:
logging.debug('Single peak identified ({0} / {1} < {2})'.format(c1, len(dists), alpha))
return np.median(dists) # depends on [control=['if'], data=[]]
peak0_median = np.median(peak0)
peak1_median = np.median(peak1)
logging.debug('Dual peaks identified: {0}bp ({1}), {2}bp ({3}) (selected)'.format(int(peak0_median), c0, int(peak1_median), c1))
return peak1_median |
def set(self, instance, value, **kw):
"""Decodes base64 value and set the file object
"""
value = str(value).decode("base64")
# handle the filename
if "filename" not in kw:
logger.debug("FielFieldManager::set: No Filename detected "
"-> using title or id")
kw["filename"] = kw.get("id") or kw.get("title")
self._set(instance, value, **kw) | def function[set, parameter[self, instance, value]]:
constant[Decodes base64 value and set the file object
]
variable[value] assign[=] call[call[name[str], parameter[name[value]]].decode, parameter[constant[base64]]]
if compare[constant[filename] <ast.NotIn object at 0x7da2590d7190> name[kw]] begin[:]
call[name[logger].debug, parameter[constant[FielFieldManager::set: No Filename detected -> using title or id]]]
call[name[kw]][constant[filename]] assign[=] <ast.BoolOp object at 0x7da20c6a8a30>
call[name[self]._set, parameter[name[instance], name[value]]] | keyword[def] identifier[set] ( identifier[self] , identifier[instance] , identifier[value] ,** identifier[kw] ):
literal[string]
identifier[value] = identifier[str] ( identifier[value] ). identifier[decode] ( literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[kw] :
identifier[logger] . identifier[debug] ( literal[string]
literal[string] )
identifier[kw] [ literal[string] ]= identifier[kw] . identifier[get] ( literal[string] ) keyword[or] identifier[kw] . identifier[get] ( literal[string] )
identifier[self] . identifier[_set] ( identifier[instance] , identifier[value] ,** identifier[kw] ) | def set(self, instance, value, **kw):
"""Decodes base64 value and set the file object
"""
value = str(value).decode('base64')
# handle the filename
if 'filename' not in kw:
logger.debug('FielFieldManager::set: No Filename detected -> using title or id')
kw['filename'] = kw.get('id') or kw.get('title') # depends on [control=['if'], data=['kw']]
self._set(instance, value, **kw) |
def get_email_link(application):
""" Retrieve a link that can be emailed to the applicant. """
# don't use secret_token unless we have to
if (application.content_type.model == 'person'
and application.applicant.has_usable_password()):
url = '%s/applications/%d/' % (
settings.REGISTRATION_BASE_URL, application.pk)
is_secret = False
else:
url = '%s/applications/%s/' % (
settings.REGISTRATION_BASE_URL, application.secret_token)
is_secret = True
return url, is_secret | def function[get_email_link, parameter[application]]:
constant[ Retrieve a link that can be emailed to the applicant. ]
if <ast.BoolOp object at 0x7da18bc70e50> begin[:]
variable[url] assign[=] binary_operation[constant[%s/applications/%d/] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18bc71300>, <ast.Attribute object at 0x7da18bc702b0>]]]
variable[is_secret] assign[=] constant[False]
return[tuple[[<ast.Name object at 0x7da1b0336860>, <ast.Name object at 0x7da1b0337190>]]] | keyword[def] identifier[get_email_link] ( identifier[application] ):
literal[string]
keyword[if] ( identifier[application] . identifier[content_type] . identifier[model] == literal[string]
keyword[and] identifier[application] . identifier[applicant] . identifier[has_usable_password] ()):
identifier[url] = literal[string] %(
identifier[settings] . identifier[REGISTRATION_BASE_URL] , identifier[application] . identifier[pk] )
identifier[is_secret] = keyword[False]
keyword[else] :
identifier[url] = literal[string] %(
identifier[settings] . identifier[REGISTRATION_BASE_URL] , identifier[application] . identifier[secret_token] )
identifier[is_secret] = keyword[True]
keyword[return] identifier[url] , identifier[is_secret] | def get_email_link(application):
""" Retrieve a link that can be emailed to the applicant. """
# don't use secret_token unless we have to
if application.content_type.model == 'person' and application.applicant.has_usable_password():
url = '%s/applications/%d/' % (settings.REGISTRATION_BASE_URL, application.pk)
is_secret = False # depends on [control=['if'], data=[]]
else:
url = '%s/applications/%s/' % (settings.REGISTRATION_BASE_URL, application.secret_token)
is_secret = True
return (url, is_secret) |
def validate(self, generator, axesToMove=None, **kwargs):
# type: (AGenerator, AAxesToMove, **Any) -> AConfigureParams
"""Validate configuration parameters and return validated parameters.
Doesn't take device state into account so can be run in any state
"""
iterations = 10
# We will return this, so make sure we fill in defaults
for k, default in self._block.configure.defaults.items():
if k not in kwargs:
kwargs[k] = default
# The validated parameters we will eventually return
params = ConfigureParams(generator, axesToMove, **kwargs)
# Make some tasks just for validate
part_contexts = self.create_part_contexts()
# Get any status from all parts
status_part_info = self.run_hooks(
ReportStatusHook(p, c) for p, c in part_contexts.items())
while iterations > 0:
# Try up to 10 times to get a valid set of parameters
iterations -= 1
# Validate the params with all the parts
validate_part_info = self.run_hooks(
ValidateHook(p, c, status_part_info, **kwargs)
for p, c, kwargs in self._part_params(part_contexts, params))
tweaks = ParameterTweakInfo.filter_values(validate_part_info)
if tweaks:
for tweak in tweaks:
deserialized = self._block.configure.takes.elements[
tweak.parameter].validate(tweak.value)
setattr(params, tweak.parameter, deserialized)
self.log.debug(
"Tweaking %s to %s", tweak.parameter, deserialized)
else:
# Consistent set, just return the params
return params
raise ValueError("Could not get a consistent set of parameters") | def function[validate, parameter[self, generator, axesToMove]]:
constant[Validate configuration parameters and return validated parameters.
Doesn't take device state into account so can be run in any state
]
variable[iterations] assign[=] constant[10]
for taget[tuple[[<ast.Name object at 0x7da18ede5f00>, <ast.Name object at 0x7da18ede7100>]]] in starred[call[name[self]._block.configure.defaults.items, parameter[]]] begin[:]
if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][name[k]] assign[=] name[default]
variable[params] assign[=] call[name[ConfigureParams], parameter[name[generator], name[axesToMove]]]
variable[part_contexts] assign[=] call[name[self].create_part_contexts, parameter[]]
variable[status_part_info] assign[=] call[name[self].run_hooks, parameter[<ast.GeneratorExp object at 0x7da18ede6a40>]]
while compare[name[iterations] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18ede65c0>
variable[validate_part_info] assign[=] call[name[self].run_hooks, parameter[<ast.GeneratorExp object at 0x7da18ede4730>]]
variable[tweaks] assign[=] call[name[ParameterTweakInfo].filter_values, parameter[name[validate_part_info]]]
if name[tweaks] begin[:]
for taget[name[tweak]] in starred[name[tweaks]] begin[:]
variable[deserialized] assign[=] call[call[name[self]._block.configure.takes.elements][name[tweak].parameter].validate, parameter[name[tweak].value]]
call[name[setattr], parameter[name[params], name[tweak].parameter, name[deserialized]]]
call[name[self].log.debug, parameter[constant[Tweaking %s to %s], name[tweak].parameter, name[deserialized]]]
<ast.Raise object at 0x7da1b04ff460> | keyword[def] identifier[validate] ( identifier[self] , identifier[generator] , identifier[axesToMove] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[iterations] = literal[int]
keyword[for] identifier[k] , identifier[default] keyword[in] identifier[self] . identifier[_block] . identifier[configure] . identifier[defaults] . identifier[items] ():
keyword[if] identifier[k] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ identifier[k] ]= identifier[default]
identifier[params] = identifier[ConfigureParams] ( identifier[generator] , identifier[axesToMove] ,** identifier[kwargs] )
identifier[part_contexts] = identifier[self] . identifier[create_part_contexts] ()
identifier[status_part_info] = identifier[self] . identifier[run_hooks] (
identifier[ReportStatusHook] ( identifier[p] , identifier[c] ) keyword[for] identifier[p] , identifier[c] keyword[in] identifier[part_contexts] . identifier[items] ())
keyword[while] identifier[iterations] > literal[int] :
identifier[iterations] -= literal[int]
identifier[validate_part_info] = identifier[self] . identifier[run_hooks] (
identifier[ValidateHook] ( identifier[p] , identifier[c] , identifier[status_part_info] ,** identifier[kwargs] )
keyword[for] identifier[p] , identifier[c] , identifier[kwargs] keyword[in] identifier[self] . identifier[_part_params] ( identifier[part_contexts] , identifier[params] ))
identifier[tweaks] = identifier[ParameterTweakInfo] . identifier[filter_values] ( identifier[validate_part_info] )
keyword[if] identifier[tweaks] :
keyword[for] identifier[tweak] keyword[in] identifier[tweaks] :
identifier[deserialized] = identifier[self] . identifier[_block] . identifier[configure] . identifier[takes] . identifier[elements] [
identifier[tweak] . identifier[parameter] ]. identifier[validate] ( identifier[tweak] . identifier[value] )
identifier[setattr] ( identifier[params] , identifier[tweak] . identifier[parameter] , identifier[deserialized] )
identifier[self] . identifier[log] . identifier[debug] (
literal[string] , identifier[tweak] . identifier[parameter] , identifier[deserialized] )
keyword[else] :
keyword[return] identifier[params]
keyword[raise] identifier[ValueError] ( literal[string] ) | def validate(self, generator, axesToMove=None, **kwargs):
# type: (AGenerator, AAxesToMove, **Any) -> AConfigureParams
"Validate configuration parameters and return validated parameters.\n\n Doesn't take device state into account so can be run in any state\n "
iterations = 10
# We will return this, so make sure we fill in defaults
for (k, default) in self._block.configure.defaults.items():
if k not in kwargs:
kwargs[k] = default # depends on [control=['if'], data=['k', 'kwargs']] # depends on [control=['for'], data=[]]
# The validated parameters we will eventually return
params = ConfigureParams(generator, axesToMove, **kwargs)
# Make some tasks just for validate
part_contexts = self.create_part_contexts()
# Get any status from all parts
status_part_info = self.run_hooks((ReportStatusHook(p, c) for (p, c) in part_contexts.items()))
while iterations > 0:
# Try up to 10 times to get a valid set of parameters
iterations -= 1
# Validate the params with all the parts
validate_part_info = self.run_hooks((ValidateHook(p, c, status_part_info, **kwargs) for (p, c, kwargs) in self._part_params(part_contexts, params)))
tweaks = ParameterTweakInfo.filter_values(validate_part_info)
if tweaks:
for tweak in tweaks:
deserialized = self._block.configure.takes.elements[tweak.parameter].validate(tweak.value)
setattr(params, tweak.parameter, deserialized)
self.log.debug('Tweaking %s to %s', tweak.parameter, deserialized) # depends on [control=['for'], data=['tweak']] # depends on [control=['if'], data=[]]
else:
# Consistent set, just return the params
return params # depends on [control=['while'], data=['iterations']]
raise ValueError('Could not get a consistent set of parameters') |
def getSubjectSequence(self, title):
"""
Obtain information about a subject sequence given its title.
@param title: A C{str} sequence title from a DIAMOND hit.
@raise KeyError: If the C{title} is not present in the DIAMOND
database.
@return: An C{AAReadWithX} instance.
"""
if self._subjectTitleToSubject is None:
if self._databaseFilename is None:
# An Sqlite3 database is used to look up subjects.
self._subjectTitleToSubject = SqliteIndex(
self._sqliteDatabaseFilename,
fastaDirectory=self._databaseDirectory,
readClass=AAReadWithX)
else:
# Build a dict to look up subjects.
titles = {}
for read in FastaReads(self._databaseFilename,
readClass=AAReadWithX):
titles[read.id] = read
self._subjectTitleToSubject = titles
return self._subjectTitleToSubject[title] | def function[getSubjectSequence, parameter[self, title]]:
constant[
Obtain information about a subject sequence given its title.
@param title: A C{str} sequence title from a DIAMOND hit.
@raise KeyError: If the C{title} is not present in the DIAMOND
database.
@return: An C{AAReadWithX} instance.
]
if compare[name[self]._subjectTitleToSubject is constant[None]] begin[:]
if compare[name[self]._databaseFilename is constant[None]] begin[:]
name[self]._subjectTitleToSubject assign[=] call[name[SqliteIndex], parameter[name[self]._sqliteDatabaseFilename]]
return[call[name[self]._subjectTitleToSubject][name[title]]] | keyword[def] identifier[getSubjectSequence] ( identifier[self] , identifier[title] ):
literal[string]
keyword[if] identifier[self] . identifier[_subjectTitleToSubject] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[_databaseFilename] keyword[is] keyword[None] :
identifier[self] . identifier[_subjectTitleToSubject] = identifier[SqliteIndex] (
identifier[self] . identifier[_sqliteDatabaseFilename] ,
identifier[fastaDirectory] = identifier[self] . identifier[_databaseDirectory] ,
identifier[readClass] = identifier[AAReadWithX] )
keyword[else] :
identifier[titles] ={}
keyword[for] identifier[read] keyword[in] identifier[FastaReads] ( identifier[self] . identifier[_databaseFilename] ,
identifier[readClass] = identifier[AAReadWithX] ):
identifier[titles] [ identifier[read] . identifier[id] ]= identifier[read]
identifier[self] . identifier[_subjectTitleToSubject] = identifier[titles]
keyword[return] identifier[self] . identifier[_subjectTitleToSubject] [ identifier[title] ] | def getSubjectSequence(self, title):
"""
Obtain information about a subject sequence given its title.
@param title: A C{str} sequence title from a DIAMOND hit.
@raise KeyError: If the C{title} is not present in the DIAMOND
database.
@return: An C{AAReadWithX} instance.
"""
if self._subjectTitleToSubject is None:
if self._databaseFilename is None:
# An Sqlite3 database is used to look up subjects.
self._subjectTitleToSubject = SqliteIndex(self._sqliteDatabaseFilename, fastaDirectory=self._databaseDirectory, readClass=AAReadWithX) # depends on [control=['if'], data=[]]
else:
# Build a dict to look up subjects.
titles = {}
for read in FastaReads(self._databaseFilename, readClass=AAReadWithX):
titles[read.id] = read # depends on [control=['for'], data=['read']]
self._subjectTitleToSubject = titles # depends on [control=['if'], data=[]]
return self._subjectTitleToSubject[title] |
def topil(self, **kwargs):
"""
Get PIL Image.
:return: :py:class:`PIL.Image`, or `None` if the composed image is not
available.
"""
if self.has_preview():
return pil_io.convert_image_data_to_pil(self._record, **kwargs)
return None | def function[topil, parameter[self]]:
constant[
Get PIL Image.
:return: :py:class:`PIL.Image`, or `None` if the composed image is not
available.
]
if call[name[self].has_preview, parameter[]] begin[:]
return[call[name[pil_io].convert_image_data_to_pil, parameter[name[self]._record]]]
return[constant[None]] | keyword[def] identifier[topil] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[has_preview] ():
keyword[return] identifier[pil_io] . identifier[convert_image_data_to_pil] ( identifier[self] . identifier[_record] ,** identifier[kwargs] )
keyword[return] keyword[None] | def topil(self, **kwargs):
"""
Get PIL Image.
:return: :py:class:`PIL.Image`, or `None` if the composed image is not
available.
"""
if self.has_preview():
return pil_io.convert_image_data_to_pil(self._record, **kwargs) # depends on [control=['if'], data=[]]
return None |
def normalize_unf(self, value):
"""Normalizes Unicode Normal Form (to NFC); called if
``unicode_normal`` is true."""
if isinstance(value, unicode):
return unicodedata.normalize('NFC', value)
else:
return value | def function[normalize_unf, parameter[self, value]]:
constant[Normalizes Unicode Normal Form (to NFC); called if
``unicode_normal`` is true.]
if call[name[isinstance], parameter[name[value], name[unicode]]] begin[:]
return[call[name[unicodedata].normalize, parameter[constant[NFC], name[value]]]] | keyword[def] identifier[normalize_unf] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[unicode] ):
keyword[return] identifier[unicodedata] . identifier[normalize] ( literal[string] , identifier[value] )
keyword[else] :
keyword[return] identifier[value] | def normalize_unf(self, value):
"""Normalizes Unicode Normal Form (to NFC); called if
``unicode_normal`` is true."""
if isinstance(value, unicode):
return unicodedata.normalize('NFC', value) # depends on [control=['if'], data=[]]
else:
return value |
def circDiff(length, ary1, ary2):
"""calculate the circular difference between two paired arrays.
This function will return the difference between pairs of numbers; however
the difference that is output will be minimal in the sense that if we
assume an array with length = 4: [0, 1, 2, 3], the difference between
0 and 3 will not be 3, but 1 (i.e. circular difference)"""
x = np.arange(length)
mod = length % 2
if mod == 0:
temp = np.ones(length)
temp[length/2:] = -1
else:
x = x - np.floor(length/2)
temp = np.copy(x)
temp[np.less(x, 0)] = 1
temp[np.greater(x, 0)] = -1
x = np.cumsum(temp)
diagDiffmat = np.empty((length, length))
for idx in np.arange(length):
x = np.roll(x, 1)
diagDiffmat[idx, :] = x
# return diagDiffmat[ary1][ary2]
flat = diagDiffmat.flatten()
ind = ary1*diagDiffmat.shape[0] + ary2
ind = ind.astype('int')
return flat[ind] | def function[circDiff, parameter[length, ary1, ary2]]:
constant[calculate the circular difference between two paired arrays.
This function will return the difference between pairs of numbers; however
the difference that is output will be minimal in the sense that if we
assume an array with length = 4: [0, 1, 2, 3], the difference between
0 and 3 will not be 3, but 1 (i.e. circular difference)]
variable[x] assign[=] call[name[np].arange, parameter[name[length]]]
variable[mod] assign[=] binary_operation[name[length] <ast.Mod object at 0x7da2590d6920> constant[2]]
if compare[name[mod] equal[==] constant[0]] begin[:]
variable[temp] assign[=] call[name[np].ones, parameter[name[length]]]
call[name[temp]][<ast.Slice object at 0x7da20c76e380>] assign[=] <ast.UnaryOp object at 0x7da20c76d6c0>
variable[x] assign[=] call[name[np].cumsum, parameter[name[temp]]]
variable[diagDiffmat] assign[=] call[name[np].empty, parameter[tuple[[<ast.Name object at 0x7da20c76cfa0>, <ast.Name object at 0x7da20c76f700>]]]]
for taget[name[idx]] in starred[call[name[np].arange, parameter[name[length]]]] begin[:]
variable[x] assign[=] call[name[np].roll, parameter[name[x], constant[1]]]
call[name[diagDiffmat]][tuple[[<ast.Name object at 0x7da20c76c1c0>, <ast.Slice object at 0x7da20c76e800>]]] assign[=] name[x]
variable[flat] assign[=] call[name[diagDiffmat].flatten, parameter[]]
variable[ind] assign[=] binary_operation[binary_operation[name[ary1] * call[name[diagDiffmat].shape][constant[0]]] + name[ary2]]
variable[ind] assign[=] call[name[ind].astype, parameter[constant[int]]]
return[call[name[flat]][name[ind]]] | keyword[def] identifier[circDiff] ( identifier[length] , identifier[ary1] , identifier[ary2] ):
literal[string]
identifier[x] = identifier[np] . identifier[arange] ( identifier[length] )
identifier[mod] = identifier[length] % literal[int]
keyword[if] identifier[mod] == literal[int] :
identifier[temp] = identifier[np] . identifier[ones] ( identifier[length] )
identifier[temp] [ identifier[length] / literal[int] :]=- literal[int]
keyword[else] :
identifier[x] = identifier[x] - identifier[np] . identifier[floor] ( identifier[length] / literal[int] )
identifier[temp] = identifier[np] . identifier[copy] ( identifier[x] )
identifier[temp] [ identifier[np] . identifier[less] ( identifier[x] , literal[int] )]= literal[int]
identifier[temp] [ identifier[np] . identifier[greater] ( identifier[x] , literal[int] )]=- literal[int]
identifier[x] = identifier[np] . identifier[cumsum] ( identifier[temp] )
identifier[diagDiffmat] = identifier[np] . identifier[empty] (( identifier[length] , identifier[length] ))
keyword[for] identifier[idx] keyword[in] identifier[np] . identifier[arange] ( identifier[length] ):
identifier[x] = identifier[np] . identifier[roll] ( identifier[x] , literal[int] )
identifier[diagDiffmat] [ identifier[idx] ,:]= identifier[x]
identifier[flat] = identifier[diagDiffmat] . identifier[flatten] ()
identifier[ind] = identifier[ary1] * identifier[diagDiffmat] . identifier[shape] [ literal[int] ]+ identifier[ary2]
identifier[ind] = identifier[ind] . identifier[astype] ( literal[string] )
keyword[return] identifier[flat] [ identifier[ind] ] | def circDiff(length, ary1, ary2):
"""calculate the circular difference between two paired arrays.
This function will return the difference between pairs of numbers; however
the difference that is output will be minimal in the sense that if we
assume an array with length = 4: [0, 1, 2, 3], the difference between
0 and 3 will not be 3, but 1 (i.e. circular difference)"""
x = np.arange(length)
mod = length % 2
if mod == 0:
temp = np.ones(length)
temp[length / 2:] = -1 # depends on [control=['if'], data=[]]
else:
x = x - np.floor(length / 2)
temp = np.copy(x)
temp[np.less(x, 0)] = 1
temp[np.greater(x, 0)] = -1
x = np.cumsum(temp)
diagDiffmat = np.empty((length, length))
for idx in np.arange(length):
x = np.roll(x, 1)
diagDiffmat[idx, :] = x # depends on [control=['for'], data=['idx']]
# return diagDiffmat[ary1][ary2]
flat = diagDiffmat.flatten()
ind = ary1 * diagDiffmat.shape[0] + ary2
ind = ind.astype('int')
return flat[ind] |
def get_tiles(self):
"""Get all TileCoordinates contained in the region"""
for x, y in griditer(self.root_tile.x, self.root_tile.y, ncol=self.tiles_per_row):
yield TileCoordinate(self.root_tile.zoom, x, y) | def function[get_tiles, parameter[self]]:
constant[Get all TileCoordinates contained in the region]
for taget[tuple[[<ast.Name object at 0x7da2046234c0>, <ast.Name object at 0x7da2046217b0>]]] in starred[call[name[griditer], parameter[name[self].root_tile.x, name[self].root_tile.y]]] begin[:]
<ast.Yield object at 0x7da204622fe0> | keyword[def] identifier[get_tiles] ( identifier[self] ):
literal[string]
keyword[for] identifier[x] , identifier[y] keyword[in] identifier[griditer] ( identifier[self] . identifier[root_tile] . identifier[x] , identifier[self] . identifier[root_tile] . identifier[y] , identifier[ncol] = identifier[self] . identifier[tiles_per_row] ):
keyword[yield] identifier[TileCoordinate] ( identifier[self] . identifier[root_tile] . identifier[zoom] , identifier[x] , identifier[y] ) | def get_tiles(self):
"""Get all TileCoordinates contained in the region"""
for (x, y) in griditer(self.root_tile.x, self.root_tile.y, ncol=self.tiles_per_row):
yield TileCoordinate(self.root_tile.zoom, x, y) # depends on [control=['for'], data=[]] |
def from_dict(cls, parm, pool = None):
""" Create new Pool-object from dict.
Suitable for creating objects from XML-RPC data.
All available keys must exist.
"""
if pool is None:
pool = Pool()
pool.id = parm['id']
pool.name = parm['name']
pool.description = parm['description']
pool.default_type = parm['default_type']
pool.ipv4_default_prefix_length = parm['ipv4_default_prefix_length']
pool.ipv6_default_prefix_length = parm['ipv6_default_prefix_length']
for val in ('member_prefixes_v4', 'member_prefixes_v6',
'used_prefixes_v4', 'used_prefixes_v6', 'free_prefixes_v4',
'free_prefixes_v6', 'total_prefixes_v4', 'total_prefixes_v6',
'total_addresses_v4', 'total_addresses_v6', 'used_addresses_v4',
'used_addresses_v6', 'free_addresses_v4', 'free_addresses_v6'):
if parm[val] is not None:
setattr(pool, val, int(parm[val]))
pool.tags = {}
for tag_name in parm['tags']:
tag = Tag.from_dict({'name': tag_name })
pool.tags[tag_name] = tag
pool.avps = parm['avps']
# store VRF object in pool.vrf
if parm['vrf_id'] is not None:
pool.vrf = VRF.get(parm['vrf_id'])
return pool | def function[from_dict, parameter[cls, parm, pool]]:
constant[ Create new Pool-object from dict.
Suitable for creating objects from XML-RPC data.
All available keys must exist.
]
if compare[name[pool] is constant[None]] begin[:]
variable[pool] assign[=] call[name[Pool], parameter[]]
name[pool].id assign[=] call[name[parm]][constant[id]]
name[pool].name assign[=] call[name[parm]][constant[name]]
name[pool].description assign[=] call[name[parm]][constant[description]]
name[pool].default_type assign[=] call[name[parm]][constant[default_type]]
name[pool].ipv4_default_prefix_length assign[=] call[name[parm]][constant[ipv4_default_prefix_length]]
name[pool].ipv6_default_prefix_length assign[=] call[name[parm]][constant[ipv6_default_prefix_length]]
for taget[name[val]] in starred[tuple[[<ast.Constant object at 0x7da2041d9030>, <ast.Constant object at 0x7da2041db7f0>, <ast.Constant object at 0x7da2041dadd0>, <ast.Constant object at 0x7da2041da0b0>, <ast.Constant object at 0x7da2041d84c0>, <ast.Constant object at 0x7da2041db610>, <ast.Constant object at 0x7da2041d8160>, <ast.Constant object at 0x7da2041dbca0>, <ast.Constant object at 0x7da2041d9870>, <ast.Constant object at 0x7da2041dba30>, <ast.Constant object at 0x7da2041d87c0>, <ast.Constant object at 0x7da2041db8b0>, <ast.Constant object at 0x7da2041da080>, <ast.Constant object at 0x7da2041db760>]]] begin[:]
if compare[call[name[parm]][name[val]] is_not constant[None]] begin[:]
call[name[setattr], parameter[name[pool], name[val], call[name[int], parameter[call[name[parm]][name[val]]]]]]
name[pool].tags assign[=] dictionary[[], []]
for taget[name[tag_name]] in starred[call[name[parm]][constant[tags]]] begin[:]
variable[tag] assign[=] call[name[Tag].from_dict, parameter[dictionary[[<ast.Constant object at 0x7da2041d9450>], [<ast.Name object at 0x7da2041da620>]]]]
call[name[pool].tags][name[tag_name]] assign[=] name[tag]
name[pool].avps assign[=] call[name[parm]][constant[avps]]
if compare[call[name[parm]][constant[vrf_id]] is_not constant[None]] begin[:]
name[pool].vrf assign[=] call[name[VRF].get, parameter[call[name[parm]][constant[vrf_id]]]]
return[name[pool]] | keyword[def] identifier[from_dict] ( identifier[cls] , identifier[parm] , identifier[pool] = keyword[None] ):
literal[string]
keyword[if] identifier[pool] keyword[is] keyword[None] :
identifier[pool] = identifier[Pool] ()
identifier[pool] . identifier[id] = identifier[parm] [ literal[string] ]
identifier[pool] . identifier[name] = identifier[parm] [ literal[string] ]
identifier[pool] . identifier[description] = identifier[parm] [ literal[string] ]
identifier[pool] . identifier[default_type] = identifier[parm] [ literal[string] ]
identifier[pool] . identifier[ipv4_default_prefix_length] = identifier[parm] [ literal[string] ]
identifier[pool] . identifier[ipv6_default_prefix_length] = identifier[parm] [ literal[string] ]
keyword[for] identifier[val] keyword[in] ( literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ):
keyword[if] identifier[parm] [ identifier[val] ] keyword[is] keyword[not] keyword[None] :
identifier[setattr] ( identifier[pool] , identifier[val] , identifier[int] ( identifier[parm] [ identifier[val] ]))
identifier[pool] . identifier[tags] ={}
keyword[for] identifier[tag_name] keyword[in] identifier[parm] [ literal[string] ]:
identifier[tag] = identifier[Tag] . identifier[from_dict] ({ literal[string] : identifier[tag_name] })
identifier[pool] . identifier[tags] [ identifier[tag_name] ]= identifier[tag]
identifier[pool] . identifier[avps] = identifier[parm] [ literal[string] ]
keyword[if] identifier[parm] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
identifier[pool] . identifier[vrf] = identifier[VRF] . identifier[get] ( identifier[parm] [ literal[string] ])
keyword[return] identifier[pool] | def from_dict(cls, parm, pool=None):
""" Create new Pool-object from dict.
Suitable for creating objects from XML-RPC data.
All available keys must exist.
"""
if pool is None:
pool = Pool() # depends on [control=['if'], data=['pool']]
pool.id = parm['id']
pool.name = parm['name']
pool.description = parm['description']
pool.default_type = parm['default_type']
pool.ipv4_default_prefix_length = parm['ipv4_default_prefix_length']
pool.ipv6_default_prefix_length = parm['ipv6_default_prefix_length']
for val in ('member_prefixes_v4', 'member_prefixes_v6', 'used_prefixes_v4', 'used_prefixes_v6', 'free_prefixes_v4', 'free_prefixes_v6', 'total_prefixes_v4', 'total_prefixes_v6', 'total_addresses_v4', 'total_addresses_v6', 'used_addresses_v4', 'used_addresses_v6', 'free_addresses_v4', 'free_addresses_v6'):
if parm[val] is not None:
setattr(pool, val, int(parm[val])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['val']]
pool.tags = {}
for tag_name in parm['tags']:
tag = Tag.from_dict({'name': tag_name})
pool.tags[tag_name] = tag # depends on [control=['for'], data=['tag_name']]
pool.avps = parm['avps']
# store VRF object in pool.vrf
if parm['vrf_id'] is not None:
pool.vrf = VRF.get(parm['vrf_id']) # depends on [control=['if'], data=[]]
return pool |
def artifact_already_exists(cli, meta, owner):
"""
Checks to see whether the built recipe (aka distribution) already
exists on the owner/user's binstar account.
"""
distro_name = '{}/{}.tar.bz2'.format(conda.config.subdir, meta.dist())
try:
dist_info = cli.distribution(owner, meta.name(), meta.version(), distro_name)
except binstar_client.errors.NotFound:
dist_info = {}
return bool(dist_info) | def function[artifact_already_exists, parameter[cli, meta, owner]]:
constant[
Checks to see whether the built recipe (aka distribution) already
exists on the owner/user's binstar account.
]
variable[distro_name] assign[=] call[constant[{}/{}.tar.bz2].format, parameter[name[conda].config.subdir, call[name[meta].dist, parameter[]]]]
<ast.Try object at 0x7da1b0a8a920>
return[call[name[bool], parameter[name[dist_info]]]] | keyword[def] identifier[artifact_already_exists] ( identifier[cli] , identifier[meta] , identifier[owner] ):
literal[string]
identifier[distro_name] = literal[string] . identifier[format] ( identifier[conda] . identifier[config] . identifier[subdir] , identifier[meta] . identifier[dist] ())
keyword[try] :
identifier[dist_info] = identifier[cli] . identifier[distribution] ( identifier[owner] , identifier[meta] . identifier[name] (), identifier[meta] . identifier[version] (), identifier[distro_name] )
keyword[except] identifier[binstar_client] . identifier[errors] . identifier[NotFound] :
identifier[dist_info] ={}
keyword[return] identifier[bool] ( identifier[dist_info] ) | def artifact_already_exists(cli, meta, owner):
"""
Checks to see whether the built recipe (aka distribution) already
exists on the owner/user's binstar account.
"""
distro_name = '{}/{}.tar.bz2'.format(conda.config.subdir, meta.dist())
try:
dist_info = cli.distribution(owner, meta.name(), meta.version(), distro_name) # depends on [control=['try'], data=[]]
except binstar_client.errors.NotFound:
dist_info = {} # depends on [control=['except'], data=[]]
return bool(dist_info) |
def retrieve_request(self, url):
"""Open the given url and decode and return the response
url: The url to open.
"""
try:
data = urlopen(url)
except:
print("Error Retrieving Data from Steam")
sys.exit(2)
return data.read().decode('utf-8') | def function[retrieve_request, parameter[self, url]]:
constant[Open the given url and decode and return the response
url: The url to open.
]
<ast.Try object at 0x7da207f02020>
return[call[call[name[data].read, parameter[]].decode, parameter[constant[utf-8]]]] | keyword[def] identifier[retrieve_request] ( identifier[self] , identifier[url] ):
literal[string]
keyword[try] :
identifier[data] = identifier[urlopen] ( identifier[url] )
keyword[except] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[return] identifier[data] . identifier[read] (). identifier[decode] ( literal[string] ) | def retrieve_request(self, url):
"""Open the given url and decode and return the response
url: The url to open.
"""
try:
data = urlopen(url) # depends on [control=['try'], data=[]]
except:
print('Error Retrieving Data from Steam')
sys.exit(2) # depends on [control=['except'], data=[]]
return data.read().decode('utf-8') |
def destroy(name, call=None):
''' Destroy a node.
.. versionadded:: 2018.3.0
Disconnect a minion from the master, and remove its keys.
Optionally, (if ``remove_config_on_destroy`` is ``True``),
disables salt-minion from running on the minion, and
erases the Salt configuration files from it.
Optionally, (if ``shutdown_on_destroy`` is ``True``),
orders the minion to halt.
CLI Example:
.. code-block:: bash
salt-cloud --destroy mymachine
'''
if call == 'function':
raise SaltCloudSystemExit(
'The destroy action must be called with -d, --destroy, '
'-a, or --action.'
)
opts = __opts__
__utils__['cloud.fire_event'](
'event',
'destroying instance',
'salt/cloud/{0}/destroying'.format(name),
args={'name': name},
sock_dir=opts['sock_dir'],
transport=opts['transport']
)
vm_ = get_configured_provider()
local = salt.client.LocalClient()
my_info = local.cmd(name, 'grains.get', ['salt-cloud'])
try:
vm_.update(my_info[name]) # get profile name to get config value
except (IndexError, TypeError):
pass
if config.get_cloud_config_value(
'remove_config_on_destroy', vm_, opts, default=True
):
ret = local.cmd(name, # prevent generating new keys on restart
'service.disable',
['salt-minion'])
if ret and ret[name]:
log.info('disabled salt-minion service on %s', name)
ret = local.cmd(name, 'config.get', ['conf_file'])
if ret and ret[name]:
confile = ret[name]
ret = local.cmd(name, 'file.remove', [confile])
if ret and ret[name]:
log.info('removed minion %s configuration file %s',
name, confile)
ret = local.cmd(name, 'config.get', ['pki_dir'])
if ret and ret[name]:
pki_dir = ret[name]
ret = local.cmd(name, 'file.remove', [pki_dir])
if ret and ret[name]:
log.info(
'removed minion %s key files in %s',
name,
pki_dir)
if config.get_cloud_config_value(
'shutdown_on_destroy', vm_, opts, default=False
):
ret = local.cmd(name, 'system.shutdown')
if ret and ret[name]:
log.info('system.shutdown for minion %s successful', name)
__utils__['cloud.fire_event'](
'event',
'destroyed instance',
'salt/cloud/{0}/destroyed'.format(name),
args={'name': name},
sock_dir=opts['sock_dir'],
transport=opts['transport']
)
return {'Destroyed': '{0} was destroyed.'.format(name)} | def function[destroy, parameter[name, call]]:
constant[ Destroy a node.
.. versionadded:: 2018.3.0
Disconnect a minion from the master, and remove its keys.
Optionally, (if ``remove_config_on_destroy`` is ``True``),
disables salt-minion from running on the minion, and
erases the Salt configuration files from it.
Optionally, (if ``shutdown_on_destroy`` is ``True``),
orders the minion to halt.
CLI Example:
.. code-block:: bash
salt-cloud --destroy mymachine
]
if compare[name[call] equal[==] constant[function]] begin[:]
<ast.Raise object at 0x7da207f02d70>
variable[opts] assign[=] name[__opts__]
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[destroying instance], call[constant[salt/cloud/{0}/destroying].format, parameter[name[name]]]]]
variable[vm_] assign[=] call[name[get_configured_provider], parameter[]]
variable[local] assign[=] call[name[salt].client.LocalClient, parameter[]]
variable[my_info] assign[=] call[name[local].cmd, parameter[name[name], constant[grains.get], list[[<ast.Constant object at 0x7da207f00f40>]]]]
<ast.Try object at 0x7da207f01840>
if call[name[config].get_cloud_config_value, parameter[constant[remove_config_on_destroy], name[vm_], name[opts]]] begin[:]
variable[ret] assign[=] call[name[local].cmd, parameter[name[name], constant[service.disable], list[[<ast.Constant object at 0x7da207f01150>]]]]
if <ast.BoolOp object at 0x7da207f02a40> begin[:]
call[name[log].info, parameter[constant[disabled salt-minion service on %s], name[name]]]
variable[ret] assign[=] call[name[local].cmd, parameter[name[name], constant[config.get], list[[<ast.Constant object at 0x7da207f00d00>]]]]
if <ast.BoolOp object at 0x7da207f02bc0> begin[:]
variable[confile] assign[=] call[name[ret]][name[name]]
variable[ret] assign[=] call[name[local].cmd, parameter[name[name], constant[file.remove], list[[<ast.Name object at 0x7da207f01330>]]]]
if <ast.BoolOp object at 0x7da207f02680> begin[:]
call[name[log].info, parameter[constant[removed minion %s configuration file %s], name[name], name[confile]]]
variable[ret] assign[=] call[name[local].cmd, parameter[name[name], constant[config.get], list[[<ast.Constant object at 0x7da207f01960>]]]]
if <ast.BoolOp object at 0x7da207f009a0> begin[:]
variable[pki_dir] assign[=] call[name[ret]][name[name]]
variable[ret] assign[=] call[name[local].cmd, parameter[name[name], constant[file.remove], list[[<ast.Name object at 0x7da207f002e0>]]]]
if <ast.BoolOp object at 0x7da207f000a0> begin[:]
call[name[log].info, parameter[constant[removed minion %s key files in %s], name[name], name[pki_dir]]]
if call[name[config].get_cloud_config_value, parameter[constant[shutdown_on_destroy], name[vm_], name[opts]]] begin[:]
variable[ret] assign[=] call[name[local].cmd, parameter[name[name], constant[system.shutdown]]]
if <ast.BoolOp object at 0x7da207f02b00> begin[:]
call[name[log].info, parameter[constant[system.shutdown for minion %s successful], name[name]]]
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[destroyed instance], call[constant[salt/cloud/{0}/destroyed].format, parameter[name[name]]]]]
return[dictionary[[<ast.Constant object at 0x7da207f998a0>], [<ast.Call object at 0x7da207f9aad0>]]] | keyword[def] identifier[destroy] ( identifier[name] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] == literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
literal[string]
)
identifier[opts] = identifier[__opts__]
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[name] ),
identifier[args] ={ literal[string] : identifier[name] },
identifier[sock_dir] = identifier[opts] [ literal[string] ],
identifier[transport] = identifier[opts] [ literal[string] ]
)
identifier[vm_] = identifier[get_configured_provider] ()
identifier[local] = identifier[salt] . identifier[client] . identifier[LocalClient] ()
identifier[my_info] = identifier[local] . identifier[cmd] ( identifier[name] , literal[string] ,[ literal[string] ])
keyword[try] :
identifier[vm_] . identifier[update] ( identifier[my_info] [ identifier[name] ])
keyword[except] ( identifier[IndexError] , identifier[TypeError] ):
keyword[pass]
keyword[if] identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[opts] , identifier[default] = keyword[True]
):
identifier[ret] = identifier[local] . identifier[cmd] ( identifier[name] ,
literal[string] ,
[ literal[string] ])
keyword[if] identifier[ret] keyword[and] identifier[ret] [ identifier[name] ]:
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
identifier[ret] = identifier[local] . identifier[cmd] ( identifier[name] , literal[string] ,[ literal[string] ])
keyword[if] identifier[ret] keyword[and] identifier[ret] [ identifier[name] ]:
identifier[confile] = identifier[ret] [ identifier[name] ]
identifier[ret] = identifier[local] . identifier[cmd] ( identifier[name] , literal[string] ,[ identifier[confile] ])
keyword[if] identifier[ret] keyword[and] identifier[ret] [ identifier[name] ]:
identifier[log] . identifier[info] ( literal[string] ,
identifier[name] , identifier[confile] )
identifier[ret] = identifier[local] . identifier[cmd] ( identifier[name] , literal[string] ,[ literal[string] ])
keyword[if] identifier[ret] keyword[and] identifier[ret] [ identifier[name] ]:
identifier[pki_dir] = identifier[ret] [ identifier[name] ]
identifier[ret] = identifier[local] . identifier[cmd] ( identifier[name] , literal[string] ,[ identifier[pki_dir] ])
keyword[if] identifier[ret] keyword[and] identifier[ret] [ identifier[name] ]:
identifier[log] . identifier[info] (
literal[string] ,
identifier[name] ,
identifier[pki_dir] )
keyword[if] identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[opts] , identifier[default] = keyword[False]
):
identifier[ret] = identifier[local] . identifier[cmd] ( identifier[name] , literal[string] )
keyword[if] identifier[ret] keyword[and] identifier[ret] [ identifier[name] ]:
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[name] ),
identifier[args] ={ literal[string] : identifier[name] },
identifier[sock_dir] = identifier[opts] [ literal[string] ],
identifier[transport] = identifier[opts] [ literal[string] ]
)
keyword[return] { literal[string] : literal[string] . identifier[format] ( identifier[name] )} | def destroy(name, call=None):
""" Destroy a node.
.. versionadded:: 2018.3.0
Disconnect a minion from the master, and remove its keys.
Optionally, (if ``remove_config_on_destroy`` is ``True``),
disables salt-minion from running on the minion, and
erases the Salt configuration files from it.
Optionally, (if ``shutdown_on_destroy`` is ``True``),
orders the minion to halt.
CLI Example:
.. code-block:: bash
salt-cloud --destroy mymachine
"""
if call == 'function':
raise SaltCloudSystemExit('The destroy action must be called with -d, --destroy, -a, or --action.') # depends on [control=['if'], data=[]]
opts = __opts__
__utils__['cloud.fire_event']('event', 'destroying instance', 'salt/cloud/{0}/destroying'.format(name), args={'name': name}, sock_dir=opts['sock_dir'], transport=opts['transport'])
vm_ = get_configured_provider()
local = salt.client.LocalClient()
my_info = local.cmd(name, 'grains.get', ['salt-cloud'])
try:
vm_.update(my_info[name]) # get profile name to get config value # depends on [control=['try'], data=[]]
except (IndexError, TypeError):
pass # depends on [control=['except'], data=[]]
if config.get_cloud_config_value('remove_config_on_destroy', vm_, opts, default=True): # prevent generating new keys on restart
ret = local.cmd(name, 'service.disable', ['salt-minion'])
if ret and ret[name]:
log.info('disabled salt-minion service on %s', name) # depends on [control=['if'], data=[]]
ret = local.cmd(name, 'config.get', ['conf_file'])
if ret and ret[name]:
confile = ret[name]
ret = local.cmd(name, 'file.remove', [confile])
if ret and ret[name]:
log.info('removed minion %s configuration file %s', name, confile) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
ret = local.cmd(name, 'config.get', ['pki_dir'])
if ret and ret[name]:
pki_dir = ret[name]
ret = local.cmd(name, 'file.remove', [pki_dir])
if ret and ret[name]:
log.info('removed minion %s key files in %s', name, pki_dir) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if config.get_cloud_config_value('shutdown_on_destroy', vm_, opts, default=False):
ret = local.cmd(name, 'system.shutdown')
if ret and ret[name]:
log.info('system.shutdown for minion %s successful', name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
__utils__['cloud.fire_event']('event', 'destroyed instance', 'salt/cloud/{0}/destroyed'.format(name), args={'name': name}, sock_dir=opts['sock_dir'], transport=opts['transport'])
return {'Destroyed': '{0} was destroyed.'.format(name)} |
def magicquil(f):
"""
Decorator to enable a more convenient syntax for writing quil programs. With this decorator there is no need to
keep track of a Program object and regular Python if/else branches can be used for classical control flow.
Example usage:
@magicquil
def fast_reset(q1):
reg1 = MEASURE(q1, None)
if reg1:
X(q1)
else:
I(q1)
my_program = fast_reset(0) # this will be a Program object
"""
rewritten_function = _rewrite_function(f)
@functools.wraps(f)
def wrapper(*args, **kwargs):
if _program_context.get(None) is not None:
rewritten_function(*args, **kwargs)
program = _program_context.get()
else:
token = _program_context.set(Program())
rewritten_function(*args, **kwargs)
program = _program_context.get()
_program_context.reset(token)
return program
return wrapper | def function[magicquil, parameter[f]]:
constant[
Decorator to enable a more convenient syntax for writing quil programs. With this decorator there is no need to
keep track of a Program object and regular Python if/else branches can be used for classical control flow.
Example usage:
@magicquil
def fast_reset(q1):
reg1 = MEASURE(q1, None)
if reg1:
X(q1)
else:
I(q1)
my_program = fast_reset(0) # this will be a Program object
]
variable[rewritten_function] assign[=] call[name[_rewrite_function], parameter[name[f]]]
def function[wrapper, parameter[]]:
if compare[call[name[_program_context].get, parameter[constant[None]]] is_not constant[None]] begin[:]
call[name[rewritten_function], parameter[<ast.Starred object at 0x7da1b1c5a050>]]
variable[program] assign[=] call[name[_program_context].get, parameter[]]
return[name[program]]
return[name[wrapper]] | keyword[def] identifier[magicquil] ( identifier[f] ):
literal[string]
identifier[rewritten_function] = identifier[_rewrite_function] ( identifier[f] )
@ identifier[functools] . identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[_program_context] . identifier[get] ( keyword[None] ) keyword[is] keyword[not] keyword[None] :
identifier[rewritten_function] (* identifier[args] ,** identifier[kwargs] )
identifier[program] = identifier[_program_context] . identifier[get] ()
keyword[else] :
identifier[token] = identifier[_program_context] . identifier[set] ( identifier[Program] ())
identifier[rewritten_function] (* identifier[args] ,** identifier[kwargs] )
identifier[program] = identifier[_program_context] . identifier[get] ()
identifier[_program_context] . identifier[reset] ( identifier[token] )
keyword[return] identifier[program]
keyword[return] identifier[wrapper] | def magicquil(f):
"""
Decorator to enable a more convenient syntax for writing quil programs. With this decorator there is no need to
keep track of a Program object and regular Python if/else branches can be used for classical control flow.
Example usage:
@magicquil
def fast_reset(q1):
reg1 = MEASURE(q1, None)
if reg1:
X(q1)
else:
I(q1)
my_program = fast_reset(0) # this will be a Program object
"""
rewritten_function = _rewrite_function(f)
@functools.wraps(f)
def wrapper(*args, **kwargs):
if _program_context.get(None) is not None:
rewritten_function(*args, **kwargs)
program = _program_context.get() # depends on [control=['if'], data=[]]
else:
token = _program_context.set(Program())
rewritten_function(*args, **kwargs)
program = _program_context.get()
_program_context.reset(token)
return program
return wrapper |
def create_customer(self, name, **kwargs):
"""
Creates a customer with a name. All other parameters are optional. They
are: `note`, `active_hourly_rate`, `hourly_rate`,
`hourly_rates_per_service`, and `archived`.
"""
data = self._wrap_dict("customer", kwargs)
data["customer"]["name"] = name
return self.post("/customers.json", data=data) | def function[create_customer, parameter[self, name]]:
constant[
Creates a customer with a name. All other parameters are optional. They
are: `note`, `active_hourly_rate`, `hourly_rate`,
`hourly_rates_per_service`, and `archived`.
]
variable[data] assign[=] call[name[self]._wrap_dict, parameter[constant[customer], name[kwargs]]]
call[call[name[data]][constant[customer]]][constant[name]] assign[=] name[name]
return[call[name[self].post, parameter[constant[/customers.json]]]] | keyword[def] identifier[create_customer] ( identifier[self] , identifier[name] ,** identifier[kwargs] ):
literal[string]
identifier[data] = identifier[self] . identifier[_wrap_dict] ( literal[string] , identifier[kwargs] )
identifier[data] [ literal[string] ][ literal[string] ]= identifier[name]
keyword[return] identifier[self] . identifier[post] ( literal[string] , identifier[data] = identifier[data] ) | def create_customer(self, name, **kwargs):
"""
Creates a customer with a name. All other parameters are optional. They
are: `note`, `active_hourly_rate`, `hourly_rate`,
`hourly_rates_per_service`, and `archived`.
"""
data = self._wrap_dict('customer', kwargs)
data['customer']['name'] = name
return self.post('/customers.json', data=data) |
def gen_password_and_salt(cls, password_text):
""" 生成加密后的密码和盐 """
salt = os.urandom(32)
dk = hashlib.pbkdf2_hmac(
config.PASSWORD_HASH_FUNC_NAME,
password_text.encode('utf-8'),
salt,
config.PASSWORD_HASH_ITERATIONS,
)
return {'password': dk, 'salt': salt} | def function[gen_password_and_salt, parameter[cls, password_text]]:
constant[ 生成加密后的密码和盐 ]
variable[salt] assign[=] call[name[os].urandom, parameter[constant[32]]]
variable[dk] assign[=] call[name[hashlib].pbkdf2_hmac, parameter[name[config].PASSWORD_HASH_FUNC_NAME, call[name[password_text].encode, parameter[constant[utf-8]]], name[salt], name[config].PASSWORD_HASH_ITERATIONS]]
return[dictionary[[<ast.Constant object at 0x7da1b0011540>, <ast.Constant object at 0x7da1b00124d0>], [<ast.Name object at 0x7da1b0010d00>, <ast.Name object at 0x7da1b0011c60>]]] | keyword[def] identifier[gen_password_and_salt] ( identifier[cls] , identifier[password_text] ):
literal[string]
identifier[salt] = identifier[os] . identifier[urandom] ( literal[int] )
identifier[dk] = identifier[hashlib] . identifier[pbkdf2_hmac] (
identifier[config] . identifier[PASSWORD_HASH_FUNC_NAME] ,
identifier[password_text] . identifier[encode] ( literal[string] ),
identifier[salt] ,
identifier[config] . identifier[PASSWORD_HASH_ITERATIONS] ,
)
keyword[return] { literal[string] : identifier[dk] , literal[string] : identifier[salt] } | def gen_password_and_salt(cls, password_text):
""" 生成加密后的密码和盐 """
salt = os.urandom(32)
dk = hashlib.pbkdf2_hmac(config.PASSWORD_HASH_FUNC_NAME, password_text.encode('utf-8'), salt, config.PASSWORD_HASH_ITERATIONS)
return {'password': dk, 'salt': salt} |
def fit(ts, fs=[], all_params=[], fit_vars=None,
alg='leastsq', make_bounded=True):
"""
Use a minimization algorithm to fit a AstonSeries with
analytical functions.
"""
if fit_vars is None:
fit_vars = [f._peakargs for f in fs]
initc = [min(ts.values)]
for f, peak_params, to_fit in zip(fs, all_params, fit_vars):
if 'v' in to_fit:
to_fit.remove('v')
if make_bounded and hasattr(f, '_pbounds'):
new_v = _to_unbnd_p({i: peak_params[i] for i in to_fit},
f._pbounds)
initc += [new_v[i] for i in to_fit]
else:
initc += [peak_params[i] for i in to_fit]
def errfunc_lsq(fit_params, t, y, all_params):
# first value in fit_params is baseline
# fit_y = np.ones(len(t)) * fit_params[0]
fit_y = np.zeros(len(t))
param_i = 1
for f, peak_params, to_fit in zip(fs, all_params, fit_vars):
for k in to_fit:
peak_params[k] = fit_params[param_i]
param_i += 1
if make_bounded and hasattr(f, '_pbounds'):
fit_y += f(t, **_to_bound_p(peak_params, f._pbounds))
else:
fit_y += f(t, **peak_params)
return fit_y - y
def errfunc(p, t, y, all_params):
return np.sum(errfunc_lsq(p, t, y, all_params) ** 2)
if alg == 'simplex':
fit_p, _ = fmin(errfunc, initc, args=(ts.index, ts.values,
peak_params))
# elif alg == 'anneal':
# fit_p, _ = anneal(errfunc, initc, args=(ts.index, ts.values,
# peak_params))
elif alg == 'lbfgsb':
# TODO: use bounds param
fitp, _ = fmin_l_bfgs_b(errfunc, fit_p,
args=(ts.index, ts.values, peak_params),
approx_grad=True)
elif alg == 'leastsq':
fit_p, _ = leastsq(errfunc_lsq, initc, args=(ts.index, ts.values,
all_params))
# else:
# r = minimize(errfunc, initc, \
# args=(ts.index, ts.values, all_params), \
# jac=False, gtol=1e-2)
# #if not r['success']:
# # print('Fail:' + str(f))
# # print(r)
# #if np.nan in r['x']: # not r['success']?
# # fit_p = initc
# #else:
# # fit_p = r['x']
fit_pl = fit_p.tolist()
v = fit_pl.pop(0) # noqa
fitted_params = []
for f, to_fit in zip(fs, fit_vars):
fit_p_dict = {v: fit_pl.pop(0) for v in to_fit}
# fit_p_dict['v'] = v
if make_bounded and hasattr(f, '_pbounds'):
fitted_params.append(_to_bound_p(fit_p_dict, f._pbounds))
else:
fitted_params.append(fit_p_dict)
# calculate r^2 of the fit
ss_err = errfunc(fit_p, ts.index, ts.values, fitted_params)
ss_tot = np.sum((ts.values - np.mean(ts.values)) ** 2)
r2 = 1 - ss_err / ss_tot
res = {'r^2': r2}
return fitted_params, res | def function[fit, parameter[ts, fs, all_params, fit_vars, alg, make_bounded]]:
constant[
Use a minimization algorithm to fit a AstonSeries with
analytical functions.
]
if compare[name[fit_vars] is constant[None]] begin[:]
variable[fit_vars] assign[=] <ast.ListComp object at 0x7da18f58cc70>
variable[initc] assign[=] list[[<ast.Call object at 0x7da18f58e230>]]
for taget[tuple[[<ast.Name object at 0x7da18f58d630>, <ast.Name object at 0x7da18f58c4c0>, <ast.Name object at 0x7da18f58de10>]]] in starred[call[name[zip], parameter[name[fs], name[all_params], name[fit_vars]]]] begin[:]
if compare[constant[v] in name[to_fit]] begin[:]
call[name[to_fit].remove, parameter[constant[v]]]
if <ast.BoolOp object at 0x7da18f58fc10> begin[:]
variable[new_v] assign[=] call[name[_to_unbnd_p], parameter[<ast.DictComp object at 0x7da18f58c4f0>, name[f]._pbounds]]
<ast.AugAssign object at 0x7da18f58f580>
def function[errfunc_lsq, parameter[fit_params, t, y, all_params]]:
variable[fit_y] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[t]]]]]
variable[param_i] assign[=] constant[1]
for taget[tuple[[<ast.Name object at 0x7da18dc07e80>, <ast.Name object at 0x7da18dc05810>, <ast.Name object at 0x7da18dc05ea0>]]] in starred[call[name[zip], parameter[name[fs], name[all_params], name[fit_vars]]]] begin[:]
for taget[name[k]] in starred[name[to_fit]] begin[:]
call[name[peak_params]][name[k]] assign[=] call[name[fit_params]][name[param_i]]
<ast.AugAssign object at 0x7da18dc040a0>
if <ast.BoolOp object at 0x7da18dc07ca0> begin[:]
<ast.AugAssign object at 0x7da18dc05f00>
return[binary_operation[name[fit_y] - name[y]]]
def function[errfunc, parameter[p, t, y, all_params]]:
return[call[name[np].sum, parameter[binary_operation[call[name[errfunc_lsq], parameter[name[p], name[t], name[y], name[all_params]]] ** constant[2]]]]]
if compare[name[alg] equal[==] constant[simplex]] begin[:]
<ast.Tuple object at 0x7da18dc078b0> assign[=] call[name[fmin], parameter[name[errfunc], name[initc]]]
variable[fit_pl] assign[=] call[name[fit_p].tolist, parameter[]]
variable[v] assign[=] call[name[fit_pl].pop, parameter[constant[0]]]
variable[fitted_params] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18f09f4c0>, <ast.Name object at 0x7da18f09ff70>]]] in starred[call[name[zip], parameter[name[fs], name[fit_vars]]]] begin[:]
variable[fit_p_dict] assign[=] <ast.DictComp object at 0x7da18f09ed40>
if <ast.BoolOp object at 0x7da18f09ee60> begin[:]
call[name[fitted_params].append, parameter[call[name[_to_bound_p], parameter[name[fit_p_dict], name[f]._pbounds]]]]
variable[ss_err] assign[=] call[name[errfunc], parameter[name[fit_p], name[ts].index, name[ts].values, name[fitted_params]]]
variable[ss_tot] assign[=] call[name[np].sum, parameter[binary_operation[binary_operation[name[ts].values - call[name[np].mean, parameter[name[ts].values]]] ** constant[2]]]]
variable[r2] assign[=] binary_operation[constant[1] - binary_operation[name[ss_err] / name[ss_tot]]]
variable[res] assign[=] dictionary[[<ast.Constant object at 0x7da18f09cfa0>], [<ast.Name object at 0x7da18f09f280>]]
return[tuple[[<ast.Name object at 0x7da18f09e9e0>, <ast.Name object at 0x7da18f09dcf0>]]] | keyword[def] identifier[fit] ( identifier[ts] , identifier[fs] =[], identifier[all_params] =[], identifier[fit_vars] = keyword[None] ,
identifier[alg] = literal[string] , identifier[make_bounded] = keyword[True] ):
literal[string]
keyword[if] identifier[fit_vars] keyword[is] keyword[None] :
identifier[fit_vars] =[ identifier[f] . identifier[_peakargs] keyword[for] identifier[f] keyword[in] identifier[fs] ]
identifier[initc] =[ identifier[min] ( identifier[ts] . identifier[values] )]
keyword[for] identifier[f] , identifier[peak_params] , identifier[to_fit] keyword[in] identifier[zip] ( identifier[fs] , identifier[all_params] , identifier[fit_vars] ):
keyword[if] literal[string] keyword[in] identifier[to_fit] :
identifier[to_fit] . identifier[remove] ( literal[string] )
keyword[if] identifier[make_bounded] keyword[and] identifier[hasattr] ( identifier[f] , literal[string] ):
identifier[new_v] = identifier[_to_unbnd_p] ({ identifier[i] : identifier[peak_params] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[to_fit] },
identifier[f] . identifier[_pbounds] )
identifier[initc] +=[ identifier[new_v] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[to_fit] ]
keyword[else] :
identifier[initc] +=[ identifier[peak_params] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[to_fit] ]
keyword[def] identifier[errfunc_lsq] ( identifier[fit_params] , identifier[t] , identifier[y] , identifier[all_params] ):
identifier[fit_y] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[t] ))
identifier[param_i] = literal[int]
keyword[for] identifier[f] , identifier[peak_params] , identifier[to_fit] keyword[in] identifier[zip] ( identifier[fs] , identifier[all_params] , identifier[fit_vars] ):
keyword[for] identifier[k] keyword[in] identifier[to_fit] :
identifier[peak_params] [ identifier[k] ]= identifier[fit_params] [ identifier[param_i] ]
identifier[param_i] += literal[int]
keyword[if] identifier[make_bounded] keyword[and] identifier[hasattr] ( identifier[f] , literal[string] ):
identifier[fit_y] += identifier[f] ( identifier[t] ,** identifier[_to_bound_p] ( identifier[peak_params] , identifier[f] . identifier[_pbounds] ))
keyword[else] :
identifier[fit_y] += identifier[f] ( identifier[t] ,** identifier[peak_params] )
keyword[return] identifier[fit_y] - identifier[y]
keyword[def] identifier[errfunc] ( identifier[p] , identifier[t] , identifier[y] , identifier[all_params] ):
keyword[return] identifier[np] . identifier[sum] ( identifier[errfunc_lsq] ( identifier[p] , identifier[t] , identifier[y] , identifier[all_params] )** literal[int] )
keyword[if] identifier[alg] == literal[string] :
identifier[fit_p] , identifier[_] = identifier[fmin] ( identifier[errfunc] , identifier[initc] , identifier[args] =( identifier[ts] . identifier[index] , identifier[ts] . identifier[values] ,
identifier[peak_params] ))
keyword[elif] identifier[alg] == literal[string] :
identifier[fitp] , identifier[_] = identifier[fmin_l_bfgs_b] ( identifier[errfunc] , identifier[fit_p] ,
identifier[args] =( identifier[ts] . identifier[index] , identifier[ts] . identifier[values] , identifier[peak_params] ),
identifier[approx_grad] = keyword[True] )
keyword[elif] identifier[alg] == literal[string] :
identifier[fit_p] , identifier[_] = identifier[leastsq] ( identifier[errfunc_lsq] , identifier[initc] , identifier[args] =( identifier[ts] . identifier[index] , identifier[ts] . identifier[values] ,
identifier[all_params] ))
identifier[fit_pl] = identifier[fit_p] . identifier[tolist] ()
identifier[v] = identifier[fit_pl] . identifier[pop] ( literal[int] )
identifier[fitted_params] =[]
keyword[for] identifier[f] , identifier[to_fit] keyword[in] identifier[zip] ( identifier[fs] , identifier[fit_vars] ):
identifier[fit_p_dict] ={ identifier[v] : identifier[fit_pl] . identifier[pop] ( literal[int] ) keyword[for] identifier[v] keyword[in] identifier[to_fit] }
keyword[if] identifier[make_bounded] keyword[and] identifier[hasattr] ( identifier[f] , literal[string] ):
identifier[fitted_params] . identifier[append] ( identifier[_to_bound_p] ( identifier[fit_p_dict] , identifier[f] . identifier[_pbounds] ))
keyword[else] :
identifier[fitted_params] . identifier[append] ( identifier[fit_p_dict] )
identifier[ss_err] = identifier[errfunc] ( identifier[fit_p] , identifier[ts] . identifier[index] , identifier[ts] . identifier[values] , identifier[fitted_params] )
identifier[ss_tot] = identifier[np] . identifier[sum] (( identifier[ts] . identifier[values] - identifier[np] . identifier[mean] ( identifier[ts] . identifier[values] ))** literal[int] )
identifier[r2] = literal[int] - identifier[ss_err] / identifier[ss_tot]
identifier[res] ={ literal[string] : identifier[r2] }
keyword[return] identifier[fitted_params] , identifier[res] | def fit(ts, fs=[], all_params=[], fit_vars=None, alg='leastsq', make_bounded=True):
"""
Use a minimization algorithm to fit a AstonSeries with
analytical functions.
"""
if fit_vars is None:
fit_vars = [f._peakargs for f in fs] # depends on [control=['if'], data=['fit_vars']]
initc = [min(ts.values)]
for (f, peak_params, to_fit) in zip(fs, all_params, fit_vars):
if 'v' in to_fit:
to_fit.remove('v') # depends on [control=['if'], data=['to_fit']]
if make_bounded and hasattr(f, '_pbounds'):
new_v = _to_unbnd_p({i: peak_params[i] for i in to_fit}, f._pbounds)
initc += [new_v[i] for i in to_fit] # depends on [control=['if'], data=[]]
else:
initc += [peak_params[i] for i in to_fit] # depends on [control=['for'], data=[]]
def errfunc_lsq(fit_params, t, y, all_params):
# first value in fit_params is baseline
# fit_y = np.ones(len(t)) * fit_params[0]
fit_y = np.zeros(len(t))
param_i = 1
for (f, peak_params, to_fit) in zip(fs, all_params, fit_vars):
for k in to_fit:
peak_params[k] = fit_params[param_i]
param_i += 1 # depends on [control=['for'], data=['k']]
if make_bounded and hasattr(f, '_pbounds'):
fit_y += f(t, **_to_bound_p(peak_params, f._pbounds)) # depends on [control=['if'], data=[]]
else:
fit_y += f(t, **peak_params) # depends on [control=['for'], data=[]]
return fit_y - y
def errfunc(p, t, y, all_params):
return np.sum(errfunc_lsq(p, t, y, all_params) ** 2)
if alg == 'simplex':
(fit_p, _) = fmin(errfunc, initc, args=(ts.index, ts.values, peak_params)) # depends on [control=['if'], data=[]]
# elif alg == 'anneal':
# fit_p, _ = anneal(errfunc, initc, args=(ts.index, ts.values,
# peak_params))
elif alg == 'lbfgsb':
# TODO: use bounds param
(fitp, _) = fmin_l_bfgs_b(errfunc, fit_p, args=(ts.index, ts.values, peak_params), approx_grad=True) # depends on [control=['if'], data=[]]
elif alg == 'leastsq':
(fit_p, _) = leastsq(errfunc_lsq, initc, args=(ts.index, ts.values, all_params)) # depends on [control=['if'], data=[]]
# else:
# r = minimize(errfunc, initc, \
# args=(ts.index, ts.values, all_params), \
# jac=False, gtol=1e-2)
# #if not r['success']:
# # print('Fail:' + str(f))
# # print(r)
# #if np.nan in r['x']: # not r['success']?
# # fit_p = initc
# #else:
# # fit_p = r['x']
fit_pl = fit_p.tolist()
v = fit_pl.pop(0) # noqa
fitted_params = []
for (f, to_fit) in zip(fs, fit_vars):
fit_p_dict = {v: fit_pl.pop(0) for v in to_fit}
# fit_p_dict['v'] = v
if make_bounded and hasattr(f, '_pbounds'):
fitted_params.append(_to_bound_p(fit_p_dict, f._pbounds)) # depends on [control=['if'], data=[]]
else:
fitted_params.append(fit_p_dict) # depends on [control=['for'], data=[]]
# calculate r^2 of the fit
ss_err = errfunc(fit_p, ts.index, ts.values, fitted_params)
ss_tot = np.sum((ts.values - np.mean(ts.values)) ** 2)
r2 = 1 - ss_err / ss_tot
res = {'r^2': r2}
return (fitted_params, res) |
def _gcs_delete(args, _):
""" Delete one or more buckets or objects. """
objects = _expand_list(args['bucket'])
objects.extend(_expand_list(args['object']))
errs = []
for obj in objects:
try:
bucket, key = google.datalab.storage._bucket.parse_name(obj)
if bucket and key:
gcs_object = google.datalab.storage.Object(bucket, key)
if gcs_object.exists():
google.datalab.storage.Object(bucket, key).delete()
else:
errs.append("%s does not exist" % obj)
elif bucket:
gcs_bucket = google.datalab.storage.Bucket(bucket)
if gcs_bucket.exists():
gcs_bucket.delete()
else:
errs.append("%s does not exist" % obj)
else:
raise Exception("Can't delete object with invalid name %s" % obj)
except Exception as e:
errs.append("Couldn't delete %s: %s" %
(obj, _extract_gcs_api_response_error(str(e))))
if errs:
raise Exception('\n'.join(errs)) | def function[_gcs_delete, parameter[args, _]]:
constant[ Delete one or more buckets or objects. ]
variable[objects] assign[=] call[name[_expand_list], parameter[call[name[args]][constant[bucket]]]]
call[name[objects].extend, parameter[call[name[_expand_list], parameter[call[name[args]][constant[object]]]]]]
variable[errs] assign[=] list[[]]
for taget[name[obj]] in starred[name[objects]] begin[:]
<ast.Try object at 0x7da18ede46d0>
if name[errs] begin[:]
<ast.Raise object at 0x7da18fe91600> | keyword[def] identifier[_gcs_delete] ( identifier[args] , identifier[_] ):
literal[string]
identifier[objects] = identifier[_expand_list] ( identifier[args] [ literal[string] ])
identifier[objects] . identifier[extend] ( identifier[_expand_list] ( identifier[args] [ literal[string] ]))
identifier[errs] =[]
keyword[for] identifier[obj] keyword[in] identifier[objects] :
keyword[try] :
identifier[bucket] , identifier[key] = identifier[google] . identifier[datalab] . identifier[storage] . identifier[_bucket] . identifier[parse_name] ( identifier[obj] )
keyword[if] identifier[bucket] keyword[and] identifier[key] :
identifier[gcs_object] = identifier[google] . identifier[datalab] . identifier[storage] . identifier[Object] ( identifier[bucket] , identifier[key] )
keyword[if] identifier[gcs_object] . identifier[exists] ():
identifier[google] . identifier[datalab] . identifier[storage] . identifier[Object] ( identifier[bucket] , identifier[key] ). identifier[delete] ()
keyword[else] :
identifier[errs] . identifier[append] ( literal[string] % identifier[obj] )
keyword[elif] identifier[bucket] :
identifier[gcs_bucket] = identifier[google] . identifier[datalab] . identifier[storage] . identifier[Bucket] ( identifier[bucket] )
keyword[if] identifier[gcs_bucket] . identifier[exists] ():
identifier[gcs_bucket] . identifier[delete] ()
keyword[else] :
identifier[errs] . identifier[append] ( literal[string] % identifier[obj] )
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[obj] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[errs] . identifier[append] ( literal[string] %
( identifier[obj] , identifier[_extract_gcs_api_response_error] ( identifier[str] ( identifier[e] ))))
keyword[if] identifier[errs] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[join] ( identifier[errs] )) | def _gcs_delete(args, _):
""" Delete one or more buckets or objects. """
objects = _expand_list(args['bucket'])
objects.extend(_expand_list(args['object']))
errs = []
for obj in objects:
try:
(bucket, key) = google.datalab.storage._bucket.parse_name(obj)
if bucket and key:
gcs_object = google.datalab.storage.Object(bucket, key)
if gcs_object.exists():
google.datalab.storage.Object(bucket, key).delete() # depends on [control=['if'], data=[]]
else:
errs.append('%s does not exist' % obj) # depends on [control=['if'], data=[]]
elif bucket:
gcs_bucket = google.datalab.storage.Bucket(bucket)
if gcs_bucket.exists():
gcs_bucket.delete() # depends on [control=['if'], data=[]]
else:
errs.append('%s does not exist' % obj) # depends on [control=['if'], data=[]]
else:
raise Exception("Can't delete object with invalid name %s" % obj) # depends on [control=['try'], data=[]]
except Exception as e:
errs.append("Couldn't delete %s: %s" % (obj, _extract_gcs_api_response_error(str(e)))) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['obj']]
if errs:
raise Exception('\n'.join(errs)) # depends on [control=['if'], data=[]] |
def fit_list_of_wvfeatures(list_of_wvfeatures,
naxis1_arc,
crpix1,
poly_degree_wfit,
weighted=False,
plot_title=None,
geometry=None,
debugplot=0):
"""Fit polynomial to arc calibration list_of_wvfeatures.
Parameters
----------
list_of_wvfeatures : list (of WavecalFeature instances)
A list of size equal to the number of identified lines, which
elements are instances of the class WavecalFeature, containing
all the relevant information concerning the line
identification.
naxis1_arc : int
NAXIS1 of arc spectrum.
crpix1 : float
CRPIX1 value to be employed in the wavelength calibration.
poly_degree_wfit : int
Polynomial degree corresponding to the wavelength calibration
function to be fitted.
weighted : bool
Determines whether the polynomial fit is weighted or not,
using as weights the values of the cost function obtained in
the line identification. Since the weights can be very
different, typically weighted fits are not good because, in
practice, they totally ignore the points with the smallest
weights (which, in the other hand, are useful when handling
the borders of the wavelength calibration range).
plot_title : string or None
Title for residuals plot.
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the window geometry.
debugplot : int
Determines whether intermediate computations and/or plots
are displayed. The valid codes are defined in
numina.array.display.pause_debugplot.
Returns
-------
solution_wv : SolutionArcCalibration instance
Instance of class SolutionArcCalibration, containing the
information concerning the arc lines that have been properly
identified. The information about all the lines (including
those initially found but at the end discarded) is stored in
the list of WavecalFeature instances 'list_of_wvfeatures'.
"""
nlines_arc = len(list_of_wvfeatures)
# select information from valid lines.
nfit, ifit, xfit, yfit, wfit = select_data_for_fit(list_of_wvfeatures)
# select list of filtered out and unidentified lines
list_r = []
list_t = []
list_p = []
list_k = []
list_unidentified = []
for i in range(nlines_arc):
if not list_of_wvfeatures[i].line_ok:
if list_of_wvfeatures[i].category == 'X':
list_unidentified.append(i)
elif list_of_wvfeatures[i].category == 'R':
list_r.append(i)
elif list_of_wvfeatures[i].category == 'T':
list_t.append(i)
elif list_of_wvfeatures[i].category == 'P':
list_p.append(i)
elif list_of_wvfeatures[i].category == 'K':
list_k.append(i)
else:
raise ValueError('Unexpected "category"')
# polynomial fit
if weighted:
weights = 1.0 / wfit
else:
weights = np.zeros_like(wfit) + 1.0
if xfit.size <= poly_degree_wfit:
raise ValueError("Insufficient number of points for fit.")
poly, stats_list = Polynomial.fit(
x=xfit, y=yfit, deg=poly_degree_wfit, full=True, w=weights
)
poly = Polynomial.cast(poly)
coeff = poly.coef
if len(xfit) > poly_degree_wfit + 1:
residual_std = np.sqrt(stats_list[0]/(len(xfit)-poly_degree_wfit-1))[0]
else:
residual_std = 0.0
if abs(debugplot) >= 10:
print('>>> Fitted coefficients:\n', coeff)
print('>>> Residual std.......:', residual_std)
# obtain CRVAL1 and CDELT1 for a linear wavelength scale from the
# last polynomial fit
crval1_linear = poly(crpix1)
crmin1_linear = poly(1)
crmax1_linear = poly(naxis1_arc)
cdelt1_linear = (crmax1_linear - crval1_linear) / (naxis1_arc - crpix1)
if abs(debugplot) >= 10:
print('>>> CRVAL1 linear scale:', crval1_linear)
print('>>> CDELT1 linear scale:', cdelt1_linear)
# generate solution (note that the class SolutionArcCalibration
# only sotres the information in list_of_wvfeatures corresponding
# to lines that have been properly identified
cr_linear = CrLinear(
crpix1,
crval1_linear,
crmin1_linear,
crmax1_linear,
cdelt1_linear
)
solution_wv = SolutionArcCalibration(
features=list_of_wvfeatures,
coeff=coeff,
residual_std=residual_std,
cr_linear=cr_linear
)
if abs(debugplot) % 10 != 0:
# polynomial fit
xpol = np.linspace(1, naxis1_arc, naxis1_arc)
ypol = poly(xpol) - (crval1_linear + (xpol - crpix1) * cdelt1_linear)
# identified lines
xp = np.copy(xfit)
yp = yfit - (crval1_linear + (xp - crpix1) * cdelt1_linear)
yres = yfit - poly(xp) # residuals
# include residuals plot with identified lines
from numina.array.display.matplotlib_qt import plt
fig = plt.figure()
ax2 = fig.add_subplot(2, 1, 2)
ax2.set_xlim(1 - 0.05 * naxis1_arc, naxis1_arc + 0.05 * naxis1_arc)
ax2.set_xlabel('pixel position in arc spectrum [from 1 to NAXIS1]')
ax2.set_ylabel('residuals (Angstrom)')
ax2.plot(xp, yres, 'go')
ax2.axhline(y=0.0, color="black", linestyle="dashed")
# residuals with R, T, P and K lines
for val in zip(["R", "T", "P", "K"],
[list_r, list_t, list_p, list_k],
['red', 'blue', 'magenta', 'orange']):
list_x = val[1]
if len(list_x) > 0:
xxp = np.array([])
yyp = np.array([])
for i in list_x:
xxp = np.append(xxp, [list_of_wvfeatures[i].xpos])
yyp = np.append(yyp, [list_of_wvfeatures[i].reference])
yyres = yyp - poly(xxp)
ax2.plot(xxp, yyres, marker='x', markersize=15, c=val[2],
linewidth=0)
# plot with differences between linear fit and fitted
# polynomial
ax = fig.add_subplot(2, 1, 1, sharex=ax2)
ax.set_xlim(1 - 0.05 * naxis1_arc, naxis1_arc + 0.05 * naxis1_arc)
ax.set_ylabel('differences with\nlinear solution (Angstrom)')
ax.plot(xp, yp, 'go', label="identified")
for i in range(nfit):
ax.text(xp[i], yp[i], list_of_wvfeatures[ifit[i]].category,
fontsize=15)
# polynomial fit
ax.plot(xpol, ypol, 'c-', label="fit")
# unidentified lines
if len(list_unidentified) > 0:
ymin = np.concatenate((yp, ypol)).min()
ymax = np.concatenate((yp, ypol)).max()
for i in list_unidentified:
xxp = np.array([list_of_wvfeatures[i].xpos,
list_of_wvfeatures[i].xpos])
yyp = np.array([ymin, ymax])
if i == list_unidentified[0]:
ax.plot(xxp, yyp, 'r--', label='unidentified')
else:
ax.plot(xxp, yyp, 'r--')
# R, T, P and K lines
for val in zip(["R", "T", "P", "K"],
[list_r, list_t, list_p, list_k],
['red', 'blue', 'magenta', 'orange']):
list_x = val[1]
if len(list_x) > 0:
xxp = np.array([])
yyp = np.array([])
for i in list_x:
xxp = np.append(xxp, [list_of_wvfeatures[i].xpos])
yyp = np.append(yyp, [list_of_wvfeatures[i].reference])
yyp -= crval1_linear + (xxp - crpix1) * cdelt1_linear
ax.plot(xxp, yyp, marker='x', markersize=15, c=val[2],
linewidth=0, label='removed')
for k in range(len(xxp)):
ax.text(xxp[k], yyp[k], val[0], fontsize=15)
# legend
ax.legend()
# title
if plot_title is None:
plt.title("Wavelength calibration")
else:
plt.title(plot_title)
# include important parameters in plot
ax.text(0.50, 0.25, "poldeg: " + str(poly_degree_wfit) +
", nfit: " + str(len(xfit)),
fontsize=12,
transform=ax.transAxes,
horizontalalignment="center",
verticalalignment="bottom")
ax.text(0.50, 0.15, "CRVAL1: " + str(round(crval1_linear, 4)),
fontsize=12,
transform=ax.transAxes,
horizontalalignment="center",
verticalalignment="bottom")
ax.text(0.50, 0.05, "CDELT1: " + str(round(cdelt1_linear, 4)),
fontsize=12,
transform=ax.transAxes,
horizontalalignment="center",
verticalalignment="bottom")
ax2.text(0.50, 0.05, "r.m.s.: " + str(round(residual_std, 4)),
fontsize=12,
transform=ax2.transAxes,
horizontalalignment="center",
verticalalignment="bottom")
# set window geometry
set_window_geometry(geometry)
pause_debugplot(debugplot, pltshow=True, tight_layout=False)
return solution_wv | def function[fit_list_of_wvfeatures, parameter[list_of_wvfeatures, naxis1_arc, crpix1, poly_degree_wfit, weighted, plot_title, geometry, debugplot]]:
constant[Fit polynomial to arc calibration list_of_wvfeatures.
Parameters
----------
list_of_wvfeatures : list (of WavecalFeature instances)
A list of size equal to the number of identified lines, which
elements are instances of the class WavecalFeature, containing
all the relevant information concerning the line
identification.
naxis1_arc : int
NAXIS1 of arc spectrum.
crpix1 : float
CRPIX1 value to be employed in the wavelength calibration.
poly_degree_wfit : int
Polynomial degree corresponding to the wavelength calibration
function to be fitted.
weighted : bool
Determines whether the polynomial fit is weighted or not,
using as weights the values of the cost function obtained in
the line identification. Since the weights can be very
different, typically weighted fits are not good because, in
practice, they totally ignore the points with the smallest
weights (which, in the other hand, are useful when handling
the borders of the wavelength calibration range).
plot_title : string or None
Title for residuals plot.
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the window geometry.
debugplot : int
Determines whether intermediate computations and/or plots
are displayed. The valid codes are defined in
numina.array.display.pause_debugplot.
Returns
-------
solution_wv : SolutionArcCalibration instance
Instance of class SolutionArcCalibration, containing the
information concerning the arc lines that have been properly
identified. The information about all the lines (including
those initially found but at the end discarded) is stored in
the list of WavecalFeature instances 'list_of_wvfeatures'.
]
variable[nlines_arc] assign[=] call[name[len], parameter[name[list_of_wvfeatures]]]
<ast.Tuple object at 0x7da1b24d3b50> assign[=] call[name[select_data_for_fit], parameter[name[list_of_wvfeatures]]]
variable[list_r] assign[=] list[[]]
variable[list_t] assign[=] list[[]]
variable[list_p] assign[=] list[[]]
variable[list_k] assign[=] list[[]]
variable[list_unidentified] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[nlines_arc]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b24d3580> begin[:]
if compare[call[name[list_of_wvfeatures]][name[i]].category equal[==] constant[X]] begin[:]
call[name[list_unidentified].append, parameter[name[i]]]
if name[weighted] begin[:]
variable[weights] assign[=] binary_operation[constant[1.0] / name[wfit]]
if compare[name[xfit].size less_or_equal[<=] name[poly_degree_wfit]] begin[:]
<ast.Raise object at 0x7da1b24d23e0>
<ast.Tuple object at 0x7da1b24d22f0> assign[=] call[name[Polynomial].fit, parameter[]]
variable[poly] assign[=] call[name[Polynomial].cast, parameter[name[poly]]]
variable[coeff] assign[=] name[poly].coef
if compare[call[name[len], parameter[name[xfit]]] greater[>] binary_operation[name[poly_degree_wfit] + constant[1]]] begin[:]
variable[residual_std] assign[=] call[call[name[np].sqrt, parameter[binary_operation[call[name[stats_list]][constant[0]] / binary_operation[binary_operation[call[name[len], parameter[name[xfit]]] - name[poly_degree_wfit]] - constant[1]]]]]][constant[0]]
if compare[call[name[abs], parameter[name[debugplot]]] greater_or_equal[>=] constant[10]] begin[:]
call[name[print], parameter[constant[>>> Fitted coefficients:
], name[coeff]]]
call[name[print], parameter[constant[>>> Residual std.......:], name[residual_std]]]
variable[crval1_linear] assign[=] call[name[poly], parameter[name[crpix1]]]
variable[crmin1_linear] assign[=] call[name[poly], parameter[constant[1]]]
variable[crmax1_linear] assign[=] call[name[poly], parameter[name[naxis1_arc]]]
variable[cdelt1_linear] assign[=] binary_operation[binary_operation[name[crmax1_linear] - name[crval1_linear]] / binary_operation[name[naxis1_arc] - name[crpix1]]]
if compare[call[name[abs], parameter[name[debugplot]]] greater_or_equal[>=] constant[10]] begin[:]
call[name[print], parameter[constant[>>> CRVAL1 linear scale:], name[crval1_linear]]]
call[name[print], parameter[constant[>>> CDELT1 linear scale:], name[cdelt1_linear]]]
variable[cr_linear] assign[=] call[name[CrLinear], parameter[name[crpix1], name[crval1_linear], name[crmin1_linear], name[crmax1_linear], name[cdelt1_linear]]]
variable[solution_wv] assign[=] call[name[SolutionArcCalibration], parameter[]]
if compare[binary_operation[call[name[abs], parameter[name[debugplot]]] <ast.Mod object at 0x7da2590d6920> constant[10]] not_equal[!=] constant[0]] begin[:]
variable[xpol] assign[=] call[name[np].linspace, parameter[constant[1], name[naxis1_arc], name[naxis1_arc]]]
variable[ypol] assign[=] binary_operation[call[name[poly], parameter[name[xpol]]] - binary_operation[name[crval1_linear] + binary_operation[binary_operation[name[xpol] - name[crpix1]] * name[cdelt1_linear]]]]
variable[xp] assign[=] call[name[np].copy, parameter[name[xfit]]]
variable[yp] assign[=] binary_operation[name[yfit] - binary_operation[name[crval1_linear] + binary_operation[binary_operation[name[xp] - name[crpix1]] * name[cdelt1_linear]]]]
variable[yres] assign[=] binary_operation[name[yfit] - call[name[poly], parameter[name[xp]]]]
from relative_module[numina.array.display.matplotlib_qt] import module[plt]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax2] assign[=] call[name[fig].add_subplot, parameter[constant[2], constant[1], constant[2]]]
call[name[ax2].set_xlim, parameter[binary_operation[constant[1] - binary_operation[constant[0.05] * name[naxis1_arc]]], binary_operation[name[naxis1_arc] + binary_operation[constant[0.05] * name[naxis1_arc]]]]]
call[name[ax2].set_xlabel, parameter[constant[pixel position in arc spectrum [from 1 to NAXIS1]]]]
call[name[ax2].set_ylabel, parameter[constant[residuals (Angstrom)]]]
call[name[ax2].plot, parameter[name[xp], name[yres], constant[go]]]
call[name[ax2].axhline, parameter[]]
for taget[name[val]] in starred[call[name[zip], parameter[list[[<ast.Constant object at 0x7da1b24149a0>, <ast.Constant object at 0x7da1b24147f0>, <ast.Constant object at 0x7da1b24147c0>, <ast.Constant object at 0x7da1b2414820>]], list[[<ast.Name object at 0x7da1b2414850>, <ast.Name object at 0x7da1b24148e0>, <ast.Name object at 0x7da1b24148b0>, <ast.Name object at 0x7da1b2414910>]], list[[<ast.Constant object at 0x7da1b2414940>, <ast.Constant object at 0x7da1b2414970>, <ast.Constant object at 0x7da1b24149d0>, <ast.Constant object at 0x7da1b2414760>]]]]] begin[:]
variable[list_x] assign[=] call[name[val]][constant[1]]
if compare[call[name[len], parameter[name[list_x]]] greater[>] constant[0]] begin[:]
variable[xxp] assign[=] call[name[np].array, parameter[list[[]]]]
variable[yyp] assign[=] call[name[np].array, parameter[list[[]]]]
for taget[name[i]] in starred[name[list_x]] begin[:]
variable[xxp] assign[=] call[name[np].append, parameter[name[xxp], list[[<ast.Attribute object at 0x7da1b2415120>]]]]
variable[yyp] assign[=] call[name[np].append, parameter[name[yyp], list[[<ast.Attribute object at 0x7da1b24158d0>]]]]
variable[yyres] assign[=] binary_operation[name[yyp] - call[name[poly], parameter[name[xxp]]]]
call[name[ax2].plot, parameter[name[xxp], name[yyres]]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[2], constant[1], constant[1]]]
call[name[ax].set_xlim, parameter[binary_operation[constant[1] - binary_operation[constant[0.05] * name[naxis1_arc]]], binary_operation[name[naxis1_arc] + binary_operation[constant[0.05] * name[naxis1_arc]]]]]
call[name[ax].set_ylabel, parameter[constant[differences with
linear solution (Angstrom)]]]
call[name[ax].plot, parameter[name[xp], name[yp], constant[go]]]
for taget[name[i]] in starred[call[name[range], parameter[name[nfit]]]] begin[:]
call[name[ax].text, parameter[call[name[xp]][name[i]], call[name[yp]][name[i]], call[name[list_of_wvfeatures]][call[name[ifit]][name[i]]].category]]
call[name[ax].plot, parameter[name[xpol], name[ypol], constant[c-]]]
if compare[call[name[len], parameter[name[list_unidentified]]] greater[>] constant[0]] begin[:]
variable[ymin] assign[=] call[call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da1b24915a0>, <ast.Name object at 0x7da1b2491600>]]]].min, parameter[]]
variable[ymax] assign[=] call[call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da1b24917e0>, <ast.Name object at 0x7da1b24917b0>]]]].max, parameter[]]
for taget[name[i]] in starred[name[list_unidentified]] begin[:]
variable[xxp] assign[=] call[name[np].array, parameter[list[[<ast.Attribute object at 0x7da1b2491990>, <ast.Attribute object at 0x7da1b2491a50>]]]]
variable[yyp] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b2491c30>, <ast.Name object at 0x7da1b2491c90>]]]]
if compare[name[i] equal[==] call[name[list_unidentified]][constant[0]]] begin[:]
call[name[ax].plot, parameter[name[xxp], name[yyp], constant[r--]]]
for taget[name[val]] in starred[call[name[zip], parameter[list[[<ast.Constant object at 0x7da1b2492200>, <ast.Constant object at 0x7da1b2492260>, <ast.Constant object at 0x7da1b2492350>, <ast.Constant object at 0x7da1b2492bc0>]], list[[<ast.Name object at 0x7da1b2492bf0>, <ast.Name object at 0x7da1b2492980>, <ast.Name object at 0x7da1b2492950>, <ast.Name object at 0x7da1b24929b0>]], list[[<ast.Constant object at 0x7da1b24929e0>, <ast.Constant object at 0x7da1b2492a40>, <ast.Constant object at 0x7da1b2492aa0>, <ast.Constant object at 0x7da1b2492a70>]]]]] begin[:]
variable[list_x] assign[=] call[name[val]][constant[1]]
if compare[call[name[len], parameter[name[list_x]]] greater[>] constant[0]] begin[:]
variable[xxp] assign[=] call[name[np].array, parameter[list[[]]]]
variable[yyp] assign[=] call[name[np].array, parameter[list[[]]]]
for taget[name[i]] in starred[name[list_x]] begin[:]
variable[xxp] assign[=] call[name[np].append, parameter[name[xxp], list[[<ast.Attribute object at 0x7da1b2492920>]]]]
variable[yyp] assign[=] call[name[np].append, parameter[name[yyp], list[[<ast.Attribute object at 0x7da1b2492e60>]]]]
<ast.AugAssign object at 0x7da1b2492e30>
call[name[ax].plot, parameter[name[xxp], name[yyp]]]
for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[xxp]]]]]] begin[:]
call[name[ax].text, parameter[call[name[xxp]][name[k]], call[name[yyp]][name[k]], call[name[val]][constant[0]]]]
call[name[ax].legend, parameter[]]
if compare[name[plot_title] is constant[None]] begin[:]
call[name[plt].title, parameter[constant[Wavelength calibration]]]
call[name[ax].text, parameter[constant[0.5], constant[0.25], binary_operation[binary_operation[binary_operation[constant[poldeg: ] + call[name[str], parameter[name[poly_degree_wfit]]]] + constant[, nfit: ]] + call[name[str], parameter[call[name[len], parameter[name[xfit]]]]]]]]
call[name[ax].text, parameter[constant[0.5], constant[0.15], binary_operation[constant[CRVAL1: ] + call[name[str], parameter[call[name[round], parameter[name[crval1_linear], constant[4]]]]]]]]
call[name[ax].text, parameter[constant[0.5], constant[0.05], binary_operation[constant[CDELT1: ] + call[name[str], parameter[call[name[round], parameter[name[cdelt1_linear], constant[4]]]]]]]]
call[name[ax2].text, parameter[constant[0.5], constant[0.05], binary_operation[constant[r.m.s.: ] + call[name[str], parameter[call[name[round], parameter[name[residual_std], constant[4]]]]]]]]
call[name[set_window_geometry], parameter[name[geometry]]]
call[name[pause_debugplot], parameter[name[debugplot]]]
return[name[solution_wv]] | keyword[def] identifier[fit_list_of_wvfeatures] ( identifier[list_of_wvfeatures] ,
identifier[naxis1_arc] ,
identifier[crpix1] ,
identifier[poly_degree_wfit] ,
identifier[weighted] = keyword[False] ,
identifier[plot_title] = keyword[None] ,
identifier[geometry] = keyword[None] ,
identifier[debugplot] = literal[int] ):
literal[string]
identifier[nlines_arc] = identifier[len] ( identifier[list_of_wvfeatures] )
identifier[nfit] , identifier[ifit] , identifier[xfit] , identifier[yfit] , identifier[wfit] = identifier[select_data_for_fit] ( identifier[list_of_wvfeatures] )
identifier[list_r] =[]
identifier[list_t] =[]
identifier[list_p] =[]
identifier[list_k] =[]
identifier[list_unidentified] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nlines_arc] ):
keyword[if] keyword[not] identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[line_ok] :
keyword[if] identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[category] == literal[string] :
identifier[list_unidentified] . identifier[append] ( identifier[i] )
keyword[elif] identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[category] == literal[string] :
identifier[list_r] . identifier[append] ( identifier[i] )
keyword[elif] identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[category] == literal[string] :
identifier[list_t] . identifier[append] ( identifier[i] )
keyword[elif] identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[category] == literal[string] :
identifier[list_p] . identifier[append] ( identifier[i] )
keyword[elif] identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[category] == literal[string] :
identifier[list_k] . identifier[append] ( identifier[i] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[weighted] :
identifier[weights] = literal[int] / identifier[wfit]
keyword[else] :
identifier[weights] = identifier[np] . identifier[zeros_like] ( identifier[wfit] )+ literal[int]
keyword[if] identifier[xfit] . identifier[size] <= identifier[poly_degree_wfit] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[poly] , identifier[stats_list] = identifier[Polynomial] . identifier[fit] (
identifier[x] = identifier[xfit] , identifier[y] = identifier[yfit] , identifier[deg] = identifier[poly_degree_wfit] , identifier[full] = keyword[True] , identifier[w] = identifier[weights]
)
identifier[poly] = identifier[Polynomial] . identifier[cast] ( identifier[poly] )
identifier[coeff] = identifier[poly] . identifier[coef]
keyword[if] identifier[len] ( identifier[xfit] )> identifier[poly_degree_wfit] + literal[int] :
identifier[residual_std] = identifier[np] . identifier[sqrt] ( identifier[stats_list] [ literal[int] ]/( identifier[len] ( identifier[xfit] )- identifier[poly_degree_wfit] - literal[int] ))[ literal[int] ]
keyword[else] :
identifier[residual_std] = literal[int]
keyword[if] identifier[abs] ( identifier[debugplot] )>= literal[int] :
identifier[print] ( literal[string] , identifier[coeff] )
identifier[print] ( literal[string] , identifier[residual_std] )
identifier[crval1_linear] = identifier[poly] ( identifier[crpix1] )
identifier[crmin1_linear] = identifier[poly] ( literal[int] )
identifier[crmax1_linear] = identifier[poly] ( identifier[naxis1_arc] )
identifier[cdelt1_linear] =( identifier[crmax1_linear] - identifier[crval1_linear] )/( identifier[naxis1_arc] - identifier[crpix1] )
keyword[if] identifier[abs] ( identifier[debugplot] )>= literal[int] :
identifier[print] ( literal[string] , identifier[crval1_linear] )
identifier[print] ( literal[string] , identifier[cdelt1_linear] )
identifier[cr_linear] = identifier[CrLinear] (
identifier[crpix1] ,
identifier[crval1_linear] ,
identifier[crmin1_linear] ,
identifier[crmax1_linear] ,
identifier[cdelt1_linear]
)
identifier[solution_wv] = identifier[SolutionArcCalibration] (
identifier[features] = identifier[list_of_wvfeatures] ,
identifier[coeff] = identifier[coeff] ,
identifier[residual_std] = identifier[residual_std] ,
identifier[cr_linear] = identifier[cr_linear]
)
keyword[if] identifier[abs] ( identifier[debugplot] )% literal[int] != literal[int] :
identifier[xpol] = identifier[np] . identifier[linspace] ( literal[int] , identifier[naxis1_arc] , identifier[naxis1_arc] )
identifier[ypol] = identifier[poly] ( identifier[xpol] )-( identifier[crval1_linear] +( identifier[xpol] - identifier[crpix1] )* identifier[cdelt1_linear] )
identifier[xp] = identifier[np] . identifier[copy] ( identifier[xfit] )
identifier[yp] = identifier[yfit] -( identifier[crval1_linear] +( identifier[xp] - identifier[crpix1] )* identifier[cdelt1_linear] )
identifier[yres] = identifier[yfit] - identifier[poly] ( identifier[xp] )
keyword[from] identifier[numina] . identifier[array] . identifier[display] . identifier[matplotlib_qt] keyword[import] identifier[plt]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax2] = identifier[fig] . identifier[add_subplot] ( literal[int] , literal[int] , literal[int] )
identifier[ax2] . identifier[set_xlim] ( literal[int] - literal[int] * identifier[naxis1_arc] , identifier[naxis1_arc] + literal[int] * identifier[naxis1_arc] )
identifier[ax2] . identifier[set_xlabel] ( literal[string] )
identifier[ax2] . identifier[set_ylabel] ( literal[string] )
identifier[ax2] . identifier[plot] ( identifier[xp] , identifier[yres] , literal[string] )
identifier[ax2] . identifier[axhline] ( identifier[y] = literal[int] , identifier[color] = literal[string] , identifier[linestyle] = literal[string] )
keyword[for] identifier[val] keyword[in] identifier[zip] ([ literal[string] , literal[string] , literal[string] , literal[string] ],
[ identifier[list_r] , identifier[list_t] , identifier[list_p] , identifier[list_k] ],
[ literal[string] , literal[string] , literal[string] , literal[string] ]):
identifier[list_x] = identifier[val] [ literal[int] ]
keyword[if] identifier[len] ( identifier[list_x] )> literal[int] :
identifier[xxp] = identifier[np] . identifier[array] ([])
identifier[yyp] = identifier[np] . identifier[array] ([])
keyword[for] identifier[i] keyword[in] identifier[list_x] :
identifier[xxp] = identifier[np] . identifier[append] ( identifier[xxp] ,[ identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[xpos] ])
identifier[yyp] = identifier[np] . identifier[append] ( identifier[yyp] ,[ identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[reference] ])
identifier[yyres] = identifier[yyp] - identifier[poly] ( identifier[xxp] )
identifier[ax2] . identifier[plot] ( identifier[xxp] , identifier[yyres] , identifier[marker] = literal[string] , identifier[markersize] = literal[int] , identifier[c] = identifier[val] [ literal[int] ],
identifier[linewidth] = literal[int] )
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] , literal[int] , literal[int] , identifier[sharex] = identifier[ax2] )
identifier[ax] . identifier[set_xlim] ( literal[int] - literal[int] * identifier[naxis1_arc] , identifier[naxis1_arc] + literal[int] * identifier[naxis1_arc] )
identifier[ax] . identifier[set_ylabel] ( literal[string] )
identifier[ax] . identifier[plot] ( identifier[xp] , identifier[yp] , literal[string] , identifier[label] = literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nfit] ):
identifier[ax] . identifier[text] ( identifier[xp] [ identifier[i] ], identifier[yp] [ identifier[i] ], identifier[list_of_wvfeatures] [ identifier[ifit] [ identifier[i] ]]. identifier[category] ,
identifier[fontsize] = literal[int] )
identifier[ax] . identifier[plot] ( identifier[xpol] , identifier[ypol] , literal[string] , identifier[label] = literal[string] )
keyword[if] identifier[len] ( identifier[list_unidentified] )> literal[int] :
identifier[ymin] = identifier[np] . identifier[concatenate] (( identifier[yp] , identifier[ypol] )). identifier[min] ()
identifier[ymax] = identifier[np] . identifier[concatenate] (( identifier[yp] , identifier[ypol] )). identifier[max] ()
keyword[for] identifier[i] keyword[in] identifier[list_unidentified] :
identifier[xxp] = identifier[np] . identifier[array] ([ identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[xpos] ,
identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[xpos] ])
identifier[yyp] = identifier[np] . identifier[array] ([ identifier[ymin] , identifier[ymax] ])
keyword[if] identifier[i] == identifier[list_unidentified] [ literal[int] ]:
identifier[ax] . identifier[plot] ( identifier[xxp] , identifier[yyp] , literal[string] , identifier[label] = literal[string] )
keyword[else] :
identifier[ax] . identifier[plot] ( identifier[xxp] , identifier[yyp] , literal[string] )
keyword[for] identifier[val] keyword[in] identifier[zip] ([ literal[string] , literal[string] , literal[string] , literal[string] ],
[ identifier[list_r] , identifier[list_t] , identifier[list_p] , identifier[list_k] ],
[ literal[string] , literal[string] , literal[string] , literal[string] ]):
identifier[list_x] = identifier[val] [ literal[int] ]
keyword[if] identifier[len] ( identifier[list_x] )> literal[int] :
identifier[xxp] = identifier[np] . identifier[array] ([])
identifier[yyp] = identifier[np] . identifier[array] ([])
keyword[for] identifier[i] keyword[in] identifier[list_x] :
identifier[xxp] = identifier[np] . identifier[append] ( identifier[xxp] ,[ identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[xpos] ])
identifier[yyp] = identifier[np] . identifier[append] ( identifier[yyp] ,[ identifier[list_of_wvfeatures] [ identifier[i] ]. identifier[reference] ])
identifier[yyp] -= identifier[crval1_linear] +( identifier[xxp] - identifier[crpix1] )* identifier[cdelt1_linear]
identifier[ax] . identifier[plot] ( identifier[xxp] , identifier[yyp] , identifier[marker] = literal[string] , identifier[markersize] = literal[int] , identifier[c] = identifier[val] [ literal[int] ],
identifier[linewidth] = literal[int] , identifier[label] = literal[string] )
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[xxp] )):
identifier[ax] . identifier[text] ( identifier[xxp] [ identifier[k] ], identifier[yyp] [ identifier[k] ], identifier[val] [ literal[int] ], identifier[fontsize] = literal[int] )
identifier[ax] . identifier[legend] ()
keyword[if] identifier[plot_title] keyword[is] keyword[None] :
identifier[plt] . identifier[title] ( literal[string] )
keyword[else] :
identifier[plt] . identifier[title] ( identifier[plot_title] )
identifier[ax] . identifier[text] ( literal[int] , literal[int] , literal[string] + identifier[str] ( identifier[poly_degree_wfit] )+
literal[string] + identifier[str] ( identifier[len] ( identifier[xfit] )),
identifier[fontsize] = literal[int] ,
identifier[transform] = identifier[ax] . identifier[transAxes] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] )
identifier[ax] . identifier[text] ( literal[int] , literal[int] , literal[string] + identifier[str] ( identifier[round] ( identifier[crval1_linear] , literal[int] )),
identifier[fontsize] = literal[int] ,
identifier[transform] = identifier[ax] . identifier[transAxes] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] )
identifier[ax] . identifier[text] ( literal[int] , literal[int] , literal[string] + identifier[str] ( identifier[round] ( identifier[cdelt1_linear] , literal[int] )),
identifier[fontsize] = literal[int] ,
identifier[transform] = identifier[ax] . identifier[transAxes] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] )
identifier[ax2] . identifier[text] ( literal[int] , literal[int] , literal[string] + identifier[str] ( identifier[round] ( identifier[residual_std] , literal[int] )),
identifier[fontsize] = literal[int] ,
identifier[transform] = identifier[ax2] . identifier[transAxes] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] )
identifier[set_window_geometry] ( identifier[geometry] )
identifier[pause_debugplot] ( identifier[debugplot] , identifier[pltshow] = keyword[True] , identifier[tight_layout] = keyword[False] )
keyword[return] identifier[solution_wv] | def fit_list_of_wvfeatures(list_of_wvfeatures, naxis1_arc, crpix1, poly_degree_wfit, weighted=False, plot_title=None, geometry=None, debugplot=0):
"""Fit polynomial to arc calibration list_of_wvfeatures.
Parameters
----------
list_of_wvfeatures : list (of WavecalFeature instances)
A list of size equal to the number of identified lines, which
elements are instances of the class WavecalFeature, containing
all the relevant information concerning the line
identification.
naxis1_arc : int
NAXIS1 of arc spectrum.
crpix1 : float
CRPIX1 value to be employed in the wavelength calibration.
poly_degree_wfit : int
Polynomial degree corresponding to the wavelength calibration
function to be fitted.
weighted : bool
Determines whether the polynomial fit is weighted or not,
using as weights the values of the cost function obtained in
the line identification. Since the weights can be very
different, typically weighted fits are not good because, in
practice, they totally ignore the points with the smallest
weights (which, in the other hand, are useful when handling
the borders of the wavelength calibration range).
plot_title : string or None
Title for residuals plot.
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the window geometry.
debugplot : int
Determines whether intermediate computations and/or plots
are displayed. The valid codes are defined in
numina.array.display.pause_debugplot.
Returns
-------
solution_wv : SolutionArcCalibration instance
Instance of class SolutionArcCalibration, containing the
information concerning the arc lines that have been properly
identified. The information about all the lines (including
those initially found but at the end discarded) is stored in
the list of WavecalFeature instances 'list_of_wvfeatures'.
"""
nlines_arc = len(list_of_wvfeatures)
# select information from valid lines.
(nfit, ifit, xfit, yfit, wfit) = select_data_for_fit(list_of_wvfeatures)
# select list of filtered out and unidentified lines
list_r = []
list_t = []
list_p = []
list_k = []
list_unidentified = []
for i in range(nlines_arc):
if not list_of_wvfeatures[i].line_ok:
if list_of_wvfeatures[i].category == 'X':
list_unidentified.append(i) # depends on [control=['if'], data=[]]
elif list_of_wvfeatures[i].category == 'R':
list_r.append(i) # depends on [control=['if'], data=[]]
elif list_of_wvfeatures[i].category == 'T':
list_t.append(i) # depends on [control=['if'], data=[]]
elif list_of_wvfeatures[i].category == 'P':
list_p.append(i) # depends on [control=['if'], data=[]]
elif list_of_wvfeatures[i].category == 'K':
list_k.append(i) # depends on [control=['if'], data=[]]
else:
raise ValueError('Unexpected "category"') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
# polynomial fit
if weighted:
weights = 1.0 / wfit # depends on [control=['if'], data=[]]
else:
weights = np.zeros_like(wfit) + 1.0
if xfit.size <= poly_degree_wfit:
raise ValueError('Insufficient number of points for fit.') # depends on [control=['if'], data=[]]
(poly, stats_list) = Polynomial.fit(x=xfit, y=yfit, deg=poly_degree_wfit, full=True, w=weights)
poly = Polynomial.cast(poly)
coeff = poly.coef
if len(xfit) > poly_degree_wfit + 1:
residual_std = np.sqrt(stats_list[0] / (len(xfit) - poly_degree_wfit - 1))[0] # depends on [control=['if'], data=[]]
else:
residual_std = 0.0
if abs(debugplot) >= 10:
print('>>> Fitted coefficients:\n', coeff)
print('>>> Residual std.......:', residual_std) # depends on [control=['if'], data=[]]
# obtain CRVAL1 and CDELT1 for a linear wavelength scale from the
# last polynomial fit
crval1_linear = poly(crpix1)
crmin1_linear = poly(1)
crmax1_linear = poly(naxis1_arc)
cdelt1_linear = (crmax1_linear - crval1_linear) / (naxis1_arc - crpix1)
if abs(debugplot) >= 10:
print('>>> CRVAL1 linear scale:', crval1_linear)
print('>>> CDELT1 linear scale:', cdelt1_linear) # depends on [control=['if'], data=[]]
# generate solution (note that the class SolutionArcCalibration
# only sotres the information in list_of_wvfeatures corresponding
# to lines that have been properly identified
cr_linear = CrLinear(crpix1, crval1_linear, crmin1_linear, crmax1_linear, cdelt1_linear)
solution_wv = SolutionArcCalibration(features=list_of_wvfeatures, coeff=coeff, residual_std=residual_std, cr_linear=cr_linear)
if abs(debugplot) % 10 != 0:
# polynomial fit
xpol = np.linspace(1, naxis1_arc, naxis1_arc)
ypol = poly(xpol) - (crval1_linear + (xpol - crpix1) * cdelt1_linear)
# identified lines
xp = np.copy(xfit)
yp = yfit - (crval1_linear + (xp - crpix1) * cdelt1_linear)
yres = yfit - poly(xp) # residuals
# include residuals plot with identified lines
from numina.array.display.matplotlib_qt import plt
fig = plt.figure()
ax2 = fig.add_subplot(2, 1, 2)
ax2.set_xlim(1 - 0.05 * naxis1_arc, naxis1_arc + 0.05 * naxis1_arc)
ax2.set_xlabel('pixel position in arc spectrum [from 1 to NAXIS1]')
ax2.set_ylabel('residuals (Angstrom)')
ax2.plot(xp, yres, 'go')
ax2.axhline(y=0.0, color='black', linestyle='dashed')
# residuals with R, T, P and K lines
for val in zip(['R', 'T', 'P', 'K'], [list_r, list_t, list_p, list_k], ['red', 'blue', 'magenta', 'orange']):
list_x = val[1]
if len(list_x) > 0:
xxp = np.array([])
yyp = np.array([])
for i in list_x:
xxp = np.append(xxp, [list_of_wvfeatures[i].xpos])
yyp = np.append(yyp, [list_of_wvfeatures[i].reference]) # depends on [control=['for'], data=['i']]
yyres = yyp - poly(xxp)
ax2.plot(xxp, yyres, marker='x', markersize=15, c=val[2], linewidth=0) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['val']]
# plot with differences between linear fit and fitted
# polynomial
ax = fig.add_subplot(2, 1, 1, sharex=ax2)
ax.set_xlim(1 - 0.05 * naxis1_arc, naxis1_arc + 0.05 * naxis1_arc)
ax.set_ylabel('differences with\nlinear solution (Angstrom)')
ax.plot(xp, yp, 'go', label='identified')
for i in range(nfit):
ax.text(xp[i], yp[i], list_of_wvfeatures[ifit[i]].category, fontsize=15) # depends on [control=['for'], data=['i']]
# polynomial fit
ax.plot(xpol, ypol, 'c-', label='fit')
# unidentified lines
if len(list_unidentified) > 0:
ymin = np.concatenate((yp, ypol)).min()
ymax = np.concatenate((yp, ypol)).max()
for i in list_unidentified:
xxp = np.array([list_of_wvfeatures[i].xpos, list_of_wvfeatures[i].xpos])
yyp = np.array([ymin, ymax])
if i == list_unidentified[0]:
ax.plot(xxp, yyp, 'r--', label='unidentified') # depends on [control=['if'], data=[]]
else:
ax.plot(xxp, yyp, 'r--') # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
# R, T, P and K lines
for val in zip(['R', 'T', 'P', 'K'], [list_r, list_t, list_p, list_k], ['red', 'blue', 'magenta', 'orange']):
list_x = val[1]
if len(list_x) > 0:
xxp = np.array([])
yyp = np.array([])
for i in list_x:
xxp = np.append(xxp, [list_of_wvfeatures[i].xpos])
yyp = np.append(yyp, [list_of_wvfeatures[i].reference]) # depends on [control=['for'], data=['i']]
yyp -= crval1_linear + (xxp - crpix1) * cdelt1_linear
ax.plot(xxp, yyp, marker='x', markersize=15, c=val[2], linewidth=0, label='removed')
for k in range(len(xxp)):
ax.text(xxp[k], yyp[k], val[0], fontsize=15) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['val']]
# legend
ax.legend()
# title
if plot_title is None:
plt.title('Wavelength calibration') # depends on [control=['if'], data=[]]
else:
plt.title(plot_title)
# include important parameters in plot
ax.text(0.5, 0.25, 'poldeg: ' + str(poly_degree_wfit) + ', nfit: ' + str(len(xfit)), fontsize=12, transform=ax.transAxes, horizontalalignment='center', verticalalignment='bottom')
ax.text(0.5, 0.15, 'CRVAL1: ' + str(round(crval1_linear, 4)), fontsize=12, transform=ax.transAxes, horizontalalignment='center', verticalalignment='bottom')
ax.text(0.5, 0.05, 'CDELT1: ' + str(round(cdelt1_linear, 4)), fontsize=12, transform=ax.transAxes, horizontalalignment='center', verticalalignment='bottom')
ax2.text(0.5, 0.05, 'r.m.s.: ' + str(round(residual_std, 4)), fontsize=12, transform=ax2.transAxes, horizontalalignment='center', verticalalignment='bottom')
# set window geometry
set_window_geometry(geometry)
pause_debugplot(debugplot, pltshow=True, tight_layout=False) # depends on [control=['if'], data=[]]
return solution_wv |
def dict_from_cookiejar(cj):
"""Returns a key/value dictionary from a CookieJar.
:param cj: CookieJar object to extract cookies from.
"""
cookie_dict = {}
for _, cookies in cj._cookies.items():
for _, cookies in cookies.items():
for cookie in cookies.values():
# print cookie
cookie_dict[cookie.name] = cookie.value
return cookie_dict | def function[dict_from_cookiejar, parameter[cj]]:
constant[Returns a key/value dictionary from a CookieJar.
:param cj: CookieJar object to extract cookies from.
]
variable[cookie_dict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b25364a0>, <ast.Name object at 0x7da1b2536260>]]] in starred[call[name[cj]._cookies.items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b2536710>, <ast.Name object at 0x7da1b252abc0>]]] in starred[call[name[cookies].items, parameter[]]] begin[:]
for taget[name[cookie]] in starred[call[name[cookies].values, parameter[]]] begin[:]
call[name[cookie_dict]][name[cookie].name] assign[=] name[cookie].value
return[name[cookie_dict]] | keyword[def] identifier[dict_from_cookiejar] ( identifier[cj] ):
literal[string]
identifier[cookie_dict] ={}
keyword[for] identifier[_] , identifier[cookies] keyword[in] identifier[cj] . identifier[_cookies] . identifier[items] ():
keyword[for] identifier[_] , identifier[cookies] keyword[in] identifier[cookies] . identifier[items] ():
keyword[for] identifier[cookie] keyword[in] identifier[cookies] . identifier[values] ():
identifier[cookie_dict] [ identifier[cookie] . identifier[name] ]= identifier[cookie] . identifier[value]
keyword[return] identifier[cookie_dict] | def dict_from_cookiejar(cj):
"""Returns a key/value dictionary from a CookieJar.
:param cj: CookieJar object to extract cookies from.
"""
cookie_dict = {}
for (_, cookies) in cj._cookies.items():
for (_, cookies) in cookies.items():
for cookie in cookies.values():
# print cookie
cookie_dict[cookie.name] = cookie.value # depends on [control=['for'], data=['cookie']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return cookie_dict |
def vn_allocate(call=None, kwargs=None):
'''
Allocates a new virtual network in OpenNebula.
.. versionadded:: 2016.3.0
path
The path to a file containing the template of the virtual network to allocate.
Syntax within the file can be the usual attribute=value or XML. Can be used
instead of ``data``.
data
Contains the template of the virtual network to allocate. Syntax can be the
usual attribute=value or XML. Can be used instead of ``path``.
cluster_id
The ID of the cluster for which to add the new virtual network. Can be used
instead of ``cluster_name``. If neither ``cluster_id`` nor ``cluster_name``
are provided, the virtual network won’t be added to any cluster.
cluster_name
The name of the cluster for which to add the new virtual network. Can be used
instead of ``cluster_id``. If neither ``cluster_name`` nor ``cluster_id`` are
provided, the virtual network won't be added to any cluster.
CLI Example:
.. code-block:: bash
salt-cloud -f vn_allocate opennebula path=/path/to/vn_file.txt
'''
if call != 'function':
raise SaltCloudSystemExit(
'The vn_allocate function must be called with -f or --function.'
)
if kwargs is None:
kwargs = {}
cluster_id = kwargs.get('cluster_id', None)
cluster_name = kwargs.get('cluster_name', None)
path = kwargs.get('path', None)
data = kwargs.get('data', None)
if data:
if path:
log.warning(
'Both the \'data\' and \'path\' arguments were provided. '
'\'data\' will take precedence.'
)
elif path:
with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(
'The vn_allocate function requires either \'data\' or a file \'path\' '
'to be provided.'
)
if cluster_id:
if cluster_name:
log.warning(
'Both the \'cluster_id\' and \'cluster_name\' arguments were provided. '
'\'cluster_id\' will take precedence.'
)
elif cluster_name:
cluster_id = get_cluster_id(kwargs={'name': cluster_name})
else:
cluster_id = '-1'
server, user, password = _get_xml_rpc()
auth = ':'.join([user, password])
response = server.one.vn.allocate(auth, data, int(cluster_id))
ret = {
'action': 'vn.allocate',
'allocated': response[0],
'vn_id': response[1],
'error_code': response[2],
}
return ret | def function[vn_allocate, parameter[call, kwargs]]:
constant[
Allocates a new virtual network in OpenNebula.
.. versionadded:: 2016.3.0
path
The path to a file containing the template of the virtual network to allocate.
Syntax within the file can be the usual attribute=value or XML. Can be used
instead of ``data``.
data
Contains the template of the virtual network to allocate. Syntax can be the
usual attribute=value or XML. Can be used instead of ``path``.
cluster_id
The ID of the cluster for which to add the new virtual network. Can be used
instead of ``cluster_name``. If neither ``cluster_id`` nor ``cluster_name``
are provided, the virtual network won’t be added to any cluster.
cluster_name
The name of the cluster for which to add the new virtual network. Can be used
instead of ``cluster_id``. If neither ``cluster_name`` nor ``cluster_id`` are
provided, the virtual network won't be added to any cluster.
CLI Example:
.. code-block:: bash
salt-cloud -f vn_allocate opennebula path=/path/to/vn_file.txt
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
<ast.Raise object at 0x7da20c6a9e70>
if compare[name[kwargs] is constant[None]] begin[:]
variable[kwargs] assign[=] dictionary[[], []]
variable[cluster_id] assign[=] call[name[kwargs].get, parameter[constant[cluster_id], constant[None]]]
variable[cluster_name] assign[=] call[name[kwargs].get, parameter[constant[cluster_name], constant[None]]]
variable[path] assign[=] call[name[kwargs].get, parameter[constant[path], constant[None]]]
variable[data] assign[=] call[name[kwargs].get, parameter[constant[data], constant[None]]]
if name[data] begin[:]
if name[path] begin[:]
call[name[log].warning, parameter[constant[Both the 'data' and 'path' arguments were provided. 'data' will take precedence.]]]
if name[cluster_id] begin[:]
if name[cluster_name] begin[:]
call[name[log].warning, parameter[constant[Both the 'cluster_id' and 'cluster_name' arguments were provided. 'cluster_id' will take precedence.]]]
<ast.Tuple object at 0x7da18dc07910> assign[=] call[name[_get_xml_rpc], parameter[]]
variable[auth] assign[=] call[constant[:].join, parameter[list[[<ast.Name object at 0x7da18dc04910>, <ast.Name object at 0x7da18dc04bb0>]]]]
variable[response] assign[=] call[name[server].one.vn.allocate, parameter[name[auth], name[data], call[name[int], parameter[name[cluster_id]]]]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18dc05660>, <ast.Constant object at 0x7da18dc04430>, <ast.Constant object at 0x7da18dc06cb0>, <ast.Constant object at 0x7da18dc06d40>], [<ast.Constant object at 0x7da18dc07760>, <ast.Subscript object at 0x7da18dc052a0>, <ast.Subscript object at 0x7da18dc05c90>, <ast.Subscript object at 0x7da18dc06890>]]
return[name[ret]] | keyword[def] identifier[vn_allocate] ( identifier[call] = keyword[None] , identifier[kwargs] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
keyword[if] identifier[kwargs] keyword[is] keyword[None] :
identifier[kwargs] ={}
identifier[cluster_id] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[cluster_name] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[path] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[data] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[data] :
keyword[if] identifier[path] :
identifier[log] . identifier[warning] (
literal[string]
literal[string]
)
keyword[elif] identifier[path] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[path] , identifier[mode] = literal[string] ) keyword[as] identifier[rfh] :
identifier[data] = identifier[rfh] . identifier[read] ()
keyword[else] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
literal[string]
)
keyword[if] identifier[cluster_id] :
keyword[if] identifier[cluster_name] :
identifier[log] . identifier[warning] (
literal[string]
literal[string]
)
keyword[elif] identifier[cluster_name] :
identifier[cluster_id] = identifier[get_cluster_id] ( identifier[kwargs] ={ literal[string] : identifier[cluster_name] })
keyword[else] :
identifier[cluster_id] = literal[string]
identifier[server] , identifier[user] , identifier[password] = identifier[_get_xml_rpc] ()
identifier[auth] = literal[string] . identifier[join] ([ identifier[user] , identifier[password] ])
identifier[response] = identifier[server] . identifier[one] . identifier[vn] . identifier[allocate] ( identifier[auth] , identifier[data] , identifier[int] ( identifier[cluster_id] ))
identifier[ret] ={
literal[string] : literal[string] ,
literal[string] : identifier[response] [ literal[int] ],
literal[string] : identifier[response] [ literal[int] ],
literal[string] : identifier[response] [ literal[int] ],
}
keyword[return] identifier[ret] | def vn_allocate(call=None, kwargs=None):
"""
Allocates a new virtual network in OpenNebula.
.. versionadded:: 2016.3.0
path
The path to a file containing the template of the virtual network to allocate.
Syntax within the file can be the usual attribute=value or XML. Can be used
instead of ``data``.
data
Contains the template of the virtual network to allocate. Syntax can be the
usual attribute=value or XML. Can be used instead of ``path``.
cluster_id
The ID of the cluster for which to add the new virtual network. Can be used
instead of ``cluster_name``. If neither ``cluster_id`` nor ``cluster_name``
are provided, the virtual network won’t be added to any cluster.
cluster_name
The name of the cluster for which to add the new virtual network. Can be used
instead of ``cluster_id``. If neither ``cluster_name`` nor ``cluster_id`` are
provided, the virtual network won't be added to any cluster.
CLI Example:
.. code-block:: bash
salt-cloud -f vn_allocate opennebula path=/path/to/vn_file.txt
"""
if call != 'function':
raise SaltCloudSystemExit('The vn_allocate function must be called with -f or --function.') # depends on [control=['if'], data=[]]
if kwargs is None:
kwargs = {} # depends on [control=['if'], data=['kwargs']]
cluster_id = kwargs.get('cluster_id', None)
cluster_name = kwargs.get('cluster_name', None)
path = kwargs.get('path', None)
data = kwargs.get('data', None)
if data:
if path:
log.warning("Both the 'data' and 'path' arguments were provided. 'data' will take precedence.") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif path:
with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read() # depends on [control=['with'], data=['rfh']] # depends on [control=['if'], data=[]]
else:
raise SaltCloudSystemExit("The vn_allocate function requires either 'data' or a file 'path' to be provided.")
if cluster_id:
if cluster_name:
log.warning("Both the 'cluster_id' and 'cluster_name' arguments were provided. 'cluster_id' will take precedence.") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif cluster_name:
cluster_id = get_cluster_id(kwargs={'name': cluster_name}) # depends on [control=['if'], data=[]]
else:
cluster_id = '-1'
(server, user, password) = _get_xml_rpc()
auth = ':'.join([user, password])
response = server.one.vn.allocate(auth, data, int(cluster_id))
ret = {'action': 'vn.allocate', 'allocated': response[0], 'vn_id': response[1], 'error_code': response[2]}
return ret |
def parse_schema_definition(lexer: Lexer) -> SchemaDefinitionNode:
"""SchemaDefinition"""
start = lexer.token
expect_keyword(lexer, "schema")
directives = parse_directives(lexer, True)
operation_types = many_nodes(
lexer, TokenKind.BRACE_L, parse_operation_type_definition, TokenKind.BRACE_R
)
return SchemaDefinitionNode(
directives=directives, operation_types=operation_types, loc=loc(lexer, start)
) | def function[parse_schema_definition, parameter[lexer]]:
constant[SchemaDefinition]
variable[start] assign[=] name[lexer].token
call[name[expect_keyword], parameter[name[lexer], constant[schema]]]
variable[directives] assign[=] call[name[parse_directives], parameter[name[lexer], constant[True]]]
variable[operation_types] assign[=] call[name[many_nodes], parameter[name[lexer], name[TokenKind].BRACE_L, name[parse_operation_type_definition], name[TokenKind].BRACE_R]]
return[call[name[SchemaDefinitionNode], parameter[]]] | keyword[def] identifier[parse_schema_definition] ( identifier[lexer] : identifier[Lexer] )-> identifier[SchemaDefinitionNode] :
literal[string]
identifier[start] = identifier[lexer] . identifier[token]
identifier[expect_keyword] ( identifier[lexer] , literal[string] )
identifier[directives] = identifier[parse_directives] ( identifier[lexer] , keyword[True] )
identifier[operation_types] = identifier[many_nodes] (
identifier[lexer] , identifier[TokenKind] . identifier[BRACE_L] , identifier[parse_operation_type_definition] , identifier[TokenKind] . identifier[BRACE_R]
)
keyword[return] identifier[SchemaDefinitionNode] (
identifier[directives] = identifier[directives] , identifier[operation_types] = identifier[operation_types] , identifier[loc] = identifier[loc] ( identifier[lexer] , identifier[start] )
) | def parse_schema_definition(lexer: Lexer) -> SchemaDefinitionNode:
"""SchemaDefinition"""
start = lexer.token
expect_keyword(lexer, 'schema')
directives = parse_directives(lexer, True)
operation_types = many_nodes(lexer, TokenKind.BRACE_L, parse_operation_type_definition, TokenKind.BRACE_R)
return SchemaDefinitionNode(directives=directives, operation_types=operation_types, loc=loc(lexer, start)) |
def is_in_bounds(self, x, y):
"""
:return: whether ``(x, y)`` is inside the :ref:`bounds
<png-builder-bounds>`
:rtype: bool
"""
lower = self._min_x <= x and self._min_y <= y
upper = self._max_x > x and self._max_y > y
return lower and upper | def function[is_in_bounds, parameter[self, x, y]]:
constant[
:return: whether ``(x, y)`` is inside the :ref:`bounds
<png-builder-bounds>`
:rtype: bool
]
variable[lower] assign[=] <ast.BoolOp object at 0x7da20c6e5000>
variable[upper] assign[=] <ast.BoolOp object at 0x7da20c6e6bf0>
return[<ast.BoolOp object at 0x7da20c6e6aa0>] | keyword[def] identifier[is_in_bounds] ( identifier[self] , identifier[x] , identifier[y] ):
literal[string]
identifier[lower] = identifier[self] . identifier[_min_x] <= identifier[x] keyword[and] identifier[self] . identifier[_min_y] <= identifier[y]
identifier[upper] = identifier[self] . identifier[_max_x] > identifier[x] keyword[and] identifier[self] . identifier[_max_y] > identifier[y]
keyword[return] identifier[lower] keyword[and] identifier[upper] | def is_in_bounds(self, x, y):
"""
:return: whether ``(x, y)`` is inside the :ref:`bounds
<png-builder-bounds>`
:rtype: bool
"""
lower = self._min_x <= x and self._min_y <= y
upper = self._max_x > x and self._max_y > y
return lower and upper |
def upload_image(self, subreddit, image_path, name=None,
header=False, upload_as=None):
"""Upload an image to the subreddit.
:param image_path: A path to the jpg or png image you want to upload.
:param name: The name to provide the image. When None the name will be
filename less any extension.
:param header: When True, upload the image as the subreddit header.
:param upload_as: Must be `'jpg'`, `'png'` or `None`. When None, this
will match the format of the image itself. In all cases where both
this value and the image format is not png, reddit will also
convert the image mode to RGBA. reddit optimizes the image
according to this value.
:returns: A link to the uploaded image. Raises an exception otherwise.
"""
if name and header:
raise TypeError('Both name and header cannot be set.')
if upload_as not in (None, 'png', 'jpg'):
raise TypeError("upload_as must be 'jpg', 'png', or None.")
with open(image_path, 'rb') as image:
image_type = upload_as or _image_type(image)
data = {'r': six.text_type(subreddit), 'img_type': image_type}
if header:
data['header'] = 1
else:
if not name:
name = os.path.splitext(os.path.basename(image.name))[0]
data['name'] = name
response = json.loads(self._request(
self.config['upload_image'], data=data, files={'file': image},
method=to_native_string('POST'), retry_on_error=False))
if response['errors']:
raise errors.APIException(response['errors'], None)
return response['img_src'] | def function[upload_image, parameter[self, subreddit, image_path, name, header, upload_as]]:
constant[Upload an image to the subreddit.
:param image_path: A path to the jpg or png image you want to upload.
:param name: The name to provide the image. When None the name will be
filename less any extension.
:param header: When True, upload the image as the subreddit header.
:param upload_as: Must be `'jpg'`, `'png'` or `None`. When None, this
will match the format of the image itself. In all cases where both
this value and the image format is not png, reddit will also
convert the image mode to RGBA. reddit optimizes the image
according to this value.
:returns: A link to the uploaded image. Raises an exception otherwise.
]
if <ast.BoolOp object at 0x7da18fe928f0> begin[:]
<ast.Raise object at 0x7da18fe93730>
if compare[name[upload_as] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da18fe91480>, <ast.Constant object at 0x7da18fe93be0>, <ast.Constant object at 0x7da18fe90670>]]] begin[:]
<ast.Raise object at 0x7da18fe90700>
with call[name[open], parameter[name[image_path], constant[rb]]] begin[:]
variable[image_type] assign[=] <ast.BoolOp object at 0x7da18fe90040>
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18fe92e90>, <ast.Constant object at 0x7da18fe91ed0>], [<ast.Call object at 0x7da18fe93910>, <ast.Name object at 0x7da18fe91030>]]
if name[header] begin[:]
call[name[data]][constant[header]] assign[=] constant[1]
variable[response] assign[=] call[name[json].loads, parameter[call[name[self]._request, parameter[call[name[self].config][constant[upload_image]]]]]]
if call[name[response]][constant[errors]] begin[:]
<ast.Raise object at 0x7da18f58e380>
return[call[name[response]][constant[img_src]]] | keyword[def] identifier[upload_image] ( identifier[self] , identifier[subreddit] , identifier[image_path] , identifier[name] = keyword[None] ,
identifier[header] = keyword[False] , identifier[upload_as] = keyword[None] ):
literal[string]
keyword[if] identifier[name] keyword[and] identifier[header] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[upload_as] keyword[not] keyword[in] ( keyword[None] , literal[string] , literal[string] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[with] identifier[open] ( identifier[image_path] , literal[string] ) keyword[as] identifier[image] :
identifier[image_type] = identifier[upload_as] keyword[or] identifier[_image_type] ( identifier[image] )
identifier[data] ={ literal[string] : identifier[six] . identifier[text_type] ( identifier[subreddit] ), literal[string] : identifier[image_type] }
keyword[if] identifier[header] :
identifier[data] [ literal[string] ]= literal[int]
keyword[else] :
keyword[if] keyword[not] identifier[name] :
identifier[name] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[image] . identifier[name] ))[ literal[int] ]
identifier[data] [ literal[string] ]= identifier[name]
identifier[response] = identifier[json] . identifier[loads] ( identifier[self] . identifier[_request] (
identifier[self] . identifier[config] [ literal[string] ], identifier[data] = identifier[data] , identifier[files] ={ literal[string] : identifier[image] },
identifier[method] = identifier[to_native_string] ( literal[string] ), identifier[retry_on_error] = keyword[False] ))
keyword[if] identifier[response] [ literal[string] ]:
keyword[raise] identifier[errors] . identifier[APIException] ( identifier[response] [ literal[string] ], keyword[None] )
keyword[return] identifier[response] [ literal[string] ] | def upload_image(self, subreddit, image_path, name=None, header=False, upload_as=None):
"""Upload an image to the subreddit.
:param image_path: A path to the jpg or png image you want to upload.
:param name: The name to provide the image. When None the name will be
filename less any extension.
:param header: When True, upload the image as the subreddit header.
:param upload_as: Must be `'jpg'`, `'png'` or `None`. When None, this
will match the format of the image itself. In all cases where both
this value and the image format is not png, reddit will also
convert the image mode to RGBA. reddit optimizes the image
according to this value.
:returns: A link to the uploaded image. Raises an exception otherwise.
"""
if name and header:
raise TypeError('Both name and header cannot be set.') # depends on [control=['if'], data=[]]
if upload_as not in (None, 'png', 'jpg'):
raise TypeError("upload_as must be 'jpg', 'png', or None.") # depends on [control=['if'], data=[]]
with open(image_path, 'rb') as image:
image_type = upload_as or _image_type(image)
data = {'r': six.text_type(subreddit), 'img_type': image_type}
if header:
data['header'] = 1 # depends on [control=['if'], data=[]]
else:
if not name:
name = os.path.splitext(os.path.basename(image.name))[0] # depends on [control=['if'], data=[]]
data['name'] = name
response = json.loads(self._request(self.config['upload_image'], data=data, files={'file': image}, method=to_native_string('POST'), retry_on_error=False)) # depends on [control=['with'], data=['image']]
if response['errors']:
raise errors.APIException(response['errors'], None) # depends on [control=['if'], data=[]]
return response['img_src'] |
def register(cls, panel):
"""Registers a :class:`~horizon.Panel` with this dashboard."""
panel_class = Horizon.register_panel(cls, panel)
# Support template loading from panel template directories.
panel_mod = import_module(panel.__module__)
panel_dir = os.path.dirname(panel_mod.__file__)
template_dir = os.path.join(panel_dir, "templates")
if os.path.exists(template_dir):
key = os.path.join(cls.slug, panel.slug)
loaders.panel_template_dirs[key] = template_dir
return panel_class | def function[register, parameter[cls, panel]]:
constant[Registers a :class:`~horizon.Panel` with this dashboard.]
variable[panel_class] assign[=] call[name[Horizon].register_panel, parameter[name[cls], name[panel]]]
variable[panel_mod] assign[=] call[name[import_module], parameter[name[panel].__module__]]
variable[panel_dir] assign[=] call[name[os].path.dirname, parameter[name[panel_mod].__file__]]
variable[template_dir] assign[=] call[name[os].path.join, parameter[name[panel_dir], constant[templates]]]
if call[name[os].path.exists, parameter[name[template_dir]]] begin[:]
variable[key] assign[=] call[name[os].path.join, parameter[name[cls].slug, name[panel].slug]]
call[name[loaders].panel_template_dirs][name[key]] assign[=] name[template_dir]
return[name[panel_class]] | keyword[def] identifier[register] ( identifier[cls] , identifier[panel] ):
literal[string]
identifier[panel_class] = identifier[Horizon] . identifier[register_panel] ( identifier[cls] , identifier[panel] )
identifier[panel_mod] = identifier[import_module] ( identifier[panel] . identifier[__module__] )
identifier[panel_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[panel_mod] . identifier[__file__] )
identifier[template_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[panel_dir] , literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[template_dir] ):
identifier[key] = identifier[os] . identifier[path] . identifier[join] ( identifier[cls] . identifier[slug] , identifier[panel] . identifier[slug] )
identifier[loaders] . identifier[panel_template_dirs] [ identifier[key] ]= identifier[template_dir]
keyword[return] identifier[panel_class] | def register(cls, panel):
"""Registers a :class:`~horizon.Panel` with this dashboard."""
panel_class = Horizon.register_panel(cls, panel)
# Support template loading from panel template directories.
panel_mod = import_module(panel.__module__)
panel_dir = os.path.dirname(panel_mod.__file__)
template_dir = os.path.join(panel_dir, 'templates')
if os.path.exists(template_dir):
key = os.path.join(cls.slug, panel.slug)
loaders.panel_template_dirs[key] = template_dir # depends on [control=['if'], data=[]]
return panel_class |
def values(self, *args):
"""
Returns list of dicts (field names as keys) for given fields.
Args:
\*args: List of fields to be returned as dict.
Returns:
list of dicts for given fields.
Example:
>>> Person.objects.filter(age__gte=16, name__startswith='jo').values('name', 'lastname')
"""
return [dict(zip(args, values_list))
for values_list in self.values_list(flatten=False, *args)] | def function[values, parameter[self]]:
constant[
Returns list of dicts (field names as keys) for given fields.
Args:
\*args: List of fields to be returned as dict.
Returns:
list of dicts for given fields.
Example:
>>> Person.objects.filter(age__gte=16, name__startswith='jo').values('name', 'lastname')
]
return[<ast.ListComp object at 0x7da20e954100>] | keyword[def] identifier[values] ( identifier[self] ,* identifier[args] ):
literal[string]
keyword[return] [ identifier[dict] ( identifier[zip] ( identifier[args] , identifier[values_list] ))
keyword[for] identifier[values_list] keyword[in] identifier[self] . identifier[values_list] ( identifier[flatten] = keyword[False] ,* identifier[args] )] | def values(self, *args):
"""
Returns list of dicts (field names as keys) for given fields.
Args:
\\*args: List of fields to be returned as dict.
Returns:
list of dicts for given fields.
Example:
>>> Person.objects.filter(age__gte=16, name__startswith='jo').values('name', 'lastname')
"""
return [dict(zip(args, values_list)) for values_list in self.values_list(*args, flatten=False)] |
def plural(self, text, count=None):
"""
Return the plural of text.
If count supplied, then return text if count is one of:
1, a, an, one, each, every, this, that
otherwise return the plural.
Whitespace at the start and end is preserved.
"""
pre, word, post = self.partition_word(text)
if not word:
return text
plural = self.postprocess(
word,
self._pl_special_adjective(word, count)
or self._pl_special_verb(word, count)
or self._plnoun(word, count),
)
return "{}{}{}".format(pre, plural, post) | def function[plural, parameter[self, text, count]]:
constant[
Return the plural of text.
If count supplied, then return text if count is one of:
1, a, an, one, each, every, this, that
otherwise return the plural.
Whitespace at the start and end is preserved.
]
<ast.Tuple object at 0x7da18f811c60> assign[=] call[name[self].partition_word, parameter[name[text]]]
if <ast.UnaryOp object at 0x7da18f812f20> begin[:]
return[name[text]]
variable[plural] assign[=] call[name[self].postprocess, parameter[name[word], <ast.BoolOp object at 0x7da18f811240>]]
return[call[constant[{}{}{}].format, parameter[name[pre], name[plural], name[post]]]] | keyword[def] identifier[plural] ( identifier[self] , identifier[text] , identifier[count] = keyword[None] ):
literal[string]
identifier[pre] , identifier[word] , identifier[post] = identifier[self] . identifier[partition_word] ( identifier[text] )
keyword[if] keyword[not] identifier[word] :
keyword[return] identifier[text]
identifier[plural] = identifier[self] . identifier[postprocess] (
identifier[word] ,
identifier[self] . identifier[_pl_special_adjective] ( identifier[word] , identifier[count] )
keyword[or] identifier[self] . identifier[_pl_special_verb] ( identifier[word] , identifier[count] )
keyword[or] identifier[self] . identifier[_plnoun] ( identifier[word] , identifier[count] ),
)
keyword[return] literal[string] . identifier[format] ( identifier[pre] , identifier[plural] , identifier[post] ) | def plural(self, text, count=None):
"""
Return the plural of text.
If count supplied, then return text if count is one of:
1, a, an, one, each, every, this, that
otherwise return the plural.
Whitespace at the start and end is preserved.
"""
(pre, word, post) = self.partition_word(text)
if not word:
return text # depends on [control=['if'], data=[]]
plural = self.postprocess(word, self._pl_special_adjective(word, count) or self._pl_special_verb(word, count) or self._plnoun(word, count))
return '{}{}{}'.format(pre, plural, post) |
def categories(self):
"""
Lists the categories in the lexicon, except the
optional categories.
Returns:
list: A list of strings of category names.
"""
keys = [k for k in self.__dict__.keys() if k not in SPECIAL]
return keys | def function[categories, parameter[self]]:
constant[
Lists the categories in the lexicon, except the
optional categories.
Returns:
list: A list of strings of category names.
]
variable[keys] assign[=] <ast.ListComp object at 0x7da2041d90f0>
return[name[keys]] | keyword[def] identifier[categories] ( identifier[self] ):
literal[string]
identifier[keys] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[__dict__] . identifier[keys] () keyword[if] identifier[k] keyword[not] keyword[in] identifier[SPECIAL] ]
keyword[return] identifier[keys] | def categories(self):
"""
Lists the categories in the lexicon, except the
optional categories.
Returns:
list: A list of strings of category names.
"""
keys = [k for k in self.__dict__.keys() if k not in SPECIAL]
return keys |
def nl_cb_set(cb, type_, kind, func, arg):
"""Set up a callback. Updates `cb` in place.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/handlers.c#L293
Positional arguments:
cb -- nl_cb class instance.
type_ -- callback to modify (integer).
kind -- kind of implementation (integer).
func -- callback function (NL_CB_CUSTOM).
arg -- argument passed to callback.
Returns:
0 on success or a negative error code.
"""
if type_ < 0 or type_ > NL_CB_TYPE_MAX or kind < 0 or kind > NL_CB_KIND_MAX:
return -NLE_RANGE
if kind == NL_CB_CUSTOM:
cb.cb_set[type_] = func
cb.cb_args[type_] = arg
else:
cb.cb_set[type_] = cb_def[type_][kind]
cb.cb_args[type_] = arg
return 0 | def function[nl_cb_set, parameter[cb, type_, kind, func, arg]]:
constant[Set up a callback. Updates `cb` in place.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/handlers.c#L293
Positional arguments:
cb -- nl_cb class instance.
type_ -- callback to modify (integer).
kind -- kind of implementation (integer).
func -- callback function (NL_CB_CUSTOM).
arg -- argument passed to callback.
Returns:
0 on success or a negative error code.
]
if <ast.BoolOp object at 0x7da1b2626ec0> begin[:]
return[<ast.UnaryOp object at 0x7da1b2624ac0>]
if compare[name[kind] equal[==] name[NL_CB_CUSTOM]] begin[:]
call[name[cb].cb_set][name[type_]] assign[=] name[func]
call[name[cb].cb_args][name[type_]] assign[=] name[arg]
return[constant[0]] | keyword[def] identifier[nl_cb_set] ( identifier[cb] , identifier[type_] , identifier[kind] , identifier[func] , identifier[arg] ):
literal[string]
keyword[if] identifier[type_] < literal[int] keyword[or] identifier[type_] > identifier[NL_CB_TYPE_MAX] keyword[or] identifier[kind] < literal[int] keyword[or] identifier[kind] > identifier[NL_CB_KIND_MAX] :
keyword[return] - identifier[NLE_RANGE]
keyword[if] identifier[kind] == identifier[NL_CB_CUSTOM] :
identifier[cb] . identifier[cb_set] [ identifier[type_] ]= identifier[func]
identifier[cb] . identifier[cb_args] [ identifier[type_] ]= identifier[arg]
keyword[else] :
identifier[cb] . identifier[cb_set] [ identifier[type_] ]= identifier[cb_def] [ identifier[type_] ][ identifier[kind] ]
identifier[cb] . identifier[cb_args] [ identifier[type_] ]= identifier[arg]
keyword[return] literal[int] | def nl_cb_set(cb, type_, kind, func, arg):
"""Set up a callback. Updates `cb` in place.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/handlers.c#L293
Positional arguments:
cb -- nl_cb class instance.
type_ -- callback to modify (integer).
kind -- kind of implementation (integer).
func -- callback function (NL_CB_CUSTOM).
arg -- argument passed to callback.
Returns:
0 on success or a negative error code.
"""
if type_ < 0 or type_ > NL_CB_TYPE_MAX or kind < 0 or (kind > NL_CB_KIND_MAX):
return -NLE_RANGE # depends on [control=['if'], data=[]]
if kind == NL_CB_CUSTOM:
cb.cb_set[type_] = func
cb.cb_args[type_] = arg # depends on [control=['if'], data=[]]
else:
cb.cb_set[type_] = cb_def[type_][kind]
cb.cb_args[type_] = arg
return 0 |
def plot_predict(self, h=5, past_values=20, intervals=True, **kwargs):
""" Makes forecast with the estimated model
Parameters
----------
h : int (default : 5)
How many steps ahead would you like to forecast?
past_values : int (default : 20)
How many past observations to show on the forecast graph?
intervals : Boolean
Would you like to show prediction intervals for the forecast?
Returns
----------
- Plot of the forecast
"""
import matplotlib.pyplot as plt
import seaborn as sns
figsize = kwargs.get('figsize',(10,7))
nsims = kwargs.get('nsims', 200)
if self.latent_variables.estimated is False:
raise Exception("No latent variables estimated!")
else:
# Retrieve data, dates and (transformed) latent variables
if self.latent_variables.estimation_method in ['M-H']:
lower_final = 0
upper_final = 0
plot_values_final = 0
date_index = self.shift_dates(h)
plot_index = date_index[-h-past_values:]
for i in range(nsims):
t_params = self.draw_latent_variables(nsims=1).T[0]
a, P = self._forecast_model(t_params, h)
plot_values = a[0][-h-past_values:]
forecasted_values = a[0][-h:]
lower = forecasted_values - 1.96*np.power(P[0][0][-h:] + self.latent_variables.z_list[0].prior.transform(t_params[0]),0.5)
upper = forecasted_values + 1.96*np.power(P[0][0][-h:] + self.latent_variables.z_list[0].prior.transform(t_params[0]),0.5)
lower_final += np.append(plot_values[-h-1], lower)
upper_final += np.append(plot_values[-h-1], upper)
plot_values_final += plot_values
plot_values_final = plot_values_final / nsims
lower_final = lower_final / nsims
upper_final = upper_final / nsims
plt.figure(figsize=figsize)
if intervals == True:
plt.fill_between(date_index[-h-1:], lower_final, upper_final, alpha=0.2)
plt.plot(plot_index, plot_values_final)
plt.title("Forecast for " + self.data_name)
plt.xlabel("Time")
plt.ylabel(self.data_name)
plt.show()
else:
a, P = self._forecast_model(self.latent_variables.get_z_values(),h)
date_index = self.shift_dates(h)
plot_values = a[0][-h-past_values:]
forecasted_values = a[0][-h:]
lower = forecasted_values - 1.96*np.power(P[0][0][-h:] + self.latent_variables.z_list[0].prior.transform(self.latent_variables.get_z_values()[0]),0.5)
upper = forecasted_values + 1.96*np.power(P[0][0][-h:] + self.latent_variables.z_list[0].prior.transform(self.latent_variables.get_z_values()[0]),0.5)
lower = np.append(plot_values[-h-1],lower)
upper = np.append(plot_values[-h-1],upper)
plot_index = date_index[-h-past_values:]
plt.figure(figsize=figsize)
if intervals == True:
plt.fill_between(date_index[-h-1:], lower, upper, alpha=0.2)
plt.plot(plot_index,plot_values)
plt.title("Forecast for " + self.data_name)
plt.xlabel("Time")
plt.ylabel(self.data_name)
plt.show() | def function[plot_predict, parameter[self, h, past_values, intervals]]:
constant[ Makes forecast with the estimated model
Parameters
----------
h : int (default : 5)
How many steps ahead would you like to forecast?
past_values : int (default : 20)
How many past observations to show on the forecast graph?
intervals : Boolean
Would you like to show prediction intervals for the forecast?
Returns
----------
- Plot of the forecast
]
import module[matplotlib.pyplot] as alias[plt]
import module[seaborn] as alias[sns]
variable[figsize] assign[=] call[name[kwargs].get, parameter[constant[figsize], tuple[[<ast.Constant object at 0x7da204960430>, <ast.Constant object at 0x7da204960310>]]]]
variable[nsims] assign[=] call[name[kwargs].get, parameter[constant[nsims], constant[200]]]
if compare[name[self].latent_variables.estimated is constant[False]] begin[:]
<ast.Raise object at 0x7da2049631f0> | keyword[def] identifier[plot_predict] ( identifier[self] , identifier[h] = literal[int] , identifier[past_values] = literal[int] , identifier[intervals] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[import] identifier[seaborn] keyword[as] identifier[sns]
identifier[figsize] = identifier[kwargs] . identifier[get] ( literal[string] ,( literal[int] , literal[int] ))
identifier[nsims] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
keyword[if] identifier[self] . identifier[latent_variables] . identifier[estimated] keyword[is] keyword[False] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[else] :
keyword[if] identifier[self] . identifier[latent_variables] . identifier[estimation_method] keyword[in] [ literal[string] ]:
identifier[lower_final] = literal[int]
identifier[upper_final] = literal[int]
identifier[plot_values_final] = literal[int]
identifier[date_index] = identifier[self] . identifier[shift_dates] ( identifier[h] )
identifier[plot_index] = identifier[date_index] [- identifier[h] - identifier[past_values] :]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nsims] ):
identifier[t_params] = identifier[self] . identifier[draw_latent_variables] ( identifier[nsims] = literal[int] ). identifier[T] [ literal[int] ]
identifier[a] , identifier[P] = identifier[self] . identifier[_forecast_model] ( identifier[t_params] , identifier[h] )
identifier[plot_values] = identifier[a] [ literal[int] ][- identifier[h] - identifier[past_values] :]
identifier[forecasted_values] = identifier[a] [ literal[int] ][- identifier[h] :]
identifier[lower] = identifier[forecasted_values] - literal[int] * identifier[np] . identifier[power] ( identifier[P] [ literal[int] ][ literal[int] ][- identifier[h] :]+ identifier[self] . identifier[latent_variables] . identifier[z_list] [ literal[int] ]. identifier[prior] . identifier[transform] ( identifier[t_params] [ literal[int] ]), literal[int] )
identifier[upper] = identifier[forecasted_values] + literal[int] * identifier[np] . identifier[power] ( identifier[P] [ literal[int] ][ literal[int] ][- identifier[h] :]+ identifier[self] . identifier[latent_variables] . identifier[z_list] [ literal[int] ]. identifier[prior] . identifier[transform] ( identifier[t_params] [ literal[int] ]), literal[int] )
identifier[lower_final] += identifier[np] . identifier[append] ( identifier[plot_values] [- identifier[h] - literal[int] ], identifier[lower] )
identifier[upper_final] += identifier[np] . identifier[append] ( identifier[plot_values] [- identifier[h] - literal[int] ], identifier[upper] )
identifier[plot_values_final] += identifier[plot_values]
identifier[plot_values_final] = identifier[plot_values_final] / identifier[nsims]
identifier[lower_final] = identifier[lower_final] / identifier[nsims]
identifier[upper_final] = identifier[upper_final] / identifier[nsims]
identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] )
keyword[if] identifier[intervals] == keyword[True] :
identifier[plt] . identifier[fill_between] ( identifier[date_index] [- identifier[h] - literal[int] :], identifier[lower_final] , identifier[upper_final] , identifier[alpha] = literal[int] )
identifier[plt] . identifier[plot] ( identifier[plot_index] , identifier[plot_values_final] )
identifier[plt] . identifier[title] ( literal[string] + identifier[self] . identifier[data_name] )
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[ylabel] ( identifier[self] . identifier[data_name] )
identifier[plt] . identifier[show] ()
keyword[else] :
identifier[a] , identifier[P] = identifier[self] . identifier[_forecast_model] ( identifier[self] . identifier[latent_variables] . identifier[get_z_values] (), identifier[h] )
identifier[date_index] = identifier[self] . identifier[shift_dates] ( identifier[h] )
identifier[plot_values] = identifier[a] [ literal[int] ][- identifier[h] - identifier[past_values] :]
identifier[forecasted_values] = identifier[a] [ literal[int] ][- identifier[h] :]
identifier[lower] = identifier[forecasted_values] - literal[int] * identifier[np] . identifier[power] ( identifier[P] [ literal[int] ][ literal[int] ][- identifier[h] :]+ identifier[self] . identifier[latent_variables] . identifier[z_list] [ literal[int] ]. identifier[prior] . identifier[transform] ( identifier[self] . identifier[latent_variables] . identifier[get_z_values] ()[ literal[int] ]), literal[int] )
identifier[upper] = identifier[forecasted_values] + literal[int] * identifier[np] . identifier[power] ( identifier[P] [ literal[int] ][ literal[int] ][- identifier[h] :]+ identifier[self] . identifier[latent_variables] . identifier[z_list] [ literal[int] ]. identifier[prior] . identifier[transform] ( identifier[self] . identifier[latent_variables] . identifier[get_z_values] ()[ literal[int] ]), literal[int] )
identifier[lower] = identifier[np] . identifier[append] ( identifier[plot_values] [- identifier[h] - literal[int] ], identifier[lower] )
identifier[upper] = identifier[np] . identifier[append] ( identifier[plot_values] [- identifier[h] - literal[int] ], identifier[upper] )
identifier[plot_index] = identifier[date_index] [- identifier[h] - identifier[past_values] :]
identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] )
keyword[if] identifier[intervals] == keyword[True] :
identifier[plt] . identifier[fill_between] ( identifier[date_index] [- identifier[h] - literal[int] :], identifier[lower] , identifier[upper] , identifier[alpha] = literal[int] )
identifier[plt] . identifier[plot] ( identifier[plot_index] , identifier[plot_values] )
identifier[plt] . identifier[title] ( literal[string] + identifier[self] . identifier[data_name] )
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[ylabel] ( identifier[self] . identifier[data_name] )
identifier[plt] . identifier[show] () | def plot_predict(self, h=5, past_values=20, intervals=True, **kwargs):
""" Makes forecast with the estimated model
Parameters
----------
h : int (default : 5)
How many steps ahead would you like to forecast?
past_values : int (default : 20)
How many past observations to show on the forecast graph?
intervals : Boolean
Would you like to show prediction intervals for the forecast?
Returns
----------
- Plot of the forecast
"""
import matplotlib.pyplot as plt
import seaborn as sns
figsize = kwargs.get('figsize', (10, 7))
nsims = kwargs.get('nsims', 200)
if self.latent_variables.estimated is False:
raise Exception('No latent variables estimated!') # depends on [control=['if'], data=[]] # Retrieve data, dates and (transformed) latent variables
elif self.latent_variables.estimation_method in ['M-H']:
lower_final = 0
upper_final = 0
plot_values_final = 0
date_index = self.shift_dates(h)
plot_index = date_index[-h - past_values:]
for i in range(nsims):
t_params = self.draw_latent_variables(nsims=1).T[0]
(a, P) = self._forecast_model(t_params, h)
plot_values = a[0][-h - past_values:]
forecasted_values = a[0][-h:]
lower = forecasted_values - 1.96 * np.power(P[0][0][-h:] + self.latent_variables.z_list[0].prior.transform(t_params[0]), 0.5)
upper = forecasted_values + 1.96 * np.power(P[0][0][-h:] + self.latent_variables.z_list[0].prior.transform(t_params[0]), 0.5)
lower_final += np.append(plot_values[-h - 1], lower)
upper_final += np.append(plot_values[-h - 1], upper)
plot_values_final += plot_values # depends on [control=['for'], data=[]]
plot_values_final = plot_values_final / nsims
lower_final = lower_final / nsims
upper_final = upper_final / nsims
plt.figure(figsize=figsize)
if intervals == True:
plt.fill_between(date_index[-h - 1:], lower_final, upper_final, alpha=0.2) # depends on [control=['if'], data=[]]
plt.plot(plot_index, plot_values_final)
plt.title('Forecast for ' + self.data_name)
plt.xlabel('Time')
plt.ylabel(self.data_name)
plt.show() # depends on [control=['if'], data=[]]
else:
(a, P) = self._forecast_model(self.latent_variables.get_z_values(), h)
date_index = self.shift_dates(h)
plot_values = a[0][-h - past_values:]
forecasted_values = a[0][-h:]
lower = forecasted_values - 1.96 * np.power(P[0][0][-h:] + self.latent_variables.z_list[0].prior.transform(self.latent_variables.get_z_values()[0]), 0.5)
upper = forecasted_values + 1.96 * np.power(P[0][0][-h:] + self.latent_variables.z_list[0].prior.transform(self.latent_variables.get_z_values()[0]), 0.5)
lower = np.append(plot_values[-h - 1], lower)
upper = np.append(plot_values[-h - 1], upper)
plot_index = date_index[-h - past_values:]
plt.figure(figsize=figsize)
if intervals == True:
plt.fill_between(date_index[-h - 1:], lower, upper, alpha=0.2) # depends on [control=['if'], data=[]]
plt.plot(plot_index, plot_values)
plt.title('Forecast for ' + self.data_name)
plt.xlabel('Time')
plt.ylabel(self.data_name)
plt.show() |
def normalizeFilePath(value):
"""
Normalizes file path.
* **value** must be a :ref:`type-string`.
* Returned value is an unencoded ``unicode`` string
"""
if not isinstance(value, basestring):
raise TypeError("File paths must be strings, not %s."
% type(value).__name__)
return unicode(value) | def function[normalizeFilePath, parameter[value]]:
constant[
Normalizes file path.
* **value** must be a :ref:`type-string`.
* Returned value is an unencoded ``unicode`` string
]
if <ast.UnaryOp object at 0x7da2041dae60> begin[:]
<ast.Raise object at 0x7da2041d82e0>
return[call[name[unicode], parameter[name[value]]]] | keyword[def] identifier[normalizeFilePath] ( identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string]
% identifier[type] ( identifier[value] ). identifier[__name__] )
keyword[return] identifier[unicode] ( identifier[value] ) | def normalizeFilePath(value):
"""
Normalizes file path.
* **value** must be a :ref:`type-string`.
* Returned value is an unencoded ``unicode`` string
"""
if not isinstance(value, basestring):
raise TypeError('File paths must be strings, not %s.' % type(value).__name__) # depends on [control=['if'], data=[]]
return unicode(value) |
def _cast_to_type(self, value):
""" Convert the value to its string representation"""
if isinstance(value, str) or value is None:
return value
return str(value) | def function[_cast_to_type, parameter[self, value]]:
constant[ Convert the value to its string representation]
if <ast.BoolOp object at 0x7da18dc9a920> begin[:]
return[name[value]]
return[call[name[str], parameter[name[value]]]] | keyword[def] identifier[_cast_to_type] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[str] ) keyword[or] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[value]
keyword[return] identifier[str] ( identifier[value] ) | def _cast_to_type(self, value):
""" Convert the value to its string representation"""
if isinstance(value, str) or value is None:
return value # depends on [control=['if'], data=[]]
return str(value) |
def append_utc_timestamp(self, tag, timestamp=None, precision=3,
header=False):
"""Append a field with a UTCTimestamp value.
:param tag: Integer or string FIX tag number.
:param timestamp: Time value, see below.
:param precision: Number of decimal places: 0, 3 (ms) or 6 (us).
:param header: Append to FIX header if True; default to body.
The `timestamp` value should be a datetime, such as created by
datetime.datetime.utcnow(); a float, being the number of seconds
since midnight 1 Jan 1970 UTC, such as returned by time.time();
or, None, in which case datetime.datetime.utcnow() is used to
get the current UTC time.
Precision values other than zero (seconds), 3 (milliseconds),
or 6 (microseconds) will raise an exception. Note that prior
to FIX 5.0, only values of 0 or 3 comply with the standard."""
return self._append_utc_datetime(tag,
"%Y%m%d-%H:%M:%S",
timestamp,
precision,
header) | def function[append_utc_timestamp, parameter[self, tag, timestamp, precision, header]]:
constant[Append a field with a UTCTimestamp value.
:param tag: Integer or string FIX tag number.
:param timestamp: Time value, see below.
:param precision: Number of decimal places: 0, 3 (ms) or 6 (us).
:param header: Append to FIX header if True; default to body.
The `timestamp` value should be a datetime, such as created by
datetime.datetime.utcnow(); a float, being the number of seconds
since midnight 1 Jan 1970 UTC, such as returned by time.time();
or, None, in which case datetime.datetime.utcnow() is used to
get the current UTC time.
Precision values other than zero (seconds), 3 (milliseconds),
or 6 (microseconds) will raise an exception. Note that prior
to FIX 5.0, only values of 0 or 3 comply with the standard.]
return[call[name[self]._append_utc_datetime, parameter[name[tag], constant[%Y%m%d-%H:%M:%S], name[timestamp], name[precision], name[header]]]] | keyword[def] identifier[append_utc_timestamp] ( identifier[self] , identifier[tag] , identifier[timestamp] = keyword[None] , identifier[precision] = literal[int] ,
identifier[header] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[_append_utc_datetime] ( identifier[tag] ,
literal[string] ,
identifier[timestamp] ,
identifier[precision] ,
identifier[header] ) | def append_utc_timestamp(self, tag, timestamp=None, precision=3, header=False):
"""Append a field with a UTCTimestamp value.
:param tag: Integer or string FIX tag number.
:param timestamp: Time value, see below.
:param precision: Number of decimal places: 0, 3 (ms) or 6 (us).
:param header: Append to FIX header if True; default to body.
The `timestamp` value should be a datetime, such as created by
datetime.datetime.utcnow(); a float, being the number of seconds
since midnight 1 Jan 1970 UTC, such as returned by time.time();
or, None, in which case datetime.datetime.utcnow() is used to
get the current UTC time.
Precision values other than zero (seconds), 3 (milliseconds),
or 6 (microseconds) will raise an exception. Note that prior
to FIX 5.0, only values of 0 or 3 comply with the standard."""
return self._append_utc_datetime(tag, '%Y%m%d-%H:%M:%S', timestamp, precision, header) |
def cfloat64_array_to_numpy(cptr, length):
"""Convert a ctypes double pointer array to a numpy array."""
if isinstance(cptr, ctypes.POINTER(ctypes.c_double)):
return np.fromiter(cptr, dtype=np.float64, count=length)
else:
raise RuntimeError('Expected double pointer') | def function[cfloat64_array_to_numpy, parameter[cptr, length]]:
constant[Convert a ctypes double pointer array to a numpy array.]
if call[name[isinstance], parameter[name[cptr], call[name[ctypes].POINTER, parameter[name[ctypes].c_double]]]] begin[:]
return[call[name[np].fromiter, parameter[name[cptr]]]] | keyword[def] identifier[cfloat64_array_to_numpy] ( identifier[cptr] , identifier[length] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[cptr] , identifier[ctypes] . identifier[POINTER] ( identifier[ctypes] . identifier[c_double] )):
keyword[return] identifier[np] . identifier[fromiter] ( identifier[cptr] , identifier[dtype] = identifier[np] . identifier[float64] , identifier[count] = identifier[length] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] ) | def cfloat64_array_to_numpy(cptr, length):
"""Convert a ctypes double pointer array to a numpy array."""
if isinstance(cptr, ctypes.POINTER(ctypes.c_double)):
return np.fromiter(cptr, dtype=np.float64, count=length) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Expected double pointer') |
def get_objective_hierarchy_design_session_for_objective_bank(self, objective_bank_id=None, *args, **kwargs):
"""Gets the OsidSession associated with the objective hierarchy
design service for the given objective bank.
arg: objectiveBankId (osid.id.Id): the Id of the objective
bank
return: (osid.learning.ObjectiveHierarchyDesignSession) - an
ObjectiveHierarchyDesignSession
raise: NotFound - objectiveBankId not found
raise: NullArgument - objectiveBankId is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_objective_hierarchy_design() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_objective_hierarchy_design() and
supports_visible_federation() are true.
"""
if not objective_bank_id:
raise NullArgument
if not self.supports_objective_hierarchy_design():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed()
try:
session = sessions.ObjectiveHierarchyDesignSession(objective_bank_id, runtime=self._runtime)
except AttributeError:
raise OperationFailed()
return session | def function[get_objective_hierarchy_design_session_for_objective_bank, parameter[self, objective_bank_id]]:
constant[Gets the OsidSession associated with the objective hierarchy
design service for the given objective bank.
arg: objectiveBankId (osid.id.Id): the Id of the objective
bank
return: (osid.learning.ObjectiveHierarchyDesignSession) - an
ObjectiveHierarchyDesignSession
raise: NotFound - objectiveBankId not found
raise: NullArgument - objectiveBankId is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_objective_hierarchy_design() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_objective_hierarchy_design() and
supports_visible_federation() are true.
]
if <ast.UnaryOp object at 0x7da1b092f250> begin[:]
<ast.Raise object at 0x7da1b092e8c0>
if <ast.UnaryOp object at 0x7da1b092e0e0> begin[:]
<ast.Raise object at 0x7da1b092f130>
<ast.Try object at 0x7da1b092ded0>
<ast.Try object at 0x7da1b092da20>
return[name[session]] | keyword[def] identifier[get_objective_hierarchy_design_session_for_objective_bank] ( identifier[self] , identifier[objective_bank_id] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[objective_bank_id] :
keyword[raise] identifier[NullArgument]
keyword[if] keyword[not] identifier[self] . identifier[supports_objective_hierarchy_design] ():
keyword[raise] identifier[Unimplemented] ()
keyword[try] :
keyword[from] . keyword[import] identifier[sessions]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[OperationFailed] ()
keyword[try] :
identifier[session] = identifier[sessions] . identifier[ObjectiveHierarchyDesignSession] ( identifier[objective_bank_id] , identifier[runtime] = identifier[self] . identifier[_runtime] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[OperationFailed] ()
keyword[return] identifier[session] | def get_objective_hierarchy_design_session_for_objective_bank(self, objective_bank_id=None, *args, **kwargs):
"""Gets the OsidSession associated with the objective hierarchy
design service for the given objective bank.
arg: objectiveBankId (osid.id.Id): the Id of the objective
bank
return: (osid.learning.ObjectiveHierarchyDesignSession) - an
ObjectiveHierarchyDesignSession
raise: NotFound - objectiveBankId not found
raise: NullArgument - objectiveBankId is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_objective_hierarchy_design() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_objective_hierarchy_design() and
supports_visible_federation() are true.
"""
if not objective_bank_id:
raise NullArgument # depends on [control=['if'], data=[]]
if not self.supports_objective_hierarchy_design():
raise Unimplemented() # depends on [control=['if'], data=[]]
try:
from . import sessions # depends on [control=['try'], data=[]]
except ImportError:
raise OperationFailed() # depends on [control=['except'], data=[]]
try:
session = sessions.ObjectiveHierarchyDesignSession(objective_bank_id, runtime=self._runtime) # depends on [control=['try'], data=[]]
except AttributeError:
raise OperationFailed() # depends on [control=['except'], data=[]]
return session |
def validate(self):
"""
Using the SCHEMA property, validate the attributes
of this instance. If any attributes are missing or
invalid, raise a ValidationException.
"""
validator = Draft7Validator(self.SCHEMA)
errors = set() # make errors a set to avoid duplicates
for error in validator.iter_errors(self.serialize()):
errors.add('.'.join(
list(map(str, error.path)) + [error.message]
))
if errors:
raise JSONValidationException(type(self).__name__, errors) | def function[validate, parameter[self]]:
constant[
Using the SCHEMA property, validate the attributes
of this instance. If any attributes are missing or
invalid, raise a ValidationException.
]
variable[validator] assign[=] call[name[Draft7Validator], parameter[name[self].SCHEMA]]
variable[errors] assign[=] call[name[set], parameter[]]
for taget[name[error]] in starred[call[name[validator].iter_errors, parameter[call[name[self].serialize, parameter[]]]]] begin[:]
call[name[errors].add, parameter[call[constant[.].join, parameter[binary_operation[call[name[list], parameter[call[name[map], parameter[name[str], name[error].path]]]] + list[[<ast.Attribute object at 0x7da1b1a559c0>]]]]]]]
if name[errors] begin[:]
<ast.Raise object at 0x7da1b1a562c0> | keyword[def] identifier[validate] ( identifier[self] ):
literal[string]
identifier[validator] = identifier[Draft7Validator] ( identifier[self] . identifier[SCHEMA] )
identifier[errors] = identifier[set] ()
keyword[for] identifier[error] keyword[in] identifier[validator] . identifier[iter_errors] ( identifier[self] . identifier[serialize] ()):
identifier[errors] . identifier[add] ( literal[string] . identifier[join] (
identifier[list] ( identifier[map] ( identifier[str] , identifier[error] . identifier[path] ))+[ identifier[error] . identifier[message] ]
))
keyword[if] identifier[errors] :
keyword[raise] identifier[JSONValidationException] ( identifier[type] ( identifier[self] ). identifier[__name__] , identifier[errors] ) | def validate(self):
"""
Using the SCHEMA property, validate the attributes
of this instance. If any attributes are missing or
invalid, raise a ValidationException.
"""
validator = Draft7Validator(self.SCHEMA)
errors = set() # make errors a set to avoid duplicates
for error in validator.iter_errors(self.serialize()):
errors.add('.'.join(list(map(str, error.path)) + [error.message])) # depends on [control=['for'], data=['error']]
if errors:
raise JSONValidationException(type(self).__name__, errors) # depends on [control=['if'], data=[]] |
def as_list(self):
"""Return all child objects in nested lists of strings."""
return [self.name, self.value, [x.as_list for x in self.children]] | def function[as_list, parameter[self]]:
constant[Return all child objects in nested lists of strings.]
return[list[[<ast.Attribute object at 0x7da18c4cfa90>, <ast.Attribute object at 0x7da18c4cd360>, <ast.ListComp object at 0x7da18c4cea10>]]] | keyword[def] identifier[as_list] ( identifier[self] ):
literal[string]
keyword[return] [ identifier[self] . identifier[name] , identifier[self] . identifier[value] ,[ identifier[x] . identifier[as_list] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[children] ]] | def as_list(self):
"""Return all child objects in nested lists of strings."""
return [self.name, self.value, [x.as_list for x in self.children]] |
def from_app_role(cls, url, path, role_id, secret_id):
"""Constructor: use AppRole authentication to read secrets from a Vault path
See https://www.vaultproject.io/docs/auth/approle.html
Args:
url: Vault url
path: Vault path where secrets are stored
role_id: Vault RoleID
secret_id: Vault SecretID
"""
token = cls._fetch_app_role_token(url, role_id, secret_id)
source_dict = cls._fetch_secrets(url, path, token)
return cls(source_dict, url, path, token) | def function[from_app_role, parameter[cls, url, path, role_id, secret_id]]:
constant[Constructor: use AppRole authentication to read secrets from a Vault path
See https://www.vaultproject.io/docs/auth/approle.html
Args:
url: Vault url
path: Vault path where secrets are stored
role_id: Vault RoleID
secret_id: Vault SecretID
]
variable[token] assign[=] call[name[cls]._fetch_app_role_token, parameter[name[url], name[role_id], name[secret_id]]]
variable[source_dict] assign[=] call[name[cls]._fetch_secrets, parameter[name[url], name[path], name[token]]]
return[call[name[cls], parameter[name[source_dict], name[url], name[path], name[token]]]] | keyword[def] identifier[from_app_role] ( identifier[cls] , identifier[url] , identifier[path] , identifier[role_id] , identifier[secret_id] ):
literal[string]
identifier[token] = identifier[cls] . identifier[_fetch_app_role_token] ( identifier[url] , identifier[role_id] , identifier[secret_id] )
identifier[source_dict] = identifier[cls] . identifier[_fetch_secrets] ( identifier[url] , identifier[path] , identifier[token] )
keyword[return] identifier[cls] ( identifier[source_dict] , identifier[url] , identifier[path] , identifier[token] ) | def from_app_role(cls, url, path, role_id, secret_id):
"""Constructor: use AppRole authentication to read secrets from a Vault path
See https://www.vaultproject.io/docs/auth/approle.html
Args:
url: Vault url
path: Vault path where secrets are stored
role_id: Vault RoleID
secret_id: Vault SecretID
"""
token = cls._fetch_app_role_token(url, role_id, secret_id)
source_dict = cls._fetch_secrets(url, path, token)
return cls(source_dict, url, path, token) |
def append_bump_sequence_op(self, bump_to, source=None):
"""Append a :class:`BumpSequence <stellar_base.operation.BumpSequence>`
operation to the list of operations.
Only available in protocol version 10 and above
:param int bump_to: Sequence number to bump to.
:param str source: The source address that is running the inflation
operation.
:return: This builder instance.
"""
op = operation.BumpSequence(bump_to, source)
return self.append_op(op) | def function[append_bump_sequence_op, parameter[self, bump_to, source]]:
constant[Append a :class:`BumpSequence <stellar_base.operation.BumpSequence>`
operation to the list of operations.
Only available in protocol version 10 and above
:param int bump_to: Sequence number to bump to.
:param str source: The source address that is running the inflation
operation.
:return: This builder instance.
]
variable[op] assign[=] call[name[operation].BumpSequence, parameter[name[bump_to], name[source]]]
return[call[name[self].append_op, parameter[name[op]]]] | keyword[def] identifier[append_bump_sequence_op] ( identifier[self] , identifier[bump_to] , identifier[source] = keyword[None] ):
literal[string]
identifier[op] = identifier[operation] . identifier[BumpSequence] ( identifier[bump_to] , identifier[source] )
keyword[return] identifier[self] . identifier[append_op] ( identifier[op] ) | def append_bump_sequence_op(self, bump_to, source=None):
"""Append a :class:`BumpSequence <stellar_base.operation.BumpSequence>`
operation to the list of operations.
Only available in protocol version 10 and above
:param int bump_to: Sequence number to bump to.
:param str source: The source address that is running the inflation
operation.
:return: This builder instance.
"""
op = operation.BumpSequence(bump_to, source)
return self.append_op(op) |
def _add_parsley_ns(cls, namespace_dict):
"""
Extend XPath evaluation with Parsley extensions' namespace
"""
namespace_dict.update({
'parslepy' : cls.LOCAL_NAMESPACE,
'parsley' : cls.LOCAL_NAMESPACE,
})
return namespace_dict | def function[_add_parsley_ns, parameter[cls, namespace_dict]]:
constant[
Extend XPath evaluation with Parsley extensions' namespace
]
call[name[namespace_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da1b003f460>, <ast.Constant object at 0x7da1b003f430>], [<ast.Attribute object at 0x7da1b003f3a0>, <ast.Attribute object at 0x7da1b003f790>]]]]
return[name[namespace_dict]] | keyword[def] identifier[_add_parsley_ns] ( identifier[cls] , identifier[namespace_dict] ):
literal[string]
identifier[namespace_dict] . identifier[update] ({
literal[string] : identifier[cls] . identifier[LOCAL_NAMESPACE] ,
literal[string] : identifier[cls] . identifier[LOCAL_NAMESPACE] ,
})
keyword[return] identifier[namespace_dict] | def _add_parsley_ns(cls, namespace_dict):
"""
Extend XPath evaluation with Parsley extensions' namespace
"""
namespace_dict.update({'parslepy': cls.LOCAL_NAMESPACE, 'parsley': cls.LOCAL_NAMESPACE})
return namespace_dict |
def _skyUserFromHeaderKwd(imageSet,paramDict):
"""
subtract the sky from all the chips in the imagefile that imageSet represents
imageSet is a single imageObject reference
paramDict should be the subset from an actual config object
"""
_skyValue=0.0 #this will be the sky value computed for the exposure
skyKW="MDRIZSKY" #header keyword that contains the sky that's been subtracted
#just making sure, tricky users and all, these are things that will be used
#by the sky function so we want them defined at least
try:
assert imageSet._numchips > 0, "invalid value for number of chips"
assert imageSet._filename != '', "image object filename is empty!, doh!"
assert imageSet._rootname != '', "image rootname is empty!, doh!"
assert imageSet.scienceExt !='', "image object science extension is empty!"
except AssertionError:
raise AssertionError
numchips=imageSet._numchips
sciExt=imageSet.scienceExt
# User Subtraction Case, User has done own sky subtraction,
# so use the image header value for subtractedsky value
skyuser=paramDict["skyuser"]
if skyuser != '':
print("User has computed their own sky values...")
if skyuser != skyKW:
print(" ...updating MDRIZSKY with supplied value.")
for chip in range(1,numchips+1,1):
chipext = '%s,%d'%(sciExt,chip)
if not imageSet[chipext].group_member:
# skip extensions/chips that will not be processed
continue
try:
_skyValue = imageSet[chipext].header[skyuser]
except:
print("**************************************************************")
print("*")
print("* Cannot find keyword ",skyuser," in ",imageSet._filename)
print("*")
print("**************************************************************\n\n\n")
raise KeyError
_updateKW(imageSet[sciExt+','+str(chip)],
imageSet._filename,(sciExt,chip),skyKW,_skyValue)
# Update internal record with subtracted sky value
imageSet[chipext].subtractedSky = _skyValue
imageSet[chipext].computedSky = None
print("Setting ",skyKW,"=",_skyValue) | def function[_skyUserFromHeaderKwd, parameter[imageSet, paramDict]]:
constant[
subtract the sky from all the chips in the imagefile that imageSet represents
imageSet is a single imageObject reference
paramDict should be the subset from an actual config object
]
variable[_skyValue] assign[=] constant[0.0]
variable[skyKW] assign[=] constant[MDRIZSKY]
<ast.Try object at 0x7da1b1be42e0>
variable[numchips] assign[=] name[imageSet]._numchips
variable[sciExt] assign[=] name[imageSet].scienceExt
variable[skyuser] assign[=] call[name[paramDict]][constant[skyuser]]
if compare[name[skyuser] not_equal[!=] constant[]] begin[:]
call[name[print], parameter[constant[User has computed their own sky values...]]]
if compare[name[skyuser] not_equal[!=] name[skyKW]] begin[:]
call[name[print], parameter[constant[ ...updating MDRIZSKY with supplied value.]]]
for taget[name[chip]] in starred[call[name[range], parameter[constant[1], binary_operation[name[numchips] + constant[1]], constant[1]]]] begin[:]
variable[chipext] assign[=] binary_operation[constant[%s,%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1be7ca0>, <ast.Name object at 0x7da1b1be7c70>]]]
if <ast.UnaryOp object at 0x7da1b1be7c10> begin[:]
continue
<ast.Try object at 0x7da1b1be7ac0>
call[name[_updateKW], parameter[call[name[imageSet]][binary_operation[binary_operation[name[sciExt] + constant[,]] + call[name[str], parameter[name[chip]]]]], name[imageSet]._filename, tuple[[<ast.Name object at 0x7da1b1be6fe0>, <ast.Name object at 0x7da1b1be6fb0>]], name[skyKW], name[_skyValue]]]
call[name[imageSet]][name[chipext]].subtractedSky assign[=] name[_skyValue]
call[name[imageSet]][name[chipext]].computedSky assign[=] constant[None]
call[name[print], parameter[constant[Setting ], name[skyKW], constant[=], name[_skyValue]]] | keyword[def] identifier[_skyUserFromHeaderKwd] ( identifier[imageSet] , identifier[paramDict] ):
literal[string]
identifier[_skyValue] = literal[int]
identifier[skyKW] = literal[string]
keyword[try] :
keyword[assert] identifier[imageSet] . identifier[_numchips] > literal[int] , literal[string]
keyword[assert] identifier[imageSet] . identifier[_filename] != literal[string] , literal[string]
keyword[assert] identifier[imageSet] . identifier[_rootname] != literal[string] , literal[string]
keyword[assert] identifier[imageSet] . identifier[scienceExt] != literal[string] , literal[string]
keyword[except] identifier[AssertionError] :
keyword[raise] identifier[AssertionError]
identifier[numchips] = identifier[imageSet] . identifier[_numchips]
identifier[sciExt] = identifier[imageSet] . identifier[scienceExt]
identifier[skyuser] = identifier[paramDict] [ literal[string] ]
keyword[if] identifier[skyuser] != literal[string] :
identifier[print] ( literal[string] )
keyword[if] identifier[skyuser] != identifier[skyKW] :
identifier[print] ( literal[string] )
keyword[for] identifier[chip] keyword[in] identifier[range] ( literal[int] , identifier[numchips] + literal[int] , literal[int] ):
identifier[chipext] = literal[string] %( identifier[sciExt] , identifier[chip] )
keyword[if] keyword[not] identifier[imageSet] [ identifier[chipext] ]. identifier[group_member] :
keyword[continue]
keyword[try] :
identifier[_skyValue] = identifier[imageSet] [ identifier[chipext] ]. identifier[header] [ identifier[skyuser] ]
keyword[except] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] , identifier[skyuser] , literal[string] , identifier[imageSet] . identifier[_filename] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[raise] identifier[KeyError]
identifier[_updateKW] ( identifier[imageSet] [ identifier[sciExt] + literal[string] + identifier[str] ( identifier[chip] )],
identifier[imageSet] . identifier[_filename] ,( identifier[sciExt] , identifier[chip] ), identifier[skyKW] , identifier[_skyValue] )
identifier[imageSet] [ identifier[chipext] ]. identifier[subtractedSky] = identifier[_skyValue]
identifier[imageSet] [ identifier[chipext] ]. identifier[computedSky] = keyword[None]
identifier[print] ( literal[string] , identifier[skyKW] , literal[string] , identifier[_skyValue] ) | def _skyUserFromHeaderKwd(imageSet, paramDict):
"""
subtract the sky from all the chips in the imagefile that imageSet represents
imageSet is a single imageObject reference
paramDict should be the subset from an actual config object
"""
_skyValue = 0.0 #this will be the sky value computed for the exposure
skyKW = 'MDRIZSKY' #header keyword that contains the sky that's been subtracted
#just making sure, tricky users and all, these are things that will be used
#by the sky function so we want them defined at least
try:
assert imageSet._numchips > 0, 'invalid value for number of chips'
assert imageSet._filename != '', 'image object filename is empty!, doh!'
assert imageSet._rootname != '', 'image rootname is empty!, doh!'
assert imageSet.scienceExt != '', 'image object science extension is empty!' # depends on [control=['try'], data=[]]
except AssertionError:
raise AssertionError # depends on [control=['except'], data=[]]
numchips = imageSet._numchips
sciExt = imageSet.scienceExt
# User Subtraction Case, User has done own sky subtraction,
# so use the image header value for subtractedsky value
skyuser = paramDict['skyuser']
if skyuser != '':
print('User has computed their own sky values...')
if skyuser != skyKW:
print(' ...updating MDRIZSKY with supplied value.')
for chip in range(1, numchips + 1, 1):
chipext = '%s,%d' % (sciExt, chip)
if not imageSet[chipext].group_member:
# skip extensions/chips that will not be processed
continue # depends on [control=['if'], data=[]]
try:
_skyValue = imageSet[chipext].header[skyuser] # depends on [control=['try'], data=[]]
except:
print('**************************************************************')
print('*')
print('* Cannot find keyword ', skyuser, ' in ', imageSet._filename)
print('*')
print('**************************************************************\n\n\n')
raise KeyError # depends on [control=['except'], data=[]]
_updateKW(imageSet[sciExt + ',' + str(chip)], imageSet._filename, (sciExt, chip), skyKW, _skyValue)
# Update internal record with subtracted sky value
imageSet[chipext].subtractedSky = _skyValue
imageSet[chipext].computedSky = None
print('Setting ', skyKW, '=', _skyValue) # depends on [control=['for'], data=['chip']] # depends on [control=['if'], data=['skyuser', 'skyKW']] # depends on [control=['if'], data=['skyuser']] |
def generate_from_reference(target_reference, project, property_set_):
""" Attempts to generate the target given by target reference, which
can refer both to a main target or to a file.
Returns a list consisting of
- usage requirements
- generated virtual targets, if any
target_reference: Target reference
project: Project where the reference is made
property_set: Properties of the main target that makes the reference
"""
assert isinstance(target_reference, basestring)
assert isinstance(project, ProjectTarget)
assert isinstance(property_set_, property_set.PropertySet)
target, sproperties = resolve_reference(target_reference, project)
# Take properties which should be propagated and refine them
# with source-specific requirements.
propagated = property_set_.propagated()
rproperties = propagated.refine(sproperties)
return target.generate(rproperties) | def function[generate_from_reference, parameter[target_reference, project, property_set_]]:
constant[ Attempts to generate the target given by target reference, which
can refer both to a main target or to a file.
Returns a list consisting of
- usage requirements
- generated virtual targets, if any
target_reference: Target reference
project: Project where the reference is made
property_set: Properties of the main target that makes the reference
]
assert[call[name[isinstance], parameter[name[target_reference], name[basestring]]]]
assert[call[name[isinstance], parameter[name[project], name[ProjectTarget]]]]
assert[call[name[isinstance], parameter[name[property_set_], name[property_set].PropertySet]]]
<ast.Tuple object at 0x7da1b1f0a4a0> assign[=] call[name[resolve_reference], parameter[name[target_reference], name[project]]]
variable[propagated] assign[=] call[name[property_set_].propagated, parameter[]]
variable[rproperties] assign[=] call[name[propagated].refine, parameter[name[sproperties]]]
return[call[name[target].generate, parameter[name[rproperties]]]] | keyword[def] identifier[generate_from_reference] ( identifier[target_reference] , identifier[project] , identifier[property_set_] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[target_reference] , identifier[basestring] )
keyword[assert] identifier[isinstance] ( identifier[project] , identifier[ProjectTarget] )
keyword[assert] identifier[isinstance] ( identifier[property_set_] , identifier[property_set] . identifier[PropertySet] )
identifier[target] , identifier[sproperties] = identifier[resolve_reference] ( identifier[target_reference] , identifier[project] )
identifier[propagated] = identifier[property_set_] . identifier[propagated] ()
identifier[rproperties] = identifier[propagated] . identifier[refine] ( identifier[sproperties] )
keyword[return] identifier[target] . identifier[generate] ( identifier[rproperties] ) | def generate_from_reference(target_reference, project, property_set_):
""" Attempts to generate the target given by target reference, which
can refer both to a main target or to a file.
Returns a list consisting of
- usage requirements
- generated virtual targets, if any
target_reference: Target reference
project: Project where the reference is made
property_set: Properties of the main target that makes the reference
"""
assert isinstance(target_reference, basestring)
assert isinstance(project, ProjectTarget)
assert isinstance(property_set_, property_set.PropertySet)
(target, sproperties) = resolve_reference(target_reference, project)
# Take properties which should be propagated and refine them
# with source-specific requirements.
propagated = property_set_.propagated()
rproperties = propagated.refine(sproperties)
return target.generate(rproperties) |
def submit(self, code: str, results: str ="html", prompt: dict = None) -> dict:
'''
This method is used to submit any SAS code. It returns the Log and Listing as a python dictionary.
code - the SAS statements you want to execute
results - format of results, HTML is default, TEXT is the alternative
prompt - dict of names:flags to prompt for; create macro variables (used in submitted code), then keep or delete
The keys are the names of the macro variables and the boolean flag is to either hide what you type and delete
the macros, or show what you type and keep the macros (they will still be available later)
for example (what you type for pw will not be displayed, user and dsname will):
results = sas.submit(
"""
libname tera teradata server=teracop1 user=&user pw=&pw;
proc print data=tera.&dsname (obs=10); run;
""" ,
prompt = {'user': False, 'pw': True, 'dsname': False}
)
Returns - a Dict containing two keys:values, [LOG, LST]. LOG is text and LST is 'results' (HTML or TEXT)
NOTE: to view HTML results in the ipykernel, issue: from IPython.display import HTML and use HTML() instead of print()
i.e,: results = sas.submit("data a; x=1; run; proc print;run')
print(results['LOG'])
HTML(results['LST'])
'''
prompt = prompt if prompt is not None else {}
odsopen = b"ods listing close;ods "+self.sascfg.output.encode()+ \
b" (id=saspy_internal) file=stdout options(bitmap_mode='inline') device=svg style="+self._sb.HTML_Style.encode()+ \
b"; ods graphics on / outputfmt=png;\n"
odsclose = b"ods "+self.sascfg.output.encode()+b" (id=saspy_internal) close;ods listing;\n"
ods = True;
mj = b";*\';*\";*/;"
lstf = b''
logf = b''
bail = False
eof = 5
bc = False
done = False
logn = self._logcnt()
logcodei = "%put E3969440A681A24088859985" + logn + ";"
logcodeo = b"\nE3969440A681A24088859985" + logn.encode()
pcodei = ''
pcodeiv = ''
pcodeo = ''
if self.pid == None:
self._sb.SASpid = None
print("No SAS process attached. SAS process has terminated unexpectedly.")
return dict(LOG="No SAS process attached. SAS process has terminated unexpectedly.", LST='')
rc = os.waitid(os.P_PID, self.pid, os.WEXITED | os.WNOHANG)
if rc != None:
self.pid = None
self._sb.SASpid = None
return dict(LOG='SAS process has terminated unexpectedly. Pid State= '+str(rc), LST='')
# to cover the possibility of an _asubmit w/ lst output not read; no known cases now; used to be __flushlst__()
# removing this and adding comment in _asubmit to use _getlst[txt] so this will never be necessary; delete later
#while(len(self.stdout.read1(4096)) > 0):
# continue
if results.upper() != "HTML":
ods = False
if len(prompt):
pcodei += 'options nosource nonotes;\n'
pcodeo += 'options nosource nonotes;\n'
for key in prompt:
gotit = False
while not gotit:
var = self.sascfg._prompt('Please enter value for macro variable '+key+' ', pw=prompt[key])
if var is None:
raise KeyboardInterrupt
if len(var) > 0:
gotit = True
else:
print("Sorry, didn't get a value for that variable.")
if prompt[key]:
pcodei += '%let '+key+'='+var+';\n'
pcodeo += '%symdel '+key+';\n'
else:
pcodeiv += '%let '+key+'='+var+';\n'
pcodei += 'options source notes;\n'
pcodeo += 'options source notes;\n'
if ods:
self.stdin.write(odsopen)
pgm = mj+b'\n'+pcodei.encode(self.sascfg.encoding)+pcodeiv.encode(self.sascfg.encoding)
pgm += code.encode(self.sascfg.encoding)+b'\n'+pcodeo.encode(self.sascfg.encoding)+b'\n'+mj
out = self.stdin.write(pgm)
if ods:
self.stdin.write(odsclose)
out = self.stdin.write(b'\n'+logcodei.encode(self.sascfg.encoding)+b'\n')
self.stdin.flush()
while not done:
try:
while True:
rc = os.waitid(os.P_PID, self.pid, os.WEXITED | os.WNOHANG)
if rc is not None:
log = b''
try:
log = self.stderr.read1(4096)
if len(log) > 0:
logf += log
self._log += logf.decode(self.sascfg.encoding, errors='replace')
except:
pass
self.pid = None
self._sb.SASpid = None
return dict(LOG='SAS process has terminated unexpectedly. Pid State= ' +
str(rc)+'\n'+logf.decode(self.sascfg.encoding, errors='replace'), LST='')
if bail:
eof -= 1
if eof < 0:
break
if ods:
lst = self.stdout.read1(4096)
else:
lst = self.stdout.read1(4096)
if len(lst) > 0:
lstf += lst
else:
log = self.stderr.read1(4096)
if len(log) > 0:
logf += log
if logf.count(logcodeo) >= 1:
bail = True
if not bail and bc:
self.stdin.write(odsclose+logcodei.encode(self.sascfg.encoding) + b'\n')
self.stdin.flush()
bc = False
done = True
except (ConnectionResetError):
log = ''
try:
log = self.stderr.read1(4096)
if len(log) > 0:
logf += log
self._log += logf.decode(self.sascfg.encoding, errors='replace')
except:
pass
rc = 0
rc = os.waitpid(self.pid, 0)
self.pid = None
self._sb.SASpid = None
log = logf.partition(logcodeo)[0]+b'\nConnection Reset: SAS process has terminated unexpectedly. Pid State= '+str(rc).encode()+b'\n'+logf
return dict(LOG=log.encode(), LST='')
except (KeyboardInterrupt, SystemExit):
print('Exception caught!')
ll = self._breakprompt(logcodeo)
if ll.get('ABORT', False):
return ll
logf += ll['LOG']
lstf += ll['LST']
bc = ll['BC']
if not bc:
print('Exception handled :)\n')
else:
print('Exception ignored, continuing to process...\n')
self.stdin.write(odsclose+logcodei.encode(self.sascfg.encoding)+b'\n')
self.stdin.flush()
if ods:
try:
lstf = lstf.decode()
except UnicodeDecodeError:
try:
lstf = lstf.decode(self.sascfg.encoding)
except UnicodeDecodeError:
lstf = lstf.decode(errors='replace')
else:
lstf = lstf.decode(self.sascfg.encoding, errors='replace')
logf = logf.decode(self.sascfg.encoding, errors='replace')
trip = lstf.rpartition("/*]]>*/")
if len(trip[1]) > 0 and len(trip[2]) < 100:
lstf = ''
self._log += logf
final = logf.partition(logcodei)
z = final[0].rpartition(chr(10))
prev = '%08d' % (self._log_cnt - 1)
zz = z[0].rpartition("\nE3969440A681A24088859985" + prev +'\n')
logd = zz[2].replace(mj.decode(self.sascfg.encoding), '')
lstd = lstf.replace(chr(12), chr(10)).replace('<body class="c body">',
'<body class="l body">').replace("font-size: x-small;",
"font-size: normal;")
return dict(LOG=logd, LST=lstd) | def function[submit, parameter[self, code, results, prompt]]:
constant[
This method is used to submit any SAS code. It returns the Log and Listing as a python dictionary.
code - the SAS statements you want to execute
results - format of results, HTML is default, TEXT is the alternative
prompt - dict of names:flags to prompt for; create macro variables (used in submitted code), then keep or delete
The keys are the names of the macro variables and the boolean flag is to either hide what you type and delete
the macros, or show what you type and keep the macros (they will still be available later)
for example (what you type for pw will not be displayed, user and dsname will):
results = sas.submit(
"""
libname tera teradata server=teracop1 user=&user pw=&pw;
proc print data=tera.&dsname (obs=10); run;
""" ,
prompt = {'user': False, 'pw': True, 'dsname': False}
)
Returns - a Dict containing two keys:values, [LOG, LST]. LOG is text and LST is 'results' (HTML or TEXT)
NOTE: to view HTML results in the ipykernel, issue: from IPython.display import HTML and use HTML() instead of print()
i.e,: results = sas.submit("data a; x=1; run; proc print;run')
print(results['LOG'])
HTML(results['LST'])
]
variable[prompt] assign[=] <ast.IfExp object at 0x7da207f02a70>
variable[odsopen] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[b'ods listing close;ods '] + call[name[self].sascfg.output.encode, parameter[]]] + constant[b" (id=saspy_internal) file=stdout options(bitmap_mode='inline') device=svg style="]] + call[name[self]._sb.HTML_Style.encode, parameter[]]] + constant[b'; ods graphics on / outputfmt=png;\n']]
variable[odsclose] assign[=] binary_operation[binary_operation[constant[b'ods '] + call[name[self].sascfg.output.encode, parameter[]]] + constant[b' (id=saspy_internal) close;ods listing;\n']]
variable[ods] assign[=] constant[True]
variable[mj] assign[=] constant[b';*\';*";*/;']
variable[lstf] assign[=] constant[b'']
variable[logf] assign[=] constant[b'']
variable[bail] assign[=] constant[False]
variable[eof] assign[=] constant[5]
variable[bc] assign[=] constant[False]
variable[done] assign[=] constant[False]
variable[logn] assign[=] call[name[self]._logcnt, parameter[]]
variable[logcodei] assign[=] binary_operation[binary_operation[constant[%put E3969440A681A24088859985] + name[logn]] + constant[;]]
variable[logcodeo] assign[=] binary_operation[constant[b'\nE3969440A681A24088859985'] + call[name[logn].encode, parameter[]]]
variable[pcodei] assign[=] constant[]
variable[pcodeiv] assign[=] constant[]
variable[pcodeo] assign[=] constant[]
if compare[name[self].pid equal[==] constant[None]] begin[:]
name[self]._sb.SASpid assign[=] constant[None]
call[name[print], parameter[constant[No SAS process attached. SAS process has terminated unexpectedly.]]]
return[call[name[dict], parameter[]]]
variable[rc] assign[=] call[name[os].waitid, parameter[name[os].P_PID, name[self].pid, binary_operation[name[os].WEXITED <ast.BitOr object at 0x7da2590d6aa0> name[os].WNOHANG]]]
if compare[name[rc] not_equal[!=] constant[None]] begin[:]
name[self].pid assign[=] constant[None]
name[self]._sb.SASpid assign[=] constant[None]
return[call[name[dict], parameter[]]]
if compare[call[name[results].upper, parameter[]] not_equal[!=] constant[HTML]] begin[:]
variable[ods] assign[=] constant[False]
if call[name[len], parameter[name[prompt]]] begin[:]
<ast.AugAssign object at 0x7da18f00ca60>
<ast.AugAssign object at 0x7da18f00e2f0>
for taget[name[key]] in starred[name[prompt]] begin[:]
variable[gotit] assign[=] constant[False]
while <ast.UnaryOp object at 0x7da18f00f010> begin[:]
variable[var] assign[=] call[name[self].sascfg._prompt, parameter[binary_operation[binary_operation[constant[Please enter value for macro variable ] + name[key]] + constant[ ]]]]
if compare[name[var] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f00f190>
if compare[call[name[len], parameter[name[var]]] greater[>] constant[0]] begin[:]
variable[gotit] assign[=] constant[True]
if call[name[prompt]][name[key]] begin[:]
<ast.AugAssign object at 0x7da18f00d870>
<ast.AugAssign object at 0x7da18f00c280>
<ast.AugAssign object at 0x7da18f00fe80>
<ast.AugAssign object at 0x7da18f00dd20>
if name[ods] begin[:]
call[name[self].stdin.write, parameter[name[odsopen]]]
variable[pgm] assign[=] binary_operation[binary_operation[binary_operation[name[mj] + constant[b'\n']] + call[name[pcodei].encode, parameter[name[self].sascfg.encoding]]] + call[name[pcodeiv].encode, parameter[name[self].sascfg.encoding]]]
<ast.AugAssign object at 0x7da18f00f160>
variable[out] assign[=] call[name[self].stdin.write, parameter[name[pgm]]]
if name[ods] begin[:]
call[name[self].stdin.write, parameter[name[odsclose]]]
variable[out] assign[=] call[name[self].stdin.write, parameter[binary_operation[binary_operation[constant[b'\n'] + call[name[logcodei].encode, parameter[name[self].sascfg.encoding]]] + constant[b'\n']]]]
call[name[self].stdin.flush, parameter[]]
while <ast.UnaryOp object at 0x7da18f00f1f0> begin[:]
<ast.Try object at 0x7da18f00e6e0>
if name[ods] begin[:]
<ast.Try object at 0x7da18bc71120>
variable[logf] assign[=] call[name[logf].decode, parameter[name[self].sascfg.encoding]]
variable[trip] assign[=] call[name[lstf].rpartition, parameter[constant[/*]]>*/]]]
if <ast.BoolOp object at 0x7da18bc70910> begin[:]
variable[lstf] assign[=] constant[]
<ast.AugAssign object at 0x7da18bc73070>
variable[final] assign[=] call[name[logf].partition, parameter[name[logcodei]]]
variable[z] assign[=] call[call[name[final]][constant[0]].rpartition, parameter[call[name[chr], parameter[constant[10]]]]]
variable[prev] assign[=] binary_operation[constant[%08d] <ast.Mod object at 0x7da2590d6920> binary_operation[name[self]._log_cnt - constant[1]]]
variable[zz] assign[=] call[call[name[z]][constant[0]].rpartition, parameter[binary_operation[binary_operation[constant[
E3969440A681A24088859985] + name[prev]] + constant[
]]]]
variable[logd] assign[=] call[call[name[zz]][constant[2]].replace, parameter[call[name[mj].decode, parameter[name[self].sascfg.encoding]], constant[]]]
variable[lstd] assign[=] call[call[call[name[lstf].replace, parameter[call[name[chr], parameter[constant[12]]], call[name[chr], parameter[constant[10]]]]].replace, parameter[constant[<body class="c body">], constant[<body class="l body">]]].replace, parameter[constant[font-size: x-small;], constant[font-size: normal;]]]
return[call[name[dict], parameter[]]] | keyword[def] identifier[submit] ( identifier[self] , identifier[code] : identifier[str] , identifier[results] : identifier[str] = literal[string] , identifier[prompt] : identifier[dict] = keyword[None] )-> identifier[dict] :
literal[string]
identifier[prompt] = identifier[prompt] keyword[if] identifier[prompt] keyword[is] keyword[not] keyword[None] keyword[else] {}
identifier[odsopen] = literal[string] + identifier[self] . identifier[sascfg] . identifier[output] . identifier[encode] ()+ literal[string] + identifier[self] . identifier[_sb] . identifier[HTML_Style] . identifier[encode] ()+ literal[string]
identifier[odsclose] = literal[string] + identifier[self] . identifier[sascfg] . identifier[output] . identifier[encode] ()+ literal[string]
identifier[ods] = keyword[True] ;
identifier[mj] = literal[string]
identifier[lstf] = literal[string]
identifier[logf] = literal[string]
identifier[bail] = keyword[False]
identifier[eof] = literal[int]
identifier[bc] = keyword[False]
identifier[done] = keyword[False]
identifier[logn] = identifier[self] . identifier[_logcnt] ()
identifier[logcodei] = literal[string] + identifier[logn] + literal[string]
identifier[logcodeo] = literal[string] + identifier[logn] . identifier[encode] ()
identifier[pcodei] = literal[string]
identifier[pcodeiv] = literal[string]
identifier[pcodeo] = literal[string]
keyword[if] identifier[self] . identifier[pid] == keyword[None] :
identifier[self] . identifier[_sb] . identifier[SASpid] = keyword[None]
identifier[print] ( literal[string] )
keyword[return] identifier[dict] ( identifier[LOG] = literal[string] , identifier[LST] = literal[string] )
identifier[rc] = identifier[os] . identifier[waitid] ( identifier[os] . identifier[P_PID] , identifier[self] . identifier[pid] , identifier[os] . identifier[WEXITED] | identifier[os] . identifier[WNOHANG] )
keyword[if] identifier[rc] != keyword[None] :
identifier[self] . identifier[pid] = keyword[None]
identifier[self] . identifier[_sb] . identifier[SASpid] = keyword[None]
keyword[return] identifier[dict] ( identifier[LOG] = literal[string] + identifier[str] ( identifier[rc] ), identifier[LST] = literal[string] )
keyword[if] identifier[results] . identifier[upper] ()!= literal[string] :
identifier[ods] = keyword[False]
keyword[if] identifier[len] ( identifier[prompt] ):
identifier[pcodei] += literal[string]
identifier[pcodeo] += literal[string]
keyword[for] identifier[key] keyword[in] identifier[prompt] :
identifier[gotit] = keyword[False]
keyword[while] keyword[not] identifier[gotit] :
identifier[var] = identifier[self] . identifier[sascfg] . identifier[_prompt] ( literal[string] + identifier[key] + literal[string] , identifier[pw] = identifier[prompt] [ identifier[key] ])
keyword[if] identifier[var] keyword[is] keyword[None] :
keyword[raise] identifier[KeyboardInterrupt]
keyword[if] identifier[len] ( identifier[var] )> literal[int] :
identifier[gotit] = keyword[True]
keyword[else] :
identifier[print] ( literal[string] )
keyword[if] identifier[prompt] [ identifier[key] ]:
identifier[pcodei] += literal[string] + identifier[key] + literal[string] + identifier[var] + literal[string]
identifier[pcodeo] += literal[string] + identifier[key] + literal[string]
keyword[else] :
identifier[pcodeiv] += literal[string] + identifier[key] + literal[string] + identifier[var] + literal[string]
identifier[pcodei] += literal[string]
identifier[pcodeo] += literal[string]
keyword[if] identifier[ods] :
identifier[self] . identifier[stdin] . identifier[write] ( identifier[odsopen] )
identifier[pgm] = identifier[mj] + literal[string] + identifier[pcodei] . identifier[encode] ( identifier[self] . identifier[sascfg] . identifier[encoding] )+ identifier[pcodeiv] . identifier[encode] ( identifier[self] . identifier[sascfg] . identifier[encoding] )
identifier[pgm] += identifier[code] . identifier[encode] ( identifier[self] . identifier[sascfg] . identifier[encoding] )+ literal[string] + identifier[pcodeo] . identifier[encode] ( identifier[self] . identifier[sascfg] . identifier[encoding] )+ literal[string] + identifier[mj]
identifier[out] = identifier[self] . identifier[stdin] . identifier[write] ( identifier[pgm] )
keyword[if] identifier[ods] :
identifier[self] . identifier[stdin] . identifier[write] ( identifier[odsclose] )
identifier[out] = identifier[self] . identifier[stdin] . identifier[write] ( literal[string] + identifier[logcodei] . identifier[encode] ( identifier[self] . identifier[sascfg] . identifier[encoding] )+ literal[string] )
identifier[self] . identifier[stdin] . identifier[flush] ()
keyword[while] keyword[not] identifier[done] :
keyword[try] :
keyword[while] keyword[True] :
identifier[rc] = identifier[os] . identifier[waitid] ( identifier[os] . identifier[P_PID] , identifier[self] . identifier[pid] , identifier[os] . identifier[WEXITED] | identifier[os] . identifier[WNOHANG] )
keyword[if] identifier[rc] keyword[is] keyword[not] keyword[None] :
identifier[log] = literal[string]
keyword[try] :
identifier[log] = identifier[self] . identifier[stderr] . identifier[read1] ( literal[int] )
keyword[if] identifier[len] ( identifier[log] )> literal[int] :
identifier[logf] += identifier[log]
identifier[self] . identifier[_log] += identifier[logf] . identifier[decode] ( identifier[self] . identifier[sascfg] . identifier[encoding] , identifier[errors] = literal[string] )
keyword[except] :
keyword[pass]
identifier[self] . identifier[pid] = keyword[None]
identifier[self] . identifier[_sb] . identifier[SASpid] = keyword[None]
keyword[return] identifier[dict] ( identifier[LOG] = literal[string] +
identifier[str] ( identifier[rc] )+ literal[string] + identifier[logf] . identifier[decode] ( identifier[self] . identifier[sascfg] . identifier[encoding] , identifier[errors] = literal[string] ), identifier[LST] = literal[string] )
keyword[if] identifier[bail] :
identifier[eof] -= literal[int]
keyword[if] identifier[eof] < literal[int] :
keyword[break]
keyword[if] identifier[ods] :
identifier[lst] = identifier[self] . identifier[stdout] . identifier[read1] ( literal[int] )
keyword[else] :
identifier[lst] = identifier[self] . identifier[stdout] . identifier[read1] ( literal[int] )
keyword[if] identifier[len] ( identifier[lst] )> literal[int] :
identifier[lstf] += identifier[lst]
keyword[else] :
identifier[log] = identifier[self] . identifier[stderr] . identifier[read1] ( literal[int] )
keyword[if] identifier[len] ( identifier[log] )> literal[int] :
identifier[logf] += identifier[log]
keyword[if] identifier[logf] . identifier[count] ( identifier[logcodeo] )>= literal[int] :
identifier[bail] = keyword[True]
keyword[if] keyword[not] identifier[bail] keyword[and] identifier[bc] :
identifier[self] . identifier[stdin] . identifier[write] ( identifier[odsclose] + identifier[logcodei] . identifier[encode] ( identifier[self] . identifier[sascfg] . identifier[encoding] )+ literal[string] )
identifier[self] . identifier[stdin] . identifier[flush] ()
identifier[bc] = keyword[False]
identifier[done] = keyword[True]
keyword[except] ( identifier[ConnectionResetError] ):
identifier[log] = literal[string]
keyword[try] :
identifier[log] = identifier[self] . identifier[stderr] . identifier[read1] ( literal[int] )
keyword[if] identifier[len] ( identifier[log] )> literal[int] :
identifier[logf] += identifier[log]
identifier[self] . identifier[_log] += identifier[logf] . identifier[decode] ( identifier[self] . identifier[sascfg] . identifier[encoding] , identifier[errors] = literal[string] )
keyword[except] :
keyword[pass]
identifier[rc] = literal[int]
identifier[rc] = identifier[os] . identifier[waitpid] ( identifier[self] . identifier[pid] , literal[int] )
identifier[self] . identifier[pid] = keyword[None]
identifier[self] . identifier[_sb] . identifier[SASpid] = keyword[None]
identifier[log] = identifier[logf] . identifier[partition] ( identifier[logcodeo] )[ literal[int] ]+ literal[string] + identifier[str] ( identifier[rc] ). identifier[encode] ()+ literal[string] + identifier[logf]
keyword[return] identifier[dict] ( identifier[LOG] = identifier[log] . identifier[encode] (), identifier[LST] = literal[string] )
keyword[except] ( identifier[KeyboardInterrupt] , identifier[SystemExit] ):
identifier[print] ( literal[string] )
identifier[ll] = identifier[self] . identifier[_breakprompt] ( identifier[logcodeo] )
keyword[if] identifier[ll] . identifier[get] ( literal[string] , keyword[False] ):
keyword[return] identifier[ll]
identifier[logf] += identifier[ll] [ literal[string] ]
identifier[lstf] += identifier[ll] [ literal[string] ]
identifier[bc] = identifier[ll] [ literal[string] ]
keyword[if] keyword[not] identifier[bc] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[print] ( literal[string] )
identifier[self] . identifier[stdin] . identifier[write] ( identifier[odsclose] + identifier[logcodei] . identifier[encode] ( identifier[self] . identifier[sascfg] . identifier[encoding] )+ literal[string] )
identifier[self] . identifier[stdin] . identifier[flush] ()
keyword[if] identifier[ods] :
keyword[try] :
identifier[lstf] = identifier[lstf] . identifier[decode] ()
keyword[except] identifier[UnicodeDecodeError] :
keyword[try] :
identifier[lstf] = identifier[lstf] . identifier[decode] ( identifier[self] . identifier[sascfg] . identifier[encoding] )
keyword[except] identifier[UnicodeDecodeError] :
identifier[lstf] = identifier[lstf] . identifier[decode] ( identifier[errors] = literal[string] )
keyword[else] :
identifier[lstf] = identifier[lstf] . identifier[decode] ( identifier[self] . identifier[sascfg] . identifier[encoding] , identifier[errors] = literal[string] )
identifier[logf] = identifier[logf] . identifier[decode] ( identifier[self] . identifier[sascfg] . identifier[encoding] , identifier[errors] = literal[string] )
identifier[trip] = identifier[lstf] . identifier[rpartition] ( literal[string] )
keyword[if] identifier[len] ( identifier[trip] [ literal[int] ])> literal[int] keyword[and] identifier[len] ( identifier[trip] [ literal[int] ])< literal[int] :
identifier[lstf] = literal[string]
identifier[self] . identifier[_log] += identifier[logf]
identifier[final] = identifier[logf] . identifier[partition] ( identifier[logcodei] )
identifier[z] = identifier[final] [ literal[int] ]. identifier[rpartition] ( identifier[chr] ( literal[int] ))
identifier[prev] = literal[string] %( identifier[self] . identifier[_log_cnt] - literal[int] )
identifier[zz] = identifier[z] [ literal[int] ]. identifier[rpartition] ( literal[string] + identifier[prev] + literal[string] )
identifier[logd] = identifier[zz] [ literal[int] ]. identifier[replace] ( identifier[mj] . identifier[decode] ( identifier[self] . identifier[sascfg] . identifier[encoding] ), literal[string] )
identifier[lstd] = identifier[lstf] . identifier[replace] ( identifier[chr] ( literal[int] ), identifier[chr] ( literal[int] )). identifier[replace] ( literal[string] ,
literal[string] ). identifier[replace] ( literal[string] ,
literal[string] )
keyword[return] identifier[dict] ( identifier[LOG] = identifier[logd] , identifier[LST] = identifier[lstd] ) | def submit(self, code: str, results: str='html', prompt: dict=None) -> dict:
'''
This method is used to submit any SAS code. It returns the Log and Listing as a python dictionary.
code - the SAS statements you want to execute
results - format of results, HTML is default, TEXT is the alternative
prompt - dict of names:flags to prompt for; create macro variables (used in submitted code), then keep or delete
The keys are the names of the macro variables and the boolean flag is to either hide what you type and delete
the macros, or show what you type and keep the macros (they will still be available later)
for example (what you type for pw will not be displayed, user and dsname will):
results = sas.submit(
"""
libname tera teradata server=teracop1 user=&user pw=&pw;
proc print data=tera.&dsname (obs=10); run;
""" ,
prompt = {'user': False, 'pw': True, 'dsname': False}
)
Returns - a Dict containing two keys:values, [LOG, LST]. LOG is text and LST is 'results' (HTML or TEXT)
NOTE: to view HTML results in the ipykernel, issue: from IPython.display import HTML and use HTML() instead of print()
i.e,: results = sas.submit("data a; x=1; run; proc print;run')
print(results['LOG'])
HTML(results['LST'])
'''
prompt = prompt if prompt is not None else {}
odsopen = b'ods listing close;ods ' + self.sascfg.output.encode() + b" (id=saspy_internal) file=stdout options(bitmap_mode='inline') device=svg style=" + self._sb.HTML_Style.encode() + b'; ods graphics on / outputfmt=png;\n'
odsclose = b'ods ' + self.sascfg.output.encode() + b' (id=saspy_internal) close;ods listing;\n'
ods = True
mj = b';*\';*";*/;'
lstf = b''
logf = b''
bail = False
eof = 5
bc = False
done = False
logn = self._logcnt()
logcodei = '%put E3969440A681A24088859985' + logn + ';'
logcodeo = b'\nE3969440A681A24088859985' + logn.encode()
pcodei = ''
pcodeiv = ''
pcodeo = ''
if self.pid == None:
self._sb.SASpid = None
print('No SAS process attached. SAS process has terminated unexpectedly.')
return dict(LOG='No SAS process attached. SAS process has terminated unexpectedly.', LST='') # depends on [control=['if'], data=[]]
rc = os.waitid(os.P_PID, self.pid, os.WEXITED | os.WNOHANG)
if rc != None:
self.pid = None
self._sb.SASpid = None
return dict(LOG='SAS process has terminated unexpectedly. Pid State= ' + str(rc), LST='') # depends on [control=['if'], data=['rc']]
# to cover the possibility of an _asubmit w/ lst output not read; no known cases now; used to be __flushlst__()
# removing this and adding comment in _asubmit to use _getlst[txt] so this will never be necessary; delete later
#while(len(self.stdout.read1(4096)) > 0):
# continue
if results.upper() != 'HTML':
ods = False # depends on [control=['if'], data=[]]
if len(prompt):
pcodei += 'options nosource nonotes;\n'
pcodeo += 'options nosource nonotes;\n'
for key in prompt:
gotit = False
while not gotit:
var = self.sascfg._prompt('Please enter value for macro variable ' + key + ' ', pw=prompt[key])
if var is None:
raise KeyboardInterrupt # depends on [control=['if'], data=[]]
if len(var) > 0:
gotit = True # depends on [control=['if'], data=[]]
else:
print("Sorry, didn't get a value for that variable.") # depends on [control=['while'], data=[]]
if prompt[key]:
pcodei += '%let ' + key + '=' + var + ';\n'
pcodeo += '%symdel ' + key + ';\n' # depends on [control=['if'], data=[]]
else:
pcodeiv += '%let ' + key + '=' + var + ';\n' # depends on [control=['for'], data=['key']]
pcodei += 'options source notes;\n'
pcodeo += 'options source notes;\n' # depends on [control=['if'], data=[]]
if ods:
self.stdin.write(odsopen) # depends on [control=['if'], data=[]]
pgm = mj + b'\n' + pcodei.encode(self.sascfg.encoding) + pcodeiv.encode(self.sascfg.encoding)
pgm += code.encode(self.sascfg.encoding) + b'\n' + pcodeo.encode(self.sascfg.encoding) + b'\n' + mj
out = self.stdin.write(pgm)
if ods:
self.stdin.write(odsclose) # depends on [control=['if'], data=[]]
out = self.stdin.write(b'\n' + logcodei.encode(self.sascfg.encoding) + b'\n')
self.stdin.flush()
while not done:
try:
while True:
rc = os.waitid(os.P_PID, self.pid, os.WEXITED | os.WNOHANG)
if rc is not None:
log = b''
try:
log = self.stderr.read1(4096)
if len(log) > 0:
logf += log # depends on [control=['if'], data=[]]
self._log += logf.decode(self.sascfg.encoding, errors='replace') # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
self.pid = None
self._sb.SASpid = None
return dict(LOG='SAS process has terminated unexpectedly. Pid State= ' + str(rc) + '\n' + logf.decode(self.sascfg.encoding, errors='replace'), LST='') # depends on [control=['if'], data=['rc']]
if bail:
eof -= 1 # depends on [control=['if'], data=[]]
if eof < 0:
break # depends on [control=['if'], data=[]]
if ods:
lst = self.stdout.read1(4096) # depends on [control=['if'], data=[]]
else:
lst = self.stdout.read1(4096)
if len(lst) > 0:
lstf += lst # depends on [control=['if'], data=[]]
else:
log = self.stderr.read1(4096)
if len(log) > 0:
logf += log
if logf.count(logcodeo) >= 1:
bail = True # depends on [control=['if'], data=[]]
if not bail and bc:
self.stdin.write(odsclose + logcodei.encode(self.sascfg.encoding) + b'\n')
self.stdin.flush()
bc = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
done = True # depends on [control=['try'], data=[]]
except ConnectionResetError:
log = ''
try:
log = self.stderr.read1(4096)
if len(log) > 0:
logf += log # depends on [control=['if'], data=[]]
self._log += logf.decode(self.sascfg.encoding, errors='replace') # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
rc = 0
rc = os.waitpid(self.pid, 0)
self.pid = None
self._sb.SASpid = None
log = logf.partition(logcodeo)[0] + b'\nConnection Reset: SAS process has terminated unexpectedly. Pid State= ' + str(rc).encode() + b'\n' + logf
return dict(LOG=log.encode(), LST='') # depends on [control=['except'], data=[]]
except (KeyboardInterrupt, SystemExit):
print('Exception caught!')
ll = self._breakprompt(logcodeo)
if ll.get('ABORT', False):
return ll # depends on [control=['if'], data=[]]
logf += ll['LOG']
lstf += ll['LST']
bc = ll['BC']
if not bc:
print('Exception handled :)\n') # depends on [control=['if'], data=[]]
else:
print('Exception ignored, continuing to process...\n')
self.stdin.write(odsclose + logcodei.encode(self.sascfg.encoding) + b'\n')
self.stdin.flush() # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
if ods:
try:
lstf = lstf.decode() # depends on [control=['try'], data=[]]
except UnicodeDecodeError:
try:
lstf = lstf.decode(self.sascfg.encoding) # depends on [control=['try'], data=[]]
except UnicodeDecodeError:
lstf = lstf.decode(errors='replace') # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
lstf = lstf.decode(self.sascfg.encoding, errors='replace')
logf = logf.decode(self.sascfg.encoding, errors='replace')
trip = lstf.rpartition('/*]]>*/')
if len(trip[1]) > 0 and len(trip[2]) < 100:
lstf = '' # depends on [control=['if'], data=[]]
self._log += logf
final = logf.partition(logcodei)
z = final[0].rpartition(chr(10))
prev = '%08d' % (self._log_cnt - 1)
zz = z[0].rpartition('\nE3969440A681A24088859985' + prev + '\n')
logd = zz[2].replace(mj.decode(self.sascfg.encoding), '')
lstd = lstf.replace(chr(12), chr(10)).replace('<body class="c body">', '<body class="l body">').replace('font-size: x-small;', 'font-size: normal;')
return dict(LOG=logd, LST=lstd) |
def join_dags(self, names=None):
""" Wait for the specified dags to terminate.
This function blocks until the specified dags terminate. If no dags are specified
wait for all dags of the workflow, except the dag of the task calling this signal,
to terminate.
Args:
names (list): The names of the dags that have to terminate.
Returns:
bool: True if all the signal was sent successfully.
"""
return self._client.send(
Request(
action='join_dags',
payload={'names': names}
)
).success | def function[join_dags, parameter[self, names]]:
constant[ Wait for the specified dags to terminate.
This function blocks until the specified dags terminate. If no dags are specified
wait for all dags of the workflow, except the dag of the task calling this signal,
to terminate.
Args:
names (list): The names of the dags that have to terminate.
Returns:
bool: True if all the signal was sent successfully.
]
return[call[name[self]._client.send, parameter[call[name[Request], parameter[]]]].success] | keyword[def] identifier[join_dags] ( identifier[self] , identifier[names] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_client] . identifier[send] (
identifier[Request] (
identifier[action] = literal[string] ,
identifier[payload] ={ literal[string] : identifier[names] }
)
). identifier[success] | def join_dags(self, names=None):
""" Wait for the specified dags to terminate.
This function blocks until the specified dags terminate. If no dags are specified
wait for all dags of the workflow, except the dag of the task calling this signal,
to terminate.
Args:
names (list): The names of the dags that have to terminate.
Returns:
bool: True if all the signal was sent successfully.
"""
return self._client.send(Request(action='join_dags', payload={'names': names})).success |
def _read_csv_table(path):
"""Lee un CSV a una lista de diccionarios."""
with open(path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
table = list(reader)
return table | def function[_read_csv_table, parameter[path]]:
constant[Lee un CSV a una lista de diccionarios.]
with call[name[open], parameter[name[path], constant[rb]]] begin[:]
variable[reader] assign[=] call[name[csv].DictReader, parameter[name[csvfile]]]
variable[table] assign[=] call[name[list], parameter[name[reader]]]
return[name[table]] | keyword[def] identifier[_read_csv_table] ( identifier[path] ):
literal[string]
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[csvfile] :
identifier[reader] = identifier[csv] . identifier[DictReader] ( identifier[csvfile] )
identifier[table] = identifier[list] ( identifier[reader] )
keyword[return] identifier[table] | def _read_csv_table(path):
"""Lee un CSV a una lista de diccionarios."""
with open(path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
table = list(reader) # depends on [control=['with'], data=['csvfile']]
return table |
def _ExtractOAuth2Client(product_yaml_key, product_data, proxy_config):
"""Generates an GoogleOAuth2Client subclass using the given product_data.
Args:
product_yaml_key: a string key identifying the product being configured.
product_data: a dict containing the configurations for a given product.
proxy_config: a ProxyConfig instance.
Returns:
An instantiated GoogleOAuth2Client subclass.
Raises:
A GoogleAdsValueError if the OAuth2 configuration for the given product is
misconfigured.
"""
oauth2_kwargs = {
'proxy_config': proxy_config
}
if all(config in product_data for config in _OAUTH2_INSTALLED_APP_KEYS):
oauth2_args = [
product_data['client_id'], product_data['client_secret'],
product_data['refresh_token']
]
oauth2_client = googleads.oauth2.GoogleRefreshTokenClient
for key in _OAUTH2_INSTALLED_APP_KEYS:
del product_data[key]
elif all(config in product_data for config in _OAUTH2_SERVICE_ACCT_KEYS):
oauth2_args = [
product_data['path_to_private_key_file'],
googleads.oauth2.GetAPIScope(product_yaml_key),
]
oauth2_kwargs.update({
'sub': product_data.get('delegated_account')
})
oauth2_client = googleads.oauth2.GoogleServiceAccountClient
for key in _OAUTH2_SERVICE_ACCT_KEYS:
del product_data[key]
for optional_key in _OAUTH2_SERVICE_ACCT_KEYS_OPTIONAL:
if optional_key in product_data:
del product_data[optional_key]
else:
raise googleads.errors.GoogleAdsValueError(
'Your yaml file is incorrectly configured for OAuth2. You need to '
'specify credentials for either the installed application flow (%s) '
'or service account flow (%s).' %
(_OAUTH2_INSTALLED_APP_KEYS, _OAUTH2_SERVICE_ACCT_KEYS))
return oauth2_client(*oauth2_args, **oauth2_kwargs) | def function[_ExtractOAuth2Client, parameter[product_yaml_key, product_data, proxy_config]]:
constant[Generates an GoogleOAuth2Client subclass using the given product_data.
Args:
product_yaml_key: a string key identifying the product being configured.
product_data: a dict containing the configurations for a given product.
proxy_config: a ProxyConfig instance.
Returns:
An instantiated GoogleOAuth2Client subclass.
Raises:
A GoogleAdsValueError if the OAuth2 configuration for the given product is
misconfigured.
]
variable[oauth2_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b1bbf640>], [<ast.Name object at 0x7da1b1bbfee0>]]
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b1bbf340>]] begin[:]
variable[oauth2_args] assign[=] list[[<ast.Subscript object at 0x7da1b1bbe230>, <ast.Subscript object at 0x7da1b1bbf220>, <ast.Subscript object at 0x7da1b1bbeb00>]]
variable[oauth2_client] assign[=] name[googleads].oauth2.GoogleRefreshTokenClient
for taget[name[key]] in starred[name[_OAUTH2_INSTALLED_APP_KEYS]] begin[:]
<ast.Delete object at 0x7da1b1bbc0a0>
return[call[name[oauth2_client], parameter[<ast.Starred object at 0x7da1b1bbc4f0>]]] | keyword[def] identifier[_ExtractOAuth2Client] ( identifier[product_yaml_key] , identifier[product_data] , identifier[proxy_config] ):
literal[string]
identifier[oauth2_kwargs] ={
literal[string] : identifier[proxy_config]
}
keyword[if] identifier[all] ( identifier[config] keyword[in] identifier[product_data] keyword[for] identifier[config] keyword[in] identifier[_OAUTH2_INSTALLED_APP_KEYS] ):
identifier[oauth2_args] =[
identifier[product_data] [ literal[string] ], identifier[product_data] [ literal[string] ],
identifier[product_data] [ literal[string] ]
]
identifier[oauth2_client] = identifier[googleads] . identifier[oauth2] . identifier[GoogleRefreshTokenClient]
keyword[for] identifier[key] keyword[in] identifier[_OAUTH2_INSTALLED_APP_KEYS] :
keyword[del] identifier[product_data] [ identifier[key] ]
keyword[elif] identifier[all] ( identifier[config] keyword[in] identifier[product_data] keyword[for] identifier[config] keyword[in] identifier[_OAUTH2_SERVICE_ACCT_KEYS] ):
identifier[oauth2_args] =[
identifier[product_data] [ literal[string] ],
identifier[googleads] . identifier[oauth2] . identifier[GetAPIScope] ( identifier[product_yaml_key] ),
]
identifier[oauth2_kwargs] . identifier[update] ({
literal[string] : identifier[product_data] . identifier[get] ( literal[string] )
})
identifier[oauth2_client] = identifier[googleads] . identifier[oauth2] . identifier[GoogleServiceAccountClient]
keyword[for] identifier[key] keyword[in] identifier[_OAUTH2_SERVICE_ACCT_KEYS] :
keyword[del] identifier[product_data] [ identifier[key] ]
keyword[for] identifier[optional_key] keyword[in] identifier[_OAUTH2_SERVICE_ACCT_KEYS_OPTIONAL] :
keyword[if] identifier[optional_key] keyword[in] identifier[product_data] :
keyword[del] identifier[product_data] [ identifier[optional_key] ]
keyword[else] :
keyword[raise] identifier[googleads] . identifier[errors] . identifier[GoogleAdsValueError] (
literal[string]
literal[string]
literal[string] %
( identifier[_OAUTH2_INSTALLED_APP_KEYS] , identifier[_OAUTH2_SERVICE_ACCT_KEYS] ))
keyword[return] identifier[oauth2_client] (* identifier[oauth2_args] ,** identifier[oauth2_kwargs] ) | def _ExtractOAuth2Client(product_yaml_key, product_data, proxy_config):
"""Generates an GoogleOAuth2Client subclass using the given product_data.
Args:
product_yaml_key: a string key identifying the product being configured.
product_data: a dict containing the configurations for a given product.
proxy_config: a ProxyConfig instance.
Returns:
An instantiated GoogleOAuth2Client subclass.
Raises:
A GoogleAdsValueError if the OAuth2 configuration for the given product is
misconfigured.
"""
oauth2_kwargs = {'proxy_config': proxy_config}
if all((config in product_data for config in _OAUTH2_INSTALLED_APP_KEYS)):
oauth2_args = [product_data['client_id'], product_data['client_secret'], product_data['refresh_token']]
oauth2_client = googleads.oauth2.GoogleRefreshTokenClient
for key in _OAUTH2_INSTALLED_APP_KEYS:
del product_data[key] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
elif all((config in product_data for config in _OAUTH2_SERVICE_ACCT_KEYS)):
oauth2_args = [product_data['path_to_private_key_file'], googleads.oauth2.GetAPIScope(product_yaml_key)]
oauth2_kwargs.update({'sub': product_data.get('delegated_account')})
oauth2_client = googleads.oauth2.GoogleServiceAccountClient
for key in _OAUTH2_SERVICE_ACCT_KEYS:
del product_data[key] # depends on [control=['for'], data=['key']]
for optional_key in _OAUTH2_SERVICE_ACCT_KEYS_OPTIONAL:
if optional_key in product_data:
del product_data[optional_key] # depends on [control=['if'], data=['optional_key', 'product_data']] # depends on [control=['for'], data=['optional_key']] # depends on [control=['if'], data=[]]
else:
raise googleads.errors.GoogleAdsValueError('Your yaml file is incorrectly configured for OAuth2. You need to specify credentials for either the installed application flow (%s) or service account flow (%s).' % (_OAUTH2_INSTALLED_APP_KEYS, _OAUTH2_SERVICE_ACCT_KEYS))
return oauth2_client(*oauth2_args, **oauth2_kwargs) |
def _get_changed_diff(self, blueprint, schema):
"""
Get the table diffrence for the given changes.
:param blueprint: The blueprint
:type blueprint: Blueprint
:param schema: The schema
:type schema: orator.dbal.SchemaManager
:rtype: orator.dbal.TableDiff
"""
table = schema.list_table_details(
self.get_table_prefix() + blueprint.get_table()
)
return Comparator().diff_table(
table, self._get_table_with_column_changes(blueprint, table)
) | def function[_get_changed_diff, parameter[self, blueprint, schema]]:
constant[
Get the table diffrence for the given changes.
:param blueprint: The blueprint
:type blueprint: Blueprint
:param schema: The schema
:type schema: orator.dbal.SchemaManager
:rtype: orator.dbal.TableDiff
]
variable[table] assign[=] call[name[schema].list_table_details, parameter[binary_operation[call[name[self].get_table_prefix, parameter[]] + call[name[blueprint].get_table, parameter[]]]]]
return[call[call[name[Comparator], parameter[]].diff_table, parameter[name[table], call[name[self]._get_table_with_column_changes, parameter[name[blueprint], name[table]]]]]] | keyword[def] identifier[_get_changed_diff] ( identifier[self] , identifier[blueprint] , identifier[schema] ):
literal[string]
identifier[table] = identifier[schema] . identifier[list_table_details] (
identifier[self] . identifier[get_table_prefix] ()+ identifier[blueprint] . identifier[get_table] ()
)
keyword[return] identifier[Comparator] (). identifier[diff_table] (
identifier[table] , identifier[self] . identifier[_get_table_with_column_changes] ( identifier[blueprint] , identifier[table] )
) | def _get_changed_diff(self, blueprint, schema):
"""
Get the table diffrence for the given changes.
:param blueprint: The blueprint
:type blueprint: Blueprint
:param schema: The schema
:type schema: orator.dbal.SchemaManager
:rtype: orator.dbal.TableDiff
"""
table = schema.list_table_details(self.get_table_prefix() + blueprint.get_table())
return Comparator().diff_table(table, self._get_table_with_column_changes(blueprint, table)) |
def to_text(self, omit_final_dot = False):
"""Convert name to text format.
@param omit_final_dot: If True, don't emit the final dot (denoting the
root label) for absolute names. The default is False.
@rtype: string
"""
if len(self.labels) == 0:
return '@'
if len(self.labels) == 1 and self.labels[0] == '':
return '.'
if omit_final_dot and self.is_absolute():
l = self.labels[:-1]
else:
l = self.labels
s = '.'.join(map(_escapify, l))
return s | def function[to_text, parameter[self, omit_final_dot]]:
constant[Convert name to text format.
@param omit_final_dot: If True, don't emit the final dot (denoting the
root label) for absolute names. The default is False.
@rtype: string
]
if compare[call[name[len], parameter[name[self].labels]] equal[==] constant[0]] begin[:]
return[constant[@]]
if <ast.BoolOp object at 0x7da18f7231f0> begin[:]
return[constant[.]]
if <ast.BoolOp object at 0x7da18f7230d0> begin[:]
variable[l] assign[=] call[name[self].labels][<ast.Slice object at 0x7da18f721870>]
variable[s] assign[=] call[constant[.].join, parameter[call[name[map], parameter[name[_escapify], name[l]]]]]
return[name[s]] | keyword[def] identifier[to_text] ( identifier[self] , identifier[omit_final_dot] = keyword[False] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[labels] )== literal[int] :
keyword[return] literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[labels] )== literal[int] keyword[and] identifier[self] . identifier[labels] [ literal[int] ]== literal[string] :
keyword[return] literal[string]
keyword[if] identifier[omit_final_dot] keyword[and] identifier[self] . identifier[is_absolute] ():
identifier[l] = identifier[self] . identifier[labels] [:- literal[int] ]
keyword[else] :
identifier[l] = identifier[self] . identifier[labels]
identifier[s] = literal[string] . identifier[join] ( identifier[map] ( identifier[_escapify] , identifier[l] ))
keyword[return] identifier[s] | def to_text(self, omit_final_dot=False):
"""Convert name to text format.
@param omit_final_dot: If True, don't emit the final dot (denoting the
root label) for absolute names. The default is False.
@rtype: string
"""
if len(self.labels) == 0:
return '@' # depends on [control=['if'], data=[]]
if len(self.labels) == 1 and self.labels[0] == '':
return '.' # depends on [control=['if'], data=[]]
if omit_final_dot and self.is_absolute():
l = self.labels[:-1] # depends on [control=['if'], data=[]]
else:
l = self.labels
s = '.'.join(map(_escapify, l))
return s |
def fsn2text(path, strict=False):
"""
Args:
path (fsnative): The path to convert
strict (bool): Fail in case the conversion is not reversible
Returns:
`text`
Raises:
TypeError: In case no `fsnative` has been passed
ValueError: In case ``strict`` was True and the conversion failed
Converts a `fsnative` path to `text`.
Can be used to pass a path to some unicode API, like for example a GUI
toolkit.
If ``strict`` is True the conversion will fail in case it is not
reversible. This can be useful for converting program arguments that are
supposed to be text and erroring out in case they are not.
Encoding with a Unicode encoding will always succeed with the result.
"""
path = _fsn2native(path)
errors = "strict" if strict else "replace"
if is_win:
return path.encode("utf-16-le", _surrogatepass).decode("utf-16-le",
errors)
else:
return path.decode(_encoding, errors) | def function[fsn2text, parameter[path, strict]]:
constant[
Args:
path (fsnative): The path to convert
strict (bool): Fail in case the conversion is not reversible
Returns:
`text`
Raises:
TypeError: In case no `fsnative` has been passed
ValueError: In case ``strict`` was True and the conversion failed
Converts a `fsnative` path to `text`.
Can be used to pass a path to some unicode API, like for example a GUI
toolkit.
If ``strict`` is True the conversion will fail in case it is not
reversible. This can be useful for converting program arguments that are
supposed to be text and erroring out in case they are not.
Encoding with a Unicode encoding will always succeed with the result.
]
variable[path] assign[=] call[name[_fsn2native], parameter[name[path]]]
variable[errors] assign[=] <ast.IfExp object at 0x7da1b20f8520>
if name[is_win] begin[:]
return[call[call[name[path].encode, parameter[constant[utf-16-le], name[_surrogatepass]]].decode, parameter[constant[utf-16-le], name[errors]]]] | keyword[def] identifier[fsn2text] ( identifier[path] , identifier[strict] = keyword[False] ):
literal[string]
identifier[path] = identifier[_fsn2native] ( identifier[path] )
identifier[errors] = literal[string] keyword[if] identifier[strict] keyword[else] literal[string]
keyword[if] identifier[is_win] :
keyword[return] identifier[path] . identifier[encode] ( literal[string] , identifier[_surrogatepass] ). identifier[decode] ( literal[string] ,
identifier[errors] )
keyword[else] :
keyword[return] identifier[path] . identifier[decode] ( identifier[_encoding] , identifier[errors] ) | def fsn2text(path, strict=False):
"""
Args:
path (fsnative): The path to convert
strict (bool): Fail in case the conversion is not reversible
Returns:
`text`
Raises:
TypeError: In case no `fsnative` has been passed
ValueError: In case ``strict`` was True and the conversion failed
Converts a `fsnative` path to `text`.
Can be used to pass a path to some unicode API, like for example a GUI
toolkit.
If ``strict`` is True the conversion will fail in case it is not
reversible. This can be useful for converting program arguments that are
supposed to be text and erroring out in case they are not.
Encoding with a Unicode encoding will always succeed with the result.
"""
path = _fsn2native(path)
errors = 'strict' if strict else 'replace'
if is_win:
return path.encode('utf-16-le', _surrogatepass).decode('utf-16-le', errors) # depends on [control=['if'], data=[]]
else:
return path.decode(_encoding, errors) |
def get_diff_idxs(array, rtol, atol):
"""
Given an array with (C, N, L) values, being the first the reference value,
compute the relative differences and discard the one below the tolerance.
:returns: indices where there are sensible differences.
"""
C, N, L = array.shape
diff_idxs = set() # indices of the sites with differences
for c in range(1, C):
for n in range(N):
if not numpy.allclose(array[c, n], array[0, n], rtol, atol):
diff_idxs.add(n)
return numpy.fromiter(diff_idxs, int) | def function[get_diff_idxs, parameter[array, rtol, atol]]:
constant[
Given an array with (C, N, L) values, being the first the reference value,
compute the relative differences and discard the one below the tolerance.
:returns: indices where there are sensible differences.
]
<ast.Tuple object at 0x7da207f997e0> assign[=] name[array].shape
variable[diff_idxs] assign[=] call[name[set], parameter[]]
for taget[name[c]] in starred[call[name[range], parameter[constant[1], name[C]]]] begin[:]
for taget[name[n]] in starred[call[name[range], parameter[name[N]]]] begin[:]
if <ast.UnaryOp object at 0x7da18ede5360> begin[:]
call[name[diff_idxs].add, parameter[name[n]]]
return[call[name[numpy].fromiter, parameter[name[diff_idxs], name[int]]]] | keyword[def] identifier[get_diff_idxs] ( identifier[array] , identifier[rtol] , identifier[atol] ):
literal[string]
identifier[C] , identifier[N] , identifier[L] = identifier[array] . identifier[shape]
identifier[diff_idxs] = identifier[set] ()
keyword[for] identifier[c] keyword[in] identifier[range] ( literal[int] , identifier[C] ):
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[N] ):
keyword[if] keyword[not] identifier[numpy] . identifier[allclose] ( identifier[array] [ identifier[c] , identifier[n] ], identifier[array] [ literal[int] , identifier[n] ], identifier[rtol] , identifier[atol] ):
identifier[diff_idxs] . identifier[add] ( identifier[n] )
keyword[return] identifier[numpy] . identifier[fromiter] ( identifier[diff_idxs] , identifier[int] ) | def get_diff_idxs(array, rtol, atol):
"""
Given an array with (C, N, L) values, being the first the reference value,
compute the relative differences and discard the one below the tolerance.
:returns: indices where there are sensible differences.
"""
(C, N, L) = array.shape
diff_idxs = set() # indices of the sites with differences
for c in range(1, C):
for n in range(N):
if not numpy.allclose(array[c, n], array[0, n], rtol, atol):
diff_idxs.add(n) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']] # depends on [control=['for'], data=['c']]
return numpy.fromiter(diff_idxs, int) |
def search_disk_size(self, search_index):
"""
Retrieves disk size information about a specified search index within
the design document, returns dictionary
GET databasename/_design/{ddoc}/_search_disk_size/{search_index}
"""
ddoc_search_disk_size = self.r_session.get(
'/'.join([self.document_url, '_search_disk_size', search_index]))
ddoc_search_disk_size.raise_for_status()
return response_to_json_dict(ddoc_search_disk_size) | def function[search_disk_size, parameter[self, search_index]]:
constant[
Retrieves disk size information about a specified search index within
the design document, returns dictionary
GET databasename/_design/{ddoc}/_search_disk_size/{search_index}
]
variable[ddoc_search_disk_size] assign[=] call[name[self].r_session.get, parameter[call[constant[/].join, parameter[list[[<ast.Attribute object at 0x7da18fe92050>, <ast.Constant object at 0x7da18fe92f50>, <ast.Name object at 0x7da18fe92950>]]]]]]
call[name[ddoc_search_disk_size].raise_for_status, parameter[]]
return[call[name[response_to_json_dict], parameter[name[ddoc_search_disk_size]]]] | keyword[def] identifier[search_disk_size] ( identifier[self] , identifier[search_index] ):
literal[string]
identifier[ddoc_search_disk_size] = identifier[self] . identifier[r_session] . identifier[get] (
literal[string] . identifier[join] ([ identifier[self] . identifier[document_url] , literal[string] , identifier[search_index] ]))
identifier[ddoc_search_disk_size] . identifier[raise_for_status] ()
keyword[return] identifier[response_to_json_dict] ( identifier[ddoc_search_disk_size] ) | def search_disk_size(self, search_index):
"""
Retrieves disk size information about a specified search index within
the design document, returns dictionary
GET databasename/_design/{ddoc}/_search_disk_size/{search_index}
"""
ddoc_search_disk_size = self.r_session.get('/'.join([self.document_url, '_search_disk_size', search_index]))
ddoc_search_disk_size.raise_for_status()
return response_to_json_dict(ddoc_search_disk_size) |
def concatenated(fp):
"""Read lines from fp concatenating on backslash (\\)"""
line_parts = []
for line in fp:
line = line.strip()
if line.endswith('\\'):
line_parts.append(line[:-1].rstrip())
else:
line_parts.append(line)
yield ' '.join(line_parts)
line_parts[:] = []
if line_parts:
# Impossible:
raise RuntimeError("Compiled file ends with backslash \\") | def function[concatenated, parameter[fp]]:
constant[Read lines from fp concatenating on backslash (\)]
variable[line_parts] assign[=] list[[]]
for taget[name[line]] in starred[name[fp]] begin[:]
variable[line] assign[=] call[name[line].strip, parameter[]]
if call[name[line].endswith, parameter[constant[\]]] begin[:]
call[name[line_parts].append, parameter[call[call[name[line]][<ast.Slice object at 0x7da20c76f580>].rstrip, parameter[]]]]
if name[line_parts] begin[:]
<ast.Raise object at 0x7da18bccaa10> | keyword[def] identifier[concatenated] ( identifier[fp] ):
literal[string]
identifier[line_parts] =[]
keyword[for] identifier[line] keyword[in] identifier[fp] :
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[line] . identifier[endswith] ( literal[string] ):
identifier[line_parts] . identifier[append] ( identifier[line] [:- literal[int] ]. identifier[rstrip] ())
keyword[else] :
identifier[line_parts] . identifier[append] ( identifier[line] )
keyword[yield] literal[string] . identifier[join] ( identifier[line_parts] )
identifier[line_parts] [:]=[]
keyword[if] identifier[line_parts] :
keyword[raise] identifier[RuntimeError] ( literal[string] ) | def concatenated(fp):
"""Read lines from fp concatenating on backslash (\\)"""
line_parts = []
for line in fp:
line = line.strip()
if line.endswith('\\'):
line_parts.append(line[:-1].rstrip()) # depends on [control=['if'], data=[]]
else:
line_parts.append(line)
yield ' '.join(line_parts)
line_parts[:] = [] # depends on [control=['for'], data=['line']]
if line_parts:
# Impossible:
raise RuntimeError('Compiled file ends with backslash \\') # depends on [control=['if'], data=[]] |
def extendMarkdown(self, md, md_globals=None):
"""Initializes markdown extension components."""
if any(
x not in md.treeprocessors
for x in self.REQUIRED_EXTENSION_INTERNAL_NAMES):
raise RuntimeError(
"The attr_cols markdown extension depends the following"
" extensions which must preceded it in the extension"
" list: %s" % ", ".join(self.REQUIRED_EXTENSIONS))
processor = AttrColTreeProcessor(md, self.conf)
md.treeprocessors.register(
processor, 'attr_cols',
5) | def function[extendMarkdown, parameter[self, md, md_globals]]:
constant[Initializes markdown extension components.]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b1642b90>]] begin[:]
<ast.Raise object at 0x7da1b1642800>
variable[processor] assign[=] call[name[AttrColTreeProcessor], parameter[name[md], name[self].conf]]
call[name[md].treeprocessors.register, parameter[name[processor], constant[attr_cols], constant[5]]] | keyword[def] identifier[extendMarkdown] ( identifier[self] , identifier[md] , identifier[md_globals] = keyword[None] ):
literal[string]
keyword[if] identifier[any] (
identifier[x] keyword[not] keyword[in] identifier[md] . identifier[treeprocessors]
keyword[for] identifier[x] keyword[in] identifier[self] . identifier[REQUIRED_EXTENSION_INTERNAL_NAMES] ):
keyword[raise] identifier[RuntimeError] (
literal[string]
literal[string]
literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[REQUIRED_EXTENSIONS] ))
identifier[processor] = identifier[AttrColTreeProcessor] ( identifier[md] , identifier[self] . identifier[conf] )
identifier[md] . identifier[treeprocessors] . identifier[register] (
identifier[processor] , literal[string] ,
literal[int] ) | def extendMarkdown(self, md, md_globals=None):
"""Initializes markdown extension components."""
if any((x not in md.treeprocessors for x in self.REQUIRED_EXTENSION_INTERNAL_NAMES)):
raise RuntimeError('The attr_cols markdown extension depends the following extensions which must preceded it in the extension list: %s' % ', '.join(self.REQUIRED_EXTENSIONS)) # depends on [control=['if'], data=[]]
processor = AttrColTreeProcessor(md, self.conf)
md.treeprocessors.register(processor, 'attr_cols', 5) |
def load_partition_data(self, index):
"""
Load and return the partition with the given index.
Args:
index (int): The index of partition, that refers to the index in ``self.partitions``.
Returns:
PartitionData: A PartitionData object containing the data for the partition with the given index.
"""
info = self.partitions[index]
data = PartitionData(info)
for utt_id in info.utt_ids:
utt_data = [c._file[utt_id][:] for c in self.containers]
data.utt_data.append(utt_data)
return data | def function[load_partition_data, parameter[self, index]]:
constant[
Load and return the partition with the given index.
Args:
index (int): The index of partition, that refers to the index in ``self.partitions``.
Returns:
PartitionData: A PartitionData object containing the data for the partition with the given index.
]
variable[info] assign[=] call[name[self].partitions][name[index]]
variable[data] assign[=] call[name[PartitionData], parameter[name[info]]]
for taget[name[utt_id]] in starred[name[info].utt_ids] begin[:]
variable[utt_data] assign[=] <ast.ListComp object at 0x7da1b0e24e20>
call[name[data].utt_data.append, parameter[name[utt_data]]]
return[name[data]] | keyword[def] identifier[load_partition_data] ( identifier[self] , identifier[index] ):
literal[string]
identifier[info] = identifier[self] . identifier[partitions] [ identifier[index] ]
identifier[data] = identifier[PartitionData] ( identifier[info] )
keyword[for] identifier[utt_id] keyword[in] identifier[info] . identifier[utt_ids] :
identifier[utt_data] =[ identifier[c] . identifier[_file] [ identifier[utt_id] ][:] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[containers] ]
identifier[data] . identifier[utt_data] . identifier[append] ( identifier[utt_data] )
keyword[return] identifier[data] | def load_partition_data(self, index):
"""
Load and return the partition with the given index.
Args:
index (int): The index of partition, that refers to the index in ``self.partitions``.
Returns:
PartitionData: A PartitionData object containing the data for the partition with the given index.
"""
info = self.partitions[index]
data = PartitionData(info)
for utt_id in info.utt_ids:
utt_data = [c._file[utt_id][:] for c in self.containers]
data.utt_data.append(utt_data) # depends on [control=['for'], data=['utt_id']]
return data |
def get(self, section, option, *args):
"""Get option value from section. If an option is secure,
populates the plain text."""
if self.is_secure_option(section, option) and self.keyring_available:
s_option = "%s%s" % (section, option)
if self._unsaved.get(s_option, [''])[0] == 'set':
res = self._unsaved[s_option][1]
else:
res = keyring.get_password(self.keyring_name, s_option)
else:
res = ConfigParser.get(self, section, option, *args)
if res == '!!False!!':
return False
return res | def function[get, parameter[self, section, option]]:
constant[Get option value from section. If an option is secure,
populates the plain text.]
if <ast.BoolOp object at 0x7da18dc07250> begin[:]
variable[s_option] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc063e0>, <ast.Name object at 0x7da18dc04d00>]]]
if compare[call[call[name[self]._unsaved.get, parameter[name[s_option], list[[<ast.Constant object at 0x7da18dc06290>]]]]][constant[0]] equal[==] constant[set]] begin[:]
variable[res] assign[=] call[call[name[self]._unsaved][name[s_option]]][constant[1]]
if compare[name[res] equal[==] constant[!!False!!]] begin[:]
return[constant[False]]
return[name[res]] | keyword[def] identifier[get] ( identifier[self] , identifier[section] , identifier[option] ,* identifier[args] ):
literal[string]
keyword[if] identifier[self] . identifier[is_secure_option] ( identifier[section] , identifier[option] ) keyword[and] identifier[self] . identifier[keyring_available] :
identifier[s_option] = literal[string] %( identifier[section] , identifier[option] )
keyword[if] identifier[self] . identifier[_unsaved] . identifier[get] ( identifier[s_option] ,[ literal[string] ])[ literal[int] ]== literal[string] :
identifier[res] = identifier[self] . identifier[_unsaved] [ identifier[s_option] ][ literal[int] ]
keyword[else] :
identifier[res] = identifier[keyring] . identifier[get_password] ( identifier[self] . identifier[keyring_name] , identifier[s_option] )
keyword[else] :
identifier[res] = identifier[ConfigParser] . identifier[get] ( identifier[self] , identifier[section] , identifier[option] ,* identifier[args] )
keyword[if] identifier[res] == literal[string] :
keyword[return] keyword[False]
keyword[return] identifier[res] | def get(self, section, option, *args):
"""Get option value from section. If an option is secure,
populates the plain text."""
if self.is_secure_option(section, option) and self.keyring_available:
s_option = '%s%s' % (section, option)
if self._unsaved.get(s_option, [''])[0] == 'set':
res = self._unsaved[s_option][1] # depends on [control=['if'], data=[]]
else:
res = keyring.get_password(self.keyring_name, s_option) # depends on [control=['if'], data=[]]
else:
res = ConfigParser.get(self, section, option, *args)
if res == '!!False!!':
return False # depends on [control=['if'], data=[]]
return res |
def measure_observables(qc: QuantumComputer, tomo_experiment: TomographyExperiment,
n_shots: int = 10000, progress_callback=None, active_reset=False,
symmetrize_readout: str = 'exhaustive',
calibrate_readout: str = 'plus-eig'):
"""
Measure all the observables in a TomographyExperiment.
:param qc: A QuantumComputer which can run quantum programs
:param tomo_experiment: A suite of tomographic observables to measure
:param n_shots: The number of shots to take per ExperimentSetting
:param progress_callback: If not None, this function is called each time a group of
settings is run with arguments ``f(i, len(tomo_experiment)`` such that the progress
is ``i / len(tomo_experiment)``.
:param active_reset: Whether to actively reset qubits instead of waiting several
times the coherence length for qubits to decay to |0> naturally. Setting this
to True is much faster but there is a ~1% error per qubit in the reset operation.
Thermal noise from "traditional" reset is not routinely characterized but is of the same
order.
:param symmetrize_readout: Method used to symmetrize the readout errors, i.e. set
p(0|1) = p(1|0). For uncorrelated readout errors, this can be achieved by randomly
selecting between the POVMs {X.D1.X, X.D0.X} and {D0, D1} (where both D0 and D1 are
diagonal). However, here we currently support exhaustive symmetrization and loop through
all possible 2^n POVMs {X/I . POVM . X/I}^n, and obtain symmetrization more generally,
i.e. set p(00|00) = p(01|01) = .. = p(11|11), as well as p(00|01) = p(01|00) etc. If this
is None, no symmetrization is performed. The exhaustive method can be specified by setting
this variable to 'exhaustive' (default value). Set to `None` if no symmetrization is
desired.
:param calibrate_readout: Method used to calibrate the readout results. Currently, the only
method supported is normalizing against the operator's expectation value in its +1
eigenstate, which can be specified by setting this variable to 'plus-eig' (default value).
The preceding symmetrization and this step together yield a more accurate estimation of the observable. Set to `None` if no calibration is desired.
"""
# calibration readout only works with symmetrization turned on
if calibrate_readout is not None and symmetrize_readout is None:
raise ValueError("Readout calibration only works with readout symmetrization turned on")
# Outer loop over a collection of grouped settings for which we can simultaneously
# estimate.
for i, settings in enumerate(tomo_experiment):
log.info(f"Collecting bitstrings for the {len(settings)} settings: {settings}")
# 1.1 Prepare a state according to the amalgam of all setting.in_state
total_prog = Program()
if active_reset:
total_prog += RESET()
max_weight_in_state = _max_weight_state(setting.in_state for setting in settings)
for oneq_state in max_weight_in_state.states:
total_prog += _one_q_state_prep(oneq_state)
# 1.2 Add in the program
total_prog += tomo_experiment.program
# 1.3 Measure the state according to setting.out_operator
max_weight_out_op = _max_weight_operator(setting.out_operator for setting in settings)
for qubit, op_str in max_weight_out_op:
total_prog += _local_pauli_eig_meas(op_str, qubit)
# 2. Symmetrization
qubits = max_weight_out_op.get_qubits()
if symmetrize_readout == 'exhaustive' and len(qubits) > 0:
bitstrings, d_qub_idx = _exhaustive_symmetrization(qc, qubits, n_shots, total_prog)
elif symmetrize_readout is None and len(qubits) > 0:
total_prog_no_symm = total_prog.copy()
ro = total_prog_no_symm.declare('ro', 'BIT', len(qubits))
d_qub_idx = {}
for i, q in enumerate(qubits):
total_prog_no_symm += MEASURE(q, ro[i])
# Keep track of qubit-classical register mapping via dict
d_qub_idx[q] = i
total_prog_no_symm.wrap_in_numshots_loop(n_shots)
total_prog_no_symm_native = qc.compiler.quil_to_native_quil(total_prog_no_symm)
total_prog_no_symm_bin = qc.compiler.native_quil_to_executable(total_prog_no_symm_native)
bitstrings = qc.run(total_prog_no_symm_bin)
elif len(qubits) == 0:
# looks like an identity operation
pass
else:
raise ValueError("Readout symmetrization method must be either 'exhaustive' or None")
if progress_callback is not None:
progress_callback(i, len(tomo_experiment))
# 3. Post-process
# Inner loop over the grouped settings. They only differ in which qubits' measurements
# we include in the post-processing. For example, if `settings` is Z1, Z2, Z1Z2 and we
# measure (n_shots, n_qubits=2) obs_strings then the full operator value involves selecting
# either the first column, second column, or both and multiplying along the row.
for setting in settings:
# 3.1 Get the term's coefficient so we can multiply it in later.
coeff = complex(setting.out_operator.coefficient)
if not np.isclose(coeff.imag, 0):
raise ValueError(f"{setting}'s out_operator has a complex coefficient.")
coeff = coeff.real
# 3.2 Special case for measuring the "identity" operator, which doesn't make much
# sense but should happen perfectly.
if is_identity(setting.out_operator):
yield ExperimentResult(
setting=setting,
expectation=coeff,
std_err=0.0,
total_counts=n_shots,
)
continue
# 3.3 Obtain statistics from result of experiment
obs_mean, obs_var = _stats_from_measurements(bitstrings, d_qub_idx, setting, n_shots, coeff)
if calibrate_readout == 'plus-eig':
# 4 Readout calibration
# 4.1 Obtain calibration program
calibr_prog = _calibration_program(qc, tomo_experiment, setting)
# 4.2 Perform symmetrization on the calibration program
if symmetrize_readout == 'exhaustive':
qubs_calibr = setting.out_operator.get_qubits()
calibr_shots = n_shots
calibr_results, d_calibr_qub_idx = _exhaustive_symmetrization(qc, qubs_calibr, calibr_shots, calibr_prog)
else:
raise ValueError("Readout symmetrization method must be either 'exhaustive' or None")
# 4.3 Obtain statistics from the measurement process
obs_calibr_mean, obs_calibr_var = _stats_from_measurements(calibr_results, d_calibr_qub_idx, setting, calibr_shots)
# 4.3 Calibrate the readout results
corrected_mean = obs_mean / obs_calibr_mean
corrected_var = ratio_variance(obs_mean, obs_var, obs_calibr_mean, obs_calibr_var)
yield ExperimentResult(
setting=setting,
expectation=corrected_mean.item(),
std_err=np.sqrt(corrected_var).item(),
total_counts=n_shots,
raw_expectation=obs_mean.item(),
raw_std_err=np.sqrt(obs_var).item(),
calibration_expectation=obs_calibr_mean.item(),
calibration_std_err=np.sqrt(obs_calibr_var).item(),
calibration_counts=calibr_shots,
)
elif calibrate_readout is None:
# No calibration
yield ExperimentResult(
setting=setting,
expectation=obs_mean.item(),
std_err=np.sqrt(obs_var).item(),
total_counts=n_shots,
)
else:
raise ValueError("Calibration readout method must be either 'plus-eig' or None") | def function[measure_observables, parameter[qc, tomo_experiment, n_shots, progress_callback, active_reset, symmetrize_readout, calibrate_readout]]:
constant[
Measure all the observables in a TomographyExperiment.
:param qc: A QuantumComputer which can run quantum programs
:param tomo_experiment: A suite of tomographic observables to measure
:param n_shots: The number of shots to take per ExperimentSetting
:param progress_callback: If not None, this function is called each time a group of
settings is run with arguments ``f(i, len(tomo_experiment)`` such that the progress
is ``i / len(tomo_experiment)``.
:param active_reset: Whether to actively reset qubits instead of waiting several
times the coherence length for qubits to decay to |0> naturally. Setting this
to True is much faster but there is a ~1% error per qubit in the reset operation.
Thermal noise from "traditional" reset is not routinely characterized but is of the same
order.
:param symmetrize_readout: Method used to symmetrize the readout errors, i.e. set
p(0|1) = p(1|0). For uncorrelated readout errors, this can be achieved by randomly
selecting between the POVMs {X.D1.X, X.D0.X} and {D0, D1} (where both D0 and D1 are
diagonal). However, here we currently support exhaustive symmetrization and loop through
all possible 2^n POVMs {X/I . POVM . X/I}^n, and obtain symmetrization more generally,
i.e. set p(00|00) = p(01|01) = .. = p(11|11), as well as p(00|01) = p(01|00) etc. If this
is None, no symmetrization is performed. The exhaustive method can be specified by setting
this variable to 'exhaustive' (default value). Set to `None` if no symmetrization is
desired.
:param calibrate_readout: Method used to calibrate the readout results. Currently, the only
method supported is normalizing against the operator's expectation value in its +1
eigenstate, which can be specified by setting this variable to 'plus-eig' (default value).
The preceding symmetrization and this step together yield a more accurate estimation of the observable. Set to `None` if no calibration is desired.
]
if <ast.BoolOp object at 0x7da1b1be47c0> begin[:]
<ast.Raise object at 0x7da1b1be4e80>
for taget[tuple[[<ast.Name object at 0x7da1b1be48e0>, <ast.Name object at 0x7da1b1be4af0>]]] in starred[call[name[enumerate], parameter[name[tomo_experiment]]]] begin[:]
call[name[log].info, parameter[<ast.JoinedStr object at 0x7da1b1be4760>]]
variable[total_prog] assign[=] call[name[Program], parameter[]]
if name[active_reset] begin[:]
<ast.AugAssign object at 0x7da1b1be4b20>
variable[max_weight_in_state] assign[=] call[name[_max_weight_state], parameter[<ast.GeneratorExp object at 0x7da1b1be44f0>]]
for taget[name[oneq_state]] in starred[name[max_weight_in_state].states] begin[:]
<ast.AugAssign object at 0x7da1b1be5090>
<ast.AugAssign object at 0x7da1b1be4490>
variable[max_weight_out_op] assign[=] call[name[_max_weight_operator], parameter[<ast.GeneratorExp object at 0x7da1b1bc2a10>]]
for taget[tuple[[<ast.Name object at 0x7da1b1bc2cb0>, <ast.Name object at 0x7da1b1bc2320>]]] in starred[name[max_weight_out_op]] begin[:]
<ast.AugAssign object at 0x7da1b1bc0640>
variable[qubits] assign[=] call[name[max_weight_out_op].get_qubits, parameter[]]
if <ast.BoolOp object at 0x7da1b1bc1c30> begin[:]
<ast.Tuple object at 0x7da1b1bc2ec0> assign[=] call[name[_exhaustive_symmetrization], parameter[name[qc], name[qubits], name[n_shots], name[total_prog]]]
if compare[name[progress_callback] is_not constant[None]] begin[:]
call[name[progress_callback], parameter[name[i], call[name[len], parameter[name[tomo_experiment]]]]]
for taget[name[setting]] in starred[name[settings]] begin[:]
variable[coeff] assign[=] call[name[complex], parameter[name[setting].out_operator.coefficient]]
if <ast.UnaryOp object at 0x7da1b1beca30> begin[:]
<ast.Raise object at 0x7da1b1befa60>
variable[coeff] assign[=] name[coeff].real
if call[name[is_identity], parameter[name[setting].out_operator]] begin[:]
<ast.Yield object at 0x7da1b1befaf0>
continue
<ast.Tuple object at 0x7da1b1bee7d0> assign[=] call[name[_stats_from_measurements], parameter[name[bitstrings], name[d_qub_idx], name[setting], name[n_shots], name[coeff]]]
if compare[name[calibrate_readout] equal[==] constant[plus-eig]] begin[:]
variable[calibr_prog] assign[=] call[name[_calibration_program], parameter[name[qc], name[tomo_experiment], name[setting]]]
if compare[name[symmetrize_readout] equal[==] constant[exhaustive]] begin[:]
variable[qubs_calibr] assign[=] call[name[setting].out_operator.get_qubits, parameter[]]
variable[calibr_shots] assign[=] name[n_shots]
<ast.Tuple object at 0x7da1b1bedd50> assign[=] call[name[_exhaustive_symmetrization], parameter[name[qc], name[qubs_calibr], name[calibr_shots], name[calibr_prog]]]
<ast.Tuple object at 0x7da1b1bedcf0> assign[=] call[name[_stats_from_measurements], parameter[name[calibr_results], name[d_calibr_qub_idx], name[setting], name[calibr_shots]]]
variable[corrected_mean] assign[=] binary_operation[name[obs_mean] / name[obs_calibr_mean]]
variable[corrected_var] assign[=] call[name[ratio_variance], parameter[name[obs_mean], name[obs_var], name[obs_calibr_mean], name[obs_calibr_var]]]
<ast.Yield object at 0x7da1b1beead0> | keyword[def] identifier[measure_observables] ( identifier[qc] : identifier[QuantumComputer] , identifier[tomo_experiment] : identifier[TomographyExperiment] ,
identifier[n_shots] : identifier[int] = literal[int] , identifier[progress_callback] = keyword[None] , identifier[active_reset] = keyword[False] ,
identifier[symmetrize_readout] : identifier[str] = literal[string] ,
identifier[calibrate_readout] : identifier[str] = literal[string] ):
literal[string]
keyword[if] identifier[calibrate_readout] keyword[is] keyword[not] keyword[None] keyword[and] identifier[symmetrize_readout] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[for] identifier[i] , identifier[settings] keyword[in] identifier[enumerate] ( identifier[tomo_experiment] ):
identifier[log] . identifier[info] ( literal[string] )
identifier[total_prog] = identifier[Program] ()
keyword[if] identifier[active_reset] :
identifier[total_prog] += identifier[RESET] ()
identifier[max_weight_in_state] = identifier[_max_weight_state] ( identifier[setting] . identifier[in_state] keyword[for] identifier[setting] keyword[in] identifier[settings] )
keyword[for] identifier[oneq_state] keyword[in] identifier[max_weight_in_state] . identifier[states] :
identifier[total_prog] += identifier[_one_q_state_prep] ( identifier[oneq_state] )
identifier[total_prog] += identifier[tomo_experiment] . identifier[program]
identifier[max_weight_out_op] = identifier[_max_weight_operator] ( identifier[setting] . identifier[out_operator] keyword[for] identifier[setting] keyword[in] identifier[settings] )
keyword[for] identifier[qubit] , identifier[op_str] keyword[in] identifier[max_weight_out_op] :
identifier[total_prog] += identifier[_local_pauli_eig_meas] ( identifier[op_str] , identifier[qubit] )
identifier[qubits] = identifier[max_weight_out_op] . identifier[get_qubits] ()
keyword[if] identifier[symmetrize_readout] == literal[string] keyword[and] identifier[len] ( identifier[qubits] )> literal[int] :
identifier[bitstrings] , identifier[d_qub_idx] = identifier[_exhaustive_symmetrization] ( identifier[qc] , identifier[qubits] , identifier[n_shots] , identifier[total_prog] )
keyword[elif] identifier[symmetrize_readout] keyword[is] keyword[None] keyword[and] identifier[len] ( identifier[qubits] )> literal[int] :
identifier[total_prog_no_symm] = identifier[total_prog] . identifier[copy] ()
identifier[ro] = identifier[total_prog_no_symm] . identifier[declare] ( literal[string] , literal[string] , identifier[len] ( identifier[qubits] ))
identifier[d_qub_idx] ={}
keyword[for] identifier[i] , identifier[q] keyword[in] identifier[enumerate] ( identifier[qubits] ):
identifier[total_prog_no_symm] += identifier[MEASURE] ( identifier[q] , identifier[ro] [ identifier[i] ])
identifier[d_qub_idx] [ identifier[q] ]= identifier[i]
identifier[total_prog_no_symm] . identifier[wrap_in_numshots_loop] ( identifier[n_shots] )
identifier[total_prog_no_symm_native] = identifier[qc] . identifier[compiler] . identifier[quil_to_native_quil] ( identifier[total_prog_no_symm] )
identifier[total_prog_no_symm_bin] = identifier[qc] . identifier[compiler] . identifier[native_quil_to_executable] ( identifier[total_prog_no_symm_native] )
identifier[bitstrings] = identifier[qc] . identifier[run] ( identifier[total_prog_no_symm_bin] )
keyword[elif] identifier[len] ( identifier[qubits] )== literal[int] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[progress_callback] keyword[is] keyword[not] keyword[None] :
identifier[progress_callback] ( identifier[i] , identifier[len] ( identifier[tomo_experiment] ))
keyword[for] identifier[setting] keyword[in] identifier[settings] :
identifier[coeff] = identifier[complex] ( identifier[setting] . identifier[out_operator] . identifier[coefficient] )
keyword[if] keyword[not] identifier[np] . identifier[isclose] ( identifier[coeff] . identifier[imag] , literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[coeff] = identifier[coeff] . identifier[real]
keyword[if] identifier[is_identity] ( identifier[setting] . identifier[out_operator] ):
keyword[yield] identifier[ExperimentResult] (
identifier[setting] = identifier[setting] ,
identifier[expectation] = identifier[coeff] ,
identifier[std_err] = literal[int] ,
identifier[total_counts] = identifier[n_shots] ,
)
keyword[continue]
identifier[obs_mean] , identifier[obs_var] = identifier[_stats_from_measurements] ( identifier[bitstrings] , identifier[d_qub_idx] , identifier[setting] , identifier[n_shots] , identifier[coeff] )
keyword[if] identifier[calibrate_readout] == literal[string] :
identifier[calibr_prog] = identifier[_calibration_program] ( identifier[qc] , identifier[tomo_experiment] , identifier[setting] )
keyword[if] identifier[symmetrize_readout] == literal[string] :
identifier[qubs_calibr] = identifier[setting] . identifier[out_operator] . identifier[get_qubits] ()
identifier[calibr_shots] = identifier[n_shots]
identifier[calibr_results] , identifier[d_calibr_qub_idx] = identifier[_exhaustive_symmetrization] ( identifier[qc] , identifier[qubs_calibr] , identifier[calibr_shots] , identifier[calibr_prog] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[obs_calibr_mean] , identifier[obs_calibr_var] = identifier[_stats_from_measurements] ( identifier[calibr_results] , identifier[d_calibr_qub_idx] , identifier[setting] , identifier[calibr_shots] )
identifier[corrected_mean] = identifier[obs_mean] / identifier[obs_calibr_mean]
identifier[corrected_var] = identifier[ratio_variance] ( identifier[obs_mean] , identifier[obs_var] , identifier[obs_calibr_mean] , identifier[obs_calibr_var] )
keyword[yield] identifier[ExperimentResult] (
identifier[setting] = identifier[setting] ,
identifier[expectation] = identifier[corrected_mean] . identifier[item] (),
identifier[std_err] = identifier[np] . identifier[sqrt] ( identifier[corrected_var] ). identifier[item] (),
identifier[total_counts] = identifier[n_shots] ,
identifier[raw_expectation] = identifier[obs_mean] . identifier[item] (),
identifier[raw_std_err] = identifier[np] . identifier[sqrt] ( identifier[obs_var] ). identifier[item] (),
identifier[calibration_expectation] = identifier[obs_calibr_mean] . identifier[item] (),
identifier[calibration_std_err] = identifier[np] . identifier[sqrt] ( identifier[obs_calibr_var] ). identifier[item] (),
identifier[calibration_counts] = identifier[calibr_shots] ,
)
keyword[elif] identifier[calibrate_readout] keyword[is] keyword[None] :
keyword[yield] identifier[ExperimentResult] (
identifier[setting] = identifier[setting] ,
identifier[expectation] = identifier[obs_mean] . identifier[item] (),
identifier[std_err] = identifier[np] . identifier[sqrt] ( identifier[obs_var] ). identifier[item] (),
identifier[total_counts] = identifier[n_shots] ,
)
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def measure_observables(qc: QuantumComputer, tomo_experiment: TomographyExperiment, n_shots: int=10000, progress_callback=None, active_reset=False, symmetrize_readout: str='exhaustive', calibrate_readout: str='plus-eig'):
"""
Measure all the observables in a TomographyExperiment.
:param qc: A QuantumComputer which can run quantum programs
:param tomo_experiment: A suite of tomographic observables to measure
:param n_shots: The number of shots to take per ExperimentSetting
:param progress_callback: If not None, this function is called each time a group of
settings is run with arguments ``f(i, len(tomo_experiment)`` such that the progress
is ``i / len(tomo_experiment)``.
:param active_reset: Whether to actively reset qubits instead of waiting several
times the coherence length for qubits to decay to |0> naturally. Setting this
to True is much faster but there is a ~1% error per qubit in the reset operation.
Thermal noise from "traditional" reset is not routinely characterized but is of the same
order.
:param symmetrize_readout: Method used to symmetrize the readout errors, i.e. set
p(0|1) = p(1|0). For uncorrelated readout errors, this can be achieved by randomly
selecting between the POVMs {X.D1.X, X.D0.X} and {D0, D1} (where both D0 and D1 are
diagonal). However, here we currently support exhaustive symmetrization and loop through
all possible 2^n POVMs {X/I . POVM . X/I}^n, and obtain symmetrization more generally,
i.e. set p(00|00) = p(01|01) = .. = p(11|11), as well as p(00|01) = p(01|00) etc. If this
is None, no symmetrization is performed. The exhaustive method can be specified by setting
this variable to 'exhaustive' (default value). Set to `None` if no symmetrization is
desired.
:param calibrate_readout: Method used to calibrate the readout results. Currently, the only
method supported is normalizing against the operator's expectation value in its +1
eigenstate, which can be specified by setting this variable to 'plus-eig' (default value).
The preceding symmetrization and this step together yield a more accurate estimation of the observable. Set to `None` if no calibration is desired.
"""
# calibration readout only works with symmetrization turned on
if calibrate_readout is not None and symmetrize_readout is None:
raise ValueError('Readout calibration only works with readout symmetrization turned on') # depends on [control=['if'], data=[]]
# Outer loop over a collection of grouped settings for which we can simultaneously
# estimate.
for (i, settings) in enumerate(tomo_experiment):
log.info(f'Collecting bitstrings for the {len(settings)} settings: {settings}')
# 1.1 Prepare a state according to the amalgam of all setting.in_state
total_prog = Program()
if active_reset:
total_prog += RESET() # depends on [control=['if'], data=[]]
max_weight_in_state = _max_weight_state((setting.in_state for setting in settings))
for oneq_state in max_weight_in_state.states:
total_prog += _one_q_state_prep(oneq_state) # depends on [control=['for'], data=['oneq_state']]
# 1.2 Add in the program
total_prog += tomo_experiment.program
# 1.3 Measure the state according to setting.out_operator
max_weight_out_op = _max_weight_operator((setting.out_operator for setting in settings))
for (qubit, op_str) in max_weight_out_op:
total_prog += _local_pauli_eig_meas(op_str, qubit) # depends on [control=['for'], data=[]]
# 2. Symmetrization
qubits = max_weight_out_op.get_qubits()
if symmetrize_readout == 'exhaustive' and len(qubits) > 0:
(bitstrings, d_qub_idx) = _exhaustive_symmetrization(qc, qubits, n_shots, total_prog) # depends on [control=['if'], data=[]]
elif symmetrize_readout is None and len(qubits) > 0:
total_prog_no_symm = total_prog.copy()
ro = total_prog_no_symm.declare('ro', 'BIT', len(qubits))
d_qub_idx = {}
for (i, q) in enumerate(qubits):
total_prog_no_symm += MEASURE(q, ro[i])
# Keep track of qubit-classical register mapping via dict
d_qub_idx[q] = i # depends on [control=['for'], data=[]]
total_prog_no_symm.wrap_in_numshots_loop(n_shots)
total_prog_no_symm_native = qc.compiler.quil_to_native_quil(total_prog_no_symm)
total_prog_no_symm_bin = qc.compiler.native_quil_to_executable(total_prog_no_symm_native)
bitstrings = qc.run(total_prog_no_symm_bin) # depends on [control=['if'], data=[]]
elif len(qubits) == 0:
# looks like an identity operation
pass # depends on [control=['if'], data=[]]
else:
raise ValueError("Readout symmetrization method must be either 'exhaustive' or None")
if progress_callback is not None:
progress_callback(i, len(tomo_experiment)) # depends on [control=['if'], data=['progress_callback']]
# 3. Post-process
# Inner loop over the grouped settings. They only differ in which qubits' measurements
# we include in the post-processing. For example, if `settings` is Z1, Z2, Z1Z2 and we
# measure (n_shots, n_qubits=2) obs_strings then the full operator value involves selecting
# either the first column, second column, or both and multiplying along the row.
for setting in settings:
# 3.1 Get the term's coefficient so we can multiply it in later.
coeff = complex(setting.out_operator.coefficient)
if not np.isclose(coeff.imag, 0):
raise ValueError(f"{setting}'s out_operator has a complex coefficient.") # depends on [control=['if'], data=[]]
coeff = coeff.real
# 3.2 Special case for measuring the "identity" operator, which doesn't make much
# sense but should happen perfectly.
if is_identity(setting.out_operator):
yield ExperimentResult(setting=setting, expectation=coeff, std_err=0.0, total_counts=n_shots)
continue # depends on [control=['if'], data=[]]
# 3.3 Obtain statistics from result of experiment
(obs_mean, obs_var) = _stats_from_measurements(bitstrings, d_qub_idx, setting, n_shots, coeff)
if calibrate_readout == 'plus-eig':
# 4 Readout calibration
# 4.1 Obtain calibration program
calibr_prog = _calibration_program(qc, tomo_experiment, setting)
# 4.2 Perform symmetrization on the calibration program
if symmetrize_readout == 'exhaustive':
qubs_calibr = setting.out_operator.get_qubits()
calibr_shots = n_shots
(calibr_results, d_calibr_qub_idx) = _exhaustive_symmetrization(qc, qubs_calibr, calibr_shots, calibr_prog) # depends on [control=['if'], data=[]]
else:
raise ValueError("Readout symmetrization method must be either 'exhaustive' or None")
# 4.3 Obtain statistics from the measurement process
(obs_calibr_mean, obs_calibr_var) = _stats_from_measurements(calibr_results, d_calibr_qub_idx, setting, calibr_shots)
# 4.3 Calibrate the readout results
corrected_mean = obs_mean / obs_calibr_mean
corrected_var = ratio_variance(obs_mean, obs_var, obs_calibr_mean, obs_calibr_var)
yield ExperimentResult(setting=setting, expectation=corrected_mean.item(), std_err=np.sqrt(corrected_var).item(), total_counts=n_shots, raw_expectation=obs_mean.item(), raw_std_err=np.sqrt(obs_var).item(), calibration_expectation=obs_calibr_mean.item(), calibration_std_err=np.sqrt(obs_calibr_var).item(), calibration_counts=calibr_shots) # depends on [control=['if'], data=[]]
elif calibrate_readout is None:
# No calibration
yield ExperimentResult(setting=setting, expectation=obs_mean.item(), std_err=np.sqrt(obs_var).item(), total_counts=n_shots) # depends on [control=['if'], data=[]]
else:
raise ValueError("Calibration readout method must be either 'plus-eig' or None") # depends on [control=['for'], data=['setting']] # depends on [control=['for'], data=[]] |
def encode(self, b64=False):
"""Encode the payload for transmission."""
encoded_payload = b''
for pkt in self.packets:
encoded_packet = pkt.encode(b64=b64)
packet_len = len(encoded_packet)
if b64:
encoded_payload += str(packet_len).encode('utf-8') + b':' + \
encoded_packet
else:
binary_len = b''
while packet_len != 0:
binary_len = six.int2byte(packet_len % 10) + binary_len
packet_len = int(packet_len / 10)
if not pkt.binary:
encoded_payload += b'\0'
else:
encoded_payload += b'\1'
encoded_payload += binary_len + b'\xff' + encoded_packet
return encoded_payload | def function[encode, parameter[self, b64]]:
constant[Encode the payload for transmission.]
variable[encoded_payload] assign[=] constant[b'']
for taget[name[pkt]] in starred[name[self].packets] begin[:]
variable[encoded_packet] assign[=] call[name[pkt].encode, parameter[]]
variable[packet_len] assign[=] call[name[len], parameter[name[encoded_packet]]]
if name[b64] begin[:]
<ast.AugAssign object at 0x7da2047e80a0>
return[name[encoded_payload]] | keyword[def] identifier[encode] ( identifier[self] , identifier[b64] = keyword[False] ):
literal[string]
identifier[encoded_payload] = literal[string]
keyword[for] identifier[pkt] keyword[in] identifier[self] . identifier[packets] :
identifier[encoded_packet] = identifier[pkt] . identifier[encode] ( identifier[b64] = identifier[b64] )
identifier[packet_len] = identifier[len] ( identifier[encoded_packet] )
keyword[if] identifier[b64] :
identifier[encoded_payload] += identifier[str] ( identifier[packet_len] ). identifier[encode] ( literal[string] )+ literal[string] + identifier[encoded_packet]
keyword[else] :
identifier[binary_len] = literal[string]
keyword[while] identifier[packet_len] != literal[int] :
identifier[binary_len] = identifier[six] . identifier[int2byte] ( identifier[packet_len] % literal[int] )+ identifier[binary_len]
identifier[packet_len] = identifier[int] ( identifier[packet_len] / literal[int] )
keyword[if] keyword[not] identifier[pkt] . identifier[binary] :
identifier[encoded_payload] += literal[string]
keyword[else] :
identifier[encoded_payload] += literal[string]
identifier[encoded_payload] += identifier[binary_len] + literal[string] + identifier[encoded_packet]
keyword[return] identifier[encoded_payload] | def encode(self, b64=False):
"""Encode the payload for transmission."""
encoded_payload = b''
for pkt in self.packets:
encoded_packet = pkt.encode(b64=b64)
packet_len = len(encoded_packet)
if b64:
encoded_payload += str(packet_len).encode('utf-8') + b':' + encoded_packet # depends on [control=['if'], data=[]]
else:
binary_len = b''
while packet_len != 0:
binary_len = six.int2byte(packet_len % 10) + binary_len
packet_len = int(packet_len / 10) # depends on [control=['while'], data=['packet_len']]
if not pkt.binary:
encoded_payload += b'\x00' # depends on [control=['if'], data=[]]
else:
encoded_payload += b'\x01'
encoded_payload += binary_len + b'\xff' + encoded_packet # depends on [control=['for'], data=['pkt']]
return encoded_payload |
def loads(s, object_pairs_hook=dict):
"""
Parse the contents of the string ``s`` as a simple line-oriented
``.properties`` file and return a `dict` of the key-value pairs.
``s`` may be either a text string or bytes string. If it is a bytes
string, its contents are decoded as Latin-1.
By default, the key-value pairs extracted from ``s`` are combined into a
`dict` with later occurrences of a key overriding previous occurrences of
the same key. To change this behavior, pass a callable as the
``object_pairs_hook`` argument; it will be called with one argument, a
generator of ``(key, value)`` pairs representing the key-value entries in
``s`` (including duplicates) in order of occurrence. `loads` will then
return the value returned by ``object_pairs_hook``.
.. versionchanged:: 0.5.0
Invalid ``\\uXXXX`` escape sequences will now cause an
`InvalidUEscapeError` to be raised
:param string s: the string from which to read the ``.properties`` document
:param callable object_pairs_hook: class or function for combining the
key-value pairs
:rtype: `dict` of text strings or the return value of ``object_pairs_hook``
:raises InvalidUEscapeError: if an invalid ``\\uXXXX`` escape sequence
occurs in the input
"""
fp = BytesIO(s) if isinstance(s, binary_type) else StringIO(s)
return load(fp, object_pairs_hook=object_pairs_hook) | def function[loads, parameter[s, object_pairs_hook]]:
constant[
Parse the contents of the string ``s`` as a simple line-oriented
``.properties`` file and return a `dict` of the key-value pairs.
``s`` may be either a text string or bytes string. If it is a bytes
string, its contents are decoded as Latin-1.
By default, the key-value pairs extracted from ``s`` are combined into a
`dict` with later occurrences of a key overriding previous occurrences of
the same key. To change this behavior, pass a callable as the
``object_pairs_hook`` argument; it will be called with one argument, a
generator of ``(key, value)`` pairs representing the key-value entries in
``s`` (including duplicates) in order of occurrence. `loads` will then
return the value returned by ``object_pairs_hook``.
.. versionchanged:: 0.5.0
Invalid ``\uXXXX`` escape sequences will now cause an
`InvalidUEscapeError` to be raised
:param string s: the string from which to read the ``.properties`` document
:param callable object_pairs_hook: class or function for combining the
key-value pairs
:rtype: `dict` of text strings or the return value of ``object_pairs_hook``
:raises InvalidUEscapeError: if an invalid ``\uXXXX`` escape sequence
occurs in the input
]
variable[fp] assign[=] <ast.IfExp object at 0x7da18f09fca0>
return[call[name[load], parameter[name[fp]]]] | keyword[def] identifier[loads] ( identifier[s] , identifier[object_pairs_hook] = identifier[dict] ):
literal[string]
identifier[fp] = identifier[BytesIO] ( identifier[s] ) keyword[if] identifier[isinstance] ( identifier[s] , identifier[binary_type] ) keyword[else] identifier[StringIO] ( identifier[s] )
keyword[return] identifier[load] ( identifier[fp] , identifier[object_pairs_hook] = identifier[object_pairs_hook] ) | def loads(s, object_pairs_hook=dict):
"""
Parse the contents of the string ``s`` as a simple line-oriented
``.properties`` file and return a `dict` of the key-value pairs.
``s`` may be either a text string or bytes string. If it is a bytes
string, its contents are decoded as Latin-1.
By default, the key-value pairs extracted from ``s`` are combined into a
`dict` with later occurrences of a key overriding previous occurrences of
the same key. To change this behavior, pass a callable as the
``object_pairs_hook`` argument; it will be called with one argument, a
generator of ``(key, value)`` pairs representing the key-value entries in
``s`` (including duplicates) in order of occurrence. `loads` will then
return the value returned by ``object_pairs_hook``.
.. versionchanged:: 0.5.0
Invalid ``\\uXXXX`` escape sequences will now cause an
`InvalidUEscapeError` to be raised
:param string s: the string from which to read the ``.properties`` document
:param callable object_pairs_hook: class or function for combining the
key-value pairs
:rtype: `dict` of text strings or the return value of ``object_pairs_hook``
:raises InvalidUEscapeError: if an invalid ``\\uXXXX`` escape sequence
occurs in the input
"""
fp = BytesIO(s) if isinstance(s, binary_type) else StringIO(s)
return load(fp, object_pairs_hook=object_pairs_hook) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.