| code (string lengths 75 to 104k) | code_sememe (string lengths 47 to 309k) | token_type (string lengths 215 to 214k) | code_dependency (string lengths 75 to 155k) |
|---|---|---|---|
def get_section2usrnts(self):
"""Get dict section2usrnts."""
sec_nts = []
for section_name, _ in self.get_sections_2d():
usrgos = self.get_usrgos_g_section(section_name)
sec_nts.append((section_name, [self.go2nt.get(u) for u in usrgos]))
return cx.OrderedDict(sec_nts) | def function[get_section2usrnts, parameter[self]]:
constant[Get dict section2usrnts.]
variable[sec_nts] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2054a41c0>, <ast.Name object at 0x7da2054a5030>]]] in starred[call[name[self].get_sections_2d, parameter[]]] begin[:]
variable[usrgos] assign[=] call[name[self].get_usrgos_g_section, parameter[name[section_name]]]
call[name[sec_nts].append, parameter[tuple[[<ast.Name object at 0x7da2054a52a0>, <ast.ListComp object at 0x7da2054a7f10>]]]]
return[call[name[cx].OrderedDict, parameter[name[sec_nts]]]] | keyword[def] identifier[get_section2usrnts] ( identifier[self] ):
literal[string]
identifier[sec_nts] =[]
keyword[for] identifier[section_name] , identifier[_] keyword[in] identifier[self] . identifier[get_sections_2d] ():
identifier[usrgos] = identifier[self] . identifier[get_usrgos_g_section] ( identifier[section_name] )
identifier[sec_nts] . identifier[append] (( identifier[section_name] ,[ identifier[self] . identifier[go2nt] . identifier[get] ( identifier[u] ) keyword[for] identifier[u] keyword[in] identifier[usrgos] ]))
keyword[return] identifier[cx] . identifier[OrderedDict] ( identifier[sec_nts] ) | def get_section2usrnts(self):
"""Get dict section2usrnts."""
sec_nts = []
for (section_name, _) in self.get_sections_2d():
usrgos = self.get_usrgos_g_section(section_name)
sec_nts.append((section_name, [self.go2nt.get(u) for u in usrgos])) # depends on [control=['for'], data=[]]
return cx.OrderedDict(sec_nts) |
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret) | def function[enable_i2c_slave, parameter[self, slave_address]]:
constant[Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
]
variable[ret] assign[=] call[name[api].py_aa_i2c_slave_enable, parameter[name[self].handle, name[slave_address], name[self].BUFFER_SIZE, name[self].BUFFER_SIZE]]
call[name[_raise_error_if_negative], parameter[name[ret]]] | keyword[def] identifier[enable_i2c_slave] ( identifier[self] , identifier[slave_address] ):
literal[string]
identifier[ret] = identifier[api] . identifier[py_aa_i2c_slave_enable] ( identifier[self] . identifier[handle] , identifier[slave_address] ,
identifier[self] . identifier[BUFFER_SIZE] , identifier[self] . identifier[BUFFER_SIZE] )
identifier[_raise_error_if_negative] ( identifier[ret] ) | def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address, self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret) |
def add(self, hostname, keytype, key):
"""
Add a host key entry to the table. Any existing entry for a
``(hostname, keytype)`` pair will be replaced.
:param str hostname: the hostname (or IP) to add
:param str keytype: key type (``"ssh-rsa"`` or ``"ssh-dss"``)
:param .PKey key: the key to add
"""
for e in self._entries:
if (hostname in e.hostnames) and (e.key.get_name() == keytype):
e.key = key
return
self._entries.append(HostKeyEntry([hostname], key)) | def function[add, parameter[self, hostname, keytype, key]]:
constant[
Add a host key entry to the table. Any existing entry for a
``(hostname, keytype)`` pair will be replaced.
:param str hostname: the hostname (or IP) to add
:param str keytype: key type (``"ssh-rsa"`` or ``"ssh-dss"``)
:param .PKey key: the key to add
]
for taget[name[e]] in starred[name[self]._entries] begin[:]
if <ast.BoolOp object at 0x7da1b212f490> begin[:]
name[e].key assign[=] name[key]
return[None]
call[name[self]._entries.append, parameter[call[name[HostKeyEntry], parameter[list[[<ast.Name object at 0x7da1b212fa30>]], name[key]]]]] | keyword[def] identifier[add] ( identifier[self] , identifier[hostname] , identifier[keytype] , identifier[key] ):
literal[string]
keyword[for] identifier[e] keyword[in] identifier[self] . identifier[_entries] :
keyword[if] ( identifier[hostname] keyword[in] identifier[e] . identifier[hostnames] ) keyword[and] ( identifier[e] . identifier[key] . identifier[get_name] ()== identifier[keytype] ):
identifier[e] . identifier[key] = identifier[key]
keyword[return]
identifier[self] . identifier[_entries] . identifier[append] ( identifier[HostKeyEntry] ([ identifier[hostname] ], identifier[key] )) | def add(self, hostname, keytype, key):
"""
Add a host key entry to the table. Any existing entry for a
``(hostname, keytype)`` pair will be replaced.
:param str hostname: the hostname (or IP) to add
:param str keytype: key type (``"ssh-rsa"`` or ``"ssh-dss"``)
:param .PKey key: the key to add
"""
for e in self._entries:
if hostname in e.hostnames and e.key.get_name() == keytype:
e.key = key
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e']]
self._entries.append(HostKeyEntry([hostname], key)) |
def apply_patch(self, patch_path):
"""Applies the patch located at *patch_path*. Returns the return code of
the patch command.
"""
# Do not create .orig backup files, and merge files in place.
return self._execute('patch -p1 --no-backup-if-mismatch --merge', stdout=open(os.devnull, 'w'), stdin=open(patch_path, 'r'))[0] | def function[apply_patch, parameter[self, patch_path]]:
constant[Applies the patch located at *patch_path*. Returns the return code of
the patch command.
]
return[call[call[name[self]._execute, parameter[constant[patch -p1 --no-backup-if-mismatch --merge]]]][constant[0]]] | keyword[def] identifier[apply_patch] ( identifier[self] , identifier[patch_path] ):
literal[string]
keyword[return] identifier[self] . identifier[_execute] ( literal[string] , identifier[stdout] = identifier[open] ( identifier[os] . identifier[devnull] , literal[string] ), identifier[stdin] = identifier[open] ( identifier[patch_path] , literal[string] ))[ literal[int] ] | def apply_patch(self, patch_path):
"""Applies the patch located at *patch_path*. Returns the return code of
the patch command.
"""
# Do not create .orig backup files, and merge files in place.
return self._execute('patch -p1 --no-backup-if-mismatch --merge', stdout=open(os.devnull, 'w'), stdin=open(patch_path, 'r'))[0] |
def _readline_insert(self, char, echo, insptr, line):
"""Deal properly with inserted chars in a line."""
if not self._readline_do_echo(echo):
return
# Write out the remainder of the line
self.write(char + ''.join(line[insptr:]))
# Cursor Left to the current insert point
char_count = len(line) - insptr
self.write(self.CODES['CSRLEFT'] * char_count) | def function[_readline_insert, parameter[self, char, echo, insptr, line]]:
constant[Deal properly with inserted chars in a line.]
if <ast.UnaryOp object at 0x7da18dc9a050> begin[:]
return[None]
call[name[self].write, parameter[binary_operation[name[char] + call[constant[].join, parameter[call[name[line]][<ast.Slice object at 0x7da18dc98100>]]]]]]
variable[char_count] assign[=] binary_operation[call[name[len], parameter[name[line]]] - name[insptr]]
call[name[self].write, parameter[binary_operation[call[name[self].CODES][constant[CSRLEFT]] * name[char_count]]]] | keyword[def] identifier[_readline_insert] ( identifier[self] , identifier[char] , identifier[echo] , identifier[insptr] , identifier[line] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_readline_do_echo] ( identifier[echo] ):
keyword[return]
identifier[self] . identifier[write] ( identifier[char] + literal[string] . identifier[join] ( identifier[line] [ identifier[insptr] :]))
identifier[char_count] = identifier[len] ( identifier[line] )- identifier[insptr]
identifier[self] . identifier[write] ( identifier[self] . identifier[CODES] [ literal[string] ]* identifier[char_count] ) | def _readline_insert(self, char, echo, insptr, line):
"""Deal properly with inserted chars in a line."""
if not self._readline_do_echo(echo):
return # depends on [control=['if'], data=[]]
# Write out the remainder of the line
self.write(char + ''.join(line[insptr:]))
# Cursor Left to the current insert point
char_count = len(line) - insptr
self.write(self.CODES['CSRLEFT'] * char_count) |
def morsel_to_cookie(morsel):
"""Convert a Morsel object into a Cookie containing the one k/v pair."""
expires = None
if morsel['max-age']:
expires = time.time() + morsel['max-age']
elif morsel['expires']:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
expires = time.mktime(
time.strptime(morsel['expires'], time_template)) - time.timezone
return create_cookie(
comment=morsel['comment'],
comment_url=bool(morsel['comment']),
discard=False,
domain=morsel['domain'],
expires=expires,
name=morsel.key,
path=morsel['path'],
port=None,
rest={'HttpOnly': morsel['httponly']},
rfc2109=False,
secure=bool(morsel['secure']),
value=morsel.value,
version=morsel['version'] or 0,
) | def function[morsel_to_cookie, parameter[morsel]]:
constant[Convert a Morsel object into a Cookie containing the one k/v pair.]
variable[expires] assign[=] constant[None]
if call[name[morsel]][constant[max-age]] begin[:]
variable[expires] assign[=] binary_operation[call[name[time].time, parameter[]] + call[name[morsel]][constant[max-age]]]
return[call[name[create_cookie], parameter[]]] | keyword[def] identifier[morsel_to_cookie] ( identifier[morsel] ):
literal[string]
identifier[expires] = keyword[None]
keyword[if] identifier[morsel] [ literal[string] ]:
identifier[expires] = identifier[time] . identifier[time] ()+ identifier[morsel] [ literal[string] ]
keyword[elif] identifier[morsel] [ literal[string] ]:
identifier[time_template] = literal[string]
identifier[expires] = identifier[time] . identifier[mktime] (
identifier[time] . identifier[strptime] ( identifier[morsel] [ literal[string] ], identifier[time_template] ))- identifier[time] . identifier[timezone]
keyword[return] identifier[create_cookie] (
identifier[comment] = identifier[morsel] [ literal[string] ],
identifier[comment_url] = identifier[bool] ( identifier[morsel] [ literal[string] ]),
identifier[discard] = keyword[False] ,
identifier[domain] = identifier[morsel] [ literal[string] ],
identifier[expires] = identifier[expires] ,
identifier[name] = identifier[morsel] . identifier[key] ,
identifier[path] = identifier[morsel] [ literal[string] ],
identifier[port] = keyword[None] ,
identifier[rest] ={ literal[string] : identifier[morsel] [ literal[string] ]},
identifier[rfc2109] = keyword[False] ,
identifier[secure] = identifier[bool] ( identifier[morsel] [ literal[string] ]),
identifier[value] = identifier[morsel] . identifier[value] ,
identifier[version] = identifier[morsel] [ literal[string] ] keyword[or] literal[int] ,
) | def morsel_to_cookie(morsel):
"""Convert a Morsel object into a Cookie containing the one k/v pair."""
expires = None
if morsel['max-age']:
expires = time.time() + morsel['max-age'] # depends on [control=['if'], data=[]]
elif morsel['expires']:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
expires = time.mktime(time.strptime(morsel['expires'], time_template)) - time.timezone # depends on [control=['if'], data=[]]
return create_cookie(comment=morsel['comment'], comment_url=bool(morsel['comment']), discard=False, domain=morsel['domain'], expires=expires, name=morsel.key, path=morsel['path'], port=None, rest={'HttpOnly': morsel['httponly']}, rfc2109=False, secure=bool(morsel['secure']), value=morsel.value, version=morsel['version'] or 0) |
def post(self, query):
"""
Wrapper around ``_do_post()`` to handle accounts that require
sending back session cookies (``self.set_cookies`` True).
"""
res, response = self._do_post(query)
cookies = res.getheader('Set-Cookie', None)
if len(response) == 0 and cookies is not None and res.status == 200:
logging.debug('Got 0-length 200 response with Set-Cookies header; '
'retrying request with cookies')
_, response = self._do_post(query, [('Cookie', cookies)])
return response | def function[post, parameter[self, query]]:
constant[
Wrapper around ``_do_post()`` to handle accounts that require
sending back session cookies (``self.set_cookies`` True).
]
<ast.Tuple object at 0x7da18f00c4f0> assign[=] call[name[self]._do_post, parameter[name[query]]]
variable[cookies] assign[=] call[name[res].getheader, parameter[constant[Set-Cookie], constant[None]]]
if <ast.BoolOp object at 0x7da18f00f520> begin[:]
call[name[logging].debug, parameter[constant[Got 0-length 200 response with Set-Cookies header; retrying request with cookies]]]
<ast.Tuple object at 0x7da18f00fd90> assign[=] call[name[self]._do_post, parameter[name[query], list[[<ast.Tuple object at 0x7da18f00dbd0>]]]]
return[name[response]] | keyword[def] identifier[post] ( identifier[self] , identifier[query] ):
literal[string]
identifier[res] , identifier[response] = identifier[self] . identifier[_do_post] ( identifier[query] )
identifier[cookies] = identifier[res] . identifier[getheader] ( literal[string] , keyword[None] )
keyword[if] identifier[len] ( identifier[response] )== literal[int] keyword[and] identifier[cookies] keyword[is] keyword[not] keyword[None] keyword[and] identifier[res] . identifier[status] == literal[int] :
identifier[logging] . identifier[debug] ( literal[string]
literal[string] )
identifier[_] , identifier[response] = identifier[self] . identifier[_do_post] ( identifier[query] ,[( literal[string] , identifier[cookies] )])
keyword[return] identifier[response] | def post(self, query):
"""
Wrapper around ``_do_post()`` to handle accounts that require
sending back session cookies (``self.set_cookies`` True).
"""
(res, response) = self._do_post(query)
cookies = res.getheader('Set-Cookie', None)
if len(response) == 0 and cookies is not None and (res.status == 200):
logging.debug('Got 0-length 200 response with Set-Cookies header; retrying request with cookies')
(_, response) = self._do_post(query, [('Cookie', cookies)]) # depends on [control=['if'], data=[]]
return response |
def sign(wheelfile, replace=False, get_keyring=get_keyring):
"""Sign a wheel"""
warn_signatures()
WheelKeys, keyring = get_keyring()
ed25519ll = signatures.get_ed25519ll()
wf = WheelFile(wheelfile, append=True)
wk = WheelKeys().load()
name = wf.parsed_filename.group('name')
sign_with = wk.signers(name)[0]
print("Signing {} with {}".format(name, sign_with[1]))
vk = sign_with[1]
kr = keyring.get_keyring()
sk = kr.get_password('wheel', vk)
keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)),
urlsafe_b64decode(binary(sk)))
record_name = wf.distinfo_name + '/RECORD'
sig_name = wf.distinfo_name + '/RECORD.jws'
if sig_name in wf.zipfile.namelist():
raise WheelError("Wheel is already signed.")
record_data = wf.zipfile.read(record_name)
payload = {"hash": "sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))}
sig = signatures.sign(payload, keypair)
wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))
wf.zipfile.close() | def function[sign, parameter[wheelfile, replace, get_keyring]]:
constant[Sign a wheel]
call[name[warn_signatures], parameter[]]
<ast.Tuple object at 0x7da20e954f10> assign[=] call[name[get_keyring], parameter[]]
variable[ed25519ll] assign[=] call[name[signatures].get_ed25519ll, parameter[]]
variable[wf] assign[=] call[name[WheelFile], parameter[name[wheelfile]]]
variable[wk] assign[=] call[call[name[WheelKeys], parameter[]].load, parameter[]]
variable[name] assign[=] call[name[wf].parsed_filename.group, parameter[constant[name]]]
variable[sign_with] assign[=] call[call[name[wk].signers, parameter[name[name]]]][constant[0]]
call[name[print], parameter[call[constant[Signing {} with {}].format, parameter[name[name], call[name[sign_with]][constant[1]]]]]]
variable[vk] assign[=] call[name[sign_with]][constant[1]]
variable[kr] assign[=] call[name[keyring].get_keyring, parameter[]]
variable[sk] assign[=] call[name[kr].get_password, parameter[constant[wheel], name[vk]]]
variable[keypair] assign[=] call[name[ed25519ll].Keypair, parameter[call[name[urlsafe_b64decode], parameter[call[name[binary], parameter[name[vk]]]]], call[name[urlsafe_b64decode], parameter[call[name[binary], parameter[name[sk]]]]]]]
variable[record_name] assign[=] binary_operation[name[wf].distinfo_name + constant[/RECORD]]
variable[sig_name] assign[=] binary_operation[name[wf].distinfo_name + constant[/RECORD.jws]]
if compare[name[sig_name] in call[name[wf].zipfile.namelist, parameter[]]] begin[:]
<ast.Raise object at 0x7da20e956f50>
variable[record_data] assign[=] call[name[wf].zipfile.read, parameter[name[record_name]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da207f001f0>], [<ast.BinOp object at 0x7da207f02200>]]
variable[sig] assign[=] call[name[signatures].sign, parameter[name[payload], name[keypair]]]
call[name[wf].zipfile.writestr, parameter[name[sig_name], call[name[json].dumps, parameter[name[sig]]]]]
call[name[wf].zipfile.close, parameter[]] | keyword[def] identifier[sign] ( identifier[wheelfile] , identifier[replace] = keyword[False] , identifier[get_keyring] = identifier[get_keyring] ):
literal[string]
identifier[warn_signatures] ()
identifier[WheelKeys] , identifier[keyring] = identifier[get_keyring] ()
identifier[ed25519ll] = identifier[signatures] . identifier[get_ed25519ll] ()
identifier[wf] = identifier[WheelFile] ( identifier[wheelfile] , identifier[append] = keyword[True] )
identifier[wk] = identifier[WheelKeys] (). identifier[load] ()
identifier[name] = identifier[wf] . identifier[parsed_filename] . identifier[group] ( literal[string] )
identifier[sign_with] = identifier[wk] . identifier[signers] ( identifier[name] )[ literal[int] ]
identifier[print] ( literal[string] . identifier[format] ( identifier[name] , identifier[sign_with] [ literal[int] ]))
identifier[vk] = identifier[sign_with] [ literal[int] ]
identifier[kr] = identifier[keyring] . identifier[get_keyring] ()
identifier[sk] = identifier[kr] . identifier[get_password] ( literal[string] , identifier[vk] )
identifier[keypair] = identifier[ed25519ll] . identifier[Keypair] ( identifier[urlsafe_b64decode] ( identifier[binary] ( identifier[vk] )),
identifier[urlsafe_b64decode] ( identifier[binary] ( identifier[sk] )))
identifier[record_name] = identifier[wf] . identifier[distinfo_name] + literal[string]
identifier[sig_name] = identifier[wf] . identifier[distinfo_name] + literal[string]
keyword[if] identifier[sig_name] keyword[in] identifier[wf] . identifier[zipfile] . identifier[namelist] ():
keyword[raise] identifier[WheelError] ( literal[string] )
identifier[record_data] = identifier[wf] . identifier[zipfile] . identifier[read] ( identifier[record_name] )
identifier[payload] ={ literal[string] : literal[string] + identifier[native] ( identifier[urlsafe_b64encode] ( identifier[hashlib] . identifier[sha256] ( identifier[record_data] ). identifier[digest] ()))}
identifier[sig] = identifier[signatures] . identifier[sign] ( identifier[payload] , identifier[keypair] )
identifier[wf] . identifier[zipfile] . identifier[writestr] ( identifier[sig_name] , identifier[json] . identifier[dumps] ( identifier[sig] , identifier[sort_keys] = keyword[True] ))
identifier[wf] . identifier[zipfile] . identifier[close] () | def sign(wheelfile, replace=False, get_keyring=get_keyring):
"""Sign a wheel"""
warn_signatures()
(WheelKeys, keyring) = get_keyring()
ed25519ll = signatures.get_ed25519ll()
wf = WheelFile(wheelfile, append=True)
wk = WheelKeys().load()
name = wf.parsed_filename.group('name')
sign_with = wk.signers(name)[0]
print('Signing {} with {}'.format(name, sign_with[1]))
vk = sign_with[1]
kr = keyring.get_keyring()
sk = kr.get_password('wheel', vk)
keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)), urlsafe_b64decode(binary(sk)))
record_name = wf.distinfo_name + '/RECORD'
sig_name = wf.distinfo_name + '/RECORD.jws'
if sig_name in wf.zipfile.namelist():
raise WheelError('Wheel is already signed.') # depends on [control=['if'], data=[]]
record_data = wf.zipfile.read(record_name)
payload = {'hash': 'sha256=' + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))}
sig = signatures.sign(payload, keypair)
wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))
wf.zipfile.close() |
def _create_path(self, *args):
"""Create the URL path with the Fred endpoint and given arguments."""
args = filter(None, args)
path = self.endpoint + '/'.join(args)
return path | def function[_create_path, parameter[self]]:
constant[Create the URL path with the Fred endpoint and given arguments.]
variable[args] assign[=] call[name[filter], parameter[constant[None], name[args]]]
variable[path] assign[=] binary_operation[name[self].endpoint + call[constant[/].join, parameter[name[args]]]]
return[name[path]] | keyword[def] identifier[_create_path] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[args] = identifier[filter] ( keyword[None] , identifier[args] )
identifier[path] = identifier[self] . identifier[endpoint] + literal[string] . identifier[join] ( identifier[args] )
keyword[return] identifier[path] | def _create_path(self, *args):
"""Create the URL path with the Fred endpoint and given arguments."""
args = filter(None, args)
path = self.endpoint + '/'.join(args)
return path |
def _merge_sorted_items(self, index):
""" load a partition from disk, then sort and group by key """
def load_partition(j):
path = self._get_spill_dir(j)
p = os.path.join(path, str(index))
with open(p, 'rb', 65536) as f:
for v in self.serializer.load_stream(f):
yield v
disk_items = [load_partition(j) for j in range(self.spills)]
if self._sorted:
# all the partitions are already sorted
sorted_items = heapq.merge(disk_items, key=operator.itemgetter(0))
else:
# Flatten the combined values, so it will not consume huge
# memory during merging sort.
ser = self.flattened_serializer()
sorter = ExternalSorter(self.memory_limit, ser)
sorted_items = sorter.sorted(itertools.chain(*disk_items),
key=operator.itemgetter(0))
return ((k, vs) for k, vs in GroupByKey(sorted_items)) | def function[_merge_sorted_items, parameter[self, index]]:
constant[ load a partition from disk, then sort and group by key ]
def function[load_partition, parameter[j]]:
variable[path] assign[=] call[name[self]._get_spill_dir, parameter[name[j]]]
variable[p] assign[=] call[name[os].path.join, parameter[name[path], call[name[str], parameter[name[index]]]]]
with call[name[open], parameter[name[p], constant[rb], constant[65536]]] begin[:]
for taget[name[v]] in starred[call[name[self].serializer.load_stream, parameter[name[f]]]] begin[:]
<ast.Yield object at 0x7da1b20a9d20>
variable[disk_items] assign[=] <ast.ListComp object at 0x7da1b20aba90>
if name[self]._sorted begin[:]
variable[sorted_items] assign[=] call[name[heapq].merge, parameter[name[disk_items]]]
return[<ast.GeneratorExp object at 0x7da1b20a88e0>] | keyword[def] identifier[_merge_sorted_items] ( identifier[self] , identifier[index] ):
literal[string]
keyword[def] identifier[load_partition] ( identifier[j] ):
identifier[path] = identifier[self] . identifier[_get_spill_dir] ( identifier[j] )
identifier[p] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[str] ( identifier[index] ))
keyword[with] identifier[open] ( identifier[p] , literal[string] , literal[int] ) keyword[as] identifier[f] :
keyword[for] identifier[v] keyword[in] identifier[self] . identifier[serializer] . identifier[load_stream] ( identifier[f] ):
keyword[yield] identifier[v]
identifier[disk_items] =[ identifier[load_partition] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[spills] )]
keyword[if] identifier[self] . identifier[_sorted] :
identifier[sorted_items] = identifier[heapq] . identifier[merge] ( identifier[disk_items] , identifier[key] = identifier[operator] . identifier[itemgetter] ( literal[int] ))
keyword[else] :
identifier[ser] = identifier[self] . identifier[flattened_serializer] ()
identifier[sorter] = identifier[ExternalSorter] ( identifier[self] . identifier[memory_limit] , identifier[ser] )
identifier[sorted_items] = identifier[sorter] . identifier[sorted] ( identifier[itertools] . identifier[chain] (* identifier[disk_items] ),
identifier[key] = identifier[operator] . identifier[itemgetter] ( literal[int] ))
keyword[return] (( identifier[k] , identifier[vs] ) keyword[for] identifier[k] , identifier[vs] keyword[in] identifier[GroupByKey] ( identifier[sorted_items] )) | def _merge_sorted_items(self, index):
""" load a partition from disk, then sort and group by key """
def load_partition(j):
path = self._get_spill_dir(j)
p = os.path.join(path, str(index))
with open(p, 'rb', 65536) as f:
for v in self.serializer.load_stream(f):
yield v # depends on [control=['for'], data=['v']] # depends on [control=['with'], data=['f']]
disk_items = [load_partition(j) for j in range(self.spills)]
if self._sorted:
# all the partitions are already sorted
sorted_items = heapq.merge(disk_items, key=operator.itemgetter(0)) # depends on [control=['if'], data=[]]
else:
# Flatten the combined values, so it will not consume huge
# memory during merging sort.
ser = self.flattened_serializer()
sorter = ExternalSorter(self.memory_limit, ser)
sorted_items = sorter.sorted(itertools.chain(*disk_items), key=operator.itemgetter(0))
return ((k, vs) for (k, vs) in GroupByKey(sorted_items)) |
def main():
"""Just print out some event infomation when the gamepad is used."""
while 1:
events = get_gamepad()
for event in events:
print(event.ev_type, event.code, event.state) | def function[main, parameter[]]:
constant[Just print out some event infomation when the gamepad is used.]
while constant[1] begin[:]
variable[events] assign[=] call[name[get_gamepad], parameter[]]
for taget[name[event]] in starred[name[events]] begin[:]
call[name[print], parameter[name[event].ev_type, name[event].code, name[event].state]] | keyword[def] identifier[main] ():
literal[string]
keyword[while] literal[int] :
identifier[events] = identifier[get_gamepad] ()
keyword[for] identifier[event] keyword[in] identifier[events] :
identifier[print] ( identifier[event] . identifier[ev_type] , identifier[event] . identifier[code] , identifier[event] . identifier[state] ) | def main():
"""Just print out some event infomation when the gamepad is used."""
while 1:
events = get_gamepad()
for event in events:
print(event.ev_type, event.code, event.state) # depends on [control=['for'], data=['event']] # depends on [control=['while'], data=[]] |
def wrap_worker__run(*args, **kwargs):
"""
While the strategy is active, rewrite connection_loader.get() calls for
some transports into requests for a compatible Mitogen transport.
"""
# Ignore parent's attempts to murder us when we still need to write
# profiling output.
if mitogen.core._profile_hook.__name__ != '_profile_hook':
signal.signal(signal.SIGTERM, signal.SIG_IGN)
ansible_mitogen.logging.set_process_name('task')
ansible_mitogen.affinity.policy.assign_worker()
return mitogen.core._profile_hook('WorkerProcess',
lambda: worker__run(*args, **kwargs)
) | def function[wrap_worker__run, parameter[]]:
constant[
While the strategy is active, rewrite connection_loader.get() calls for
some transports into requests for a compatible Mitogen transport.
]
if compare[name[mitogen].core._profile_hook.__name__ not_equal[!=] constant[_profile_hook]] begin[:]
call[name[signal].signal, parameter[name[signal].SIGTERM, name[signal].SIG_IGN]]
call[name[ansible_mitogen].logging.set_process_name, parameter[constant[task]]]
call[name[ansible_mitogen].affinity.policy.assign_worker, parameter[]]
return[call[name[mitogen].core._profile_hook, parameter[constant[WorkerProcess], <ast.Lambda object at 0x7da1b1d23c70>]]] | keyword[def] identifier[wrap_worker__run] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[mitogen] . identifier[core] . identifier[_profile_hook] . identifier[__name__] != literal[string] :
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGTERM] , identifier[signal] . identifier[SIG_IGN] )
identifier[ansible_mitogen] . identifier[logging] . identifier[set_process_name] ( literal[string] )
identifier[ansible_mitogen] . identifier[affinity] . identifier[policy] . identifier[assign_worker] ()
keyword[return] identifier[mitogen] . identifier[core] . identifier[_profile_hook] ( literal[string] ,
keyword[lambda] : identifier[worker__run] (* identifier[args] ,** identifier[kwargs] )
) | def wrap_worker__run(*args, **kwargs):
"""
While the strategy is active, rewrite connection_loader.get() calls for
some transports into requests for a compatible Mitogen transport.
"""
# Ignore parent's attempts to murder us when we still need to write
# profiling output.
if mitogen.core._profile_hook.__name__ != '_profile_hook':
signal.signal(signal.SIGTERM, signal.SIG_IGN) # depends on [control=['if'], data=[]]
ansible_mitogen.logging.set_process_name('task')
ansible_mitogen.affinity.policy.assign_worker()
return mitogen.core._profile_hook('WorkerProcess', lambda : worker__run(*args, **kwargs)) |
def access_for(self, roles=None, actor=None, anchors=[]):
"""
Return a proxy object that limits read and write access to attributes
based on the actor's roles. If the ``roles`` parameter isn't
provided, :meth:`roles_for` is called with the other parameters::
# This typical call:
obj.access_for(actor=current_auth.actor)
# Is shorthand for:
obj.access_for(roles=obj.roles_for(actor=current_auth.actor))
"""
if roles is None:
roles = self.roles_for(actor=actor, anchors=anchors)
elif actor is not None or anchors:
raise TypeError('If roles are specified, actor/anchors must not be specified')
return RoleAccessProxy(self, roles=roles) | def function[access_for, parameter[self, roles, actor, anchors]]:
constant[
Return a proxy object that limits read and write access to attributes
based on the actor's roles. If the ``roles`` parameter isn't
provided, :meth:`roles_for` is called with the other parameters::
# This typical call:
obj.access_for(actor=current_auth.actor)
# Is shorthand for:
obj.access_for(roles=obj.roles_for(actor=current_auth.actor))
]
if compare[name[roles] is constant[None]] begin[:]
variable[roles] assign[=] call[name[self].roles_for, parameter[]]
return[call[name[RoleAccessProxy], parameter[name[self]]]] | keyword[def] identifier[access_for] ( identifier[self] , identifier[roles] = keyword[None] , identifier[actor] = keyword[None] , identifier[anchors] =[]):
literal[string]
keyword[if] identifier[roles] keyword[is] keyword[None] :
identifier[roles] = identifier[self] . identifier[roles_for] ( identifier[actor] = identifier[actor] , identifier[anchors] = identifier[anchors] )
keyword[elif] identifier[actor] keyword[is] keyword[not] keyword[None] keyword[or] identifier[anchors] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[return] identifier[RoleAccessProxy] ( identifier[self] , identifier[roles] = identifier[roles] ) | def access_for(self, roles=None, actor=None, anchors=[]):
"""
Return a proxy object that limits read and write access to attributes
based on the actor's roles. If the ``roles`` parameter isn't
provided, :meth:`roles_for` is called with the other parameters::
# This typical call:
obj.access_for(actor=current_auth.actor)
# Is shorthand for:
obj.access_for(roles=obj.roles_for(actor=current_auth.actor))
"""
if roles is None:
roles = self.roles_for(actor=actor, anchors=anchors) # depends on [control=['if'], data=['roles']]
elif actor is not None or anchors:
raise TypeError('If roles are specified, actor/anchors must not be specified') # depends on [control=['if'], data=[]]
return RoleAccessProxy(self, roles=roles) |
def get_stored_metadata(self, temp_ver):
"""
Retrieves the metadata for the given template version from the store
Args:
temp_ver (TemplateVersion): template version to retrieve the
metadata for
Returns:
dict: the metadata of the given template version
"""
with open(self._prefixed('%s.metadata' % temp_ver.name)) as f:
return json.load(f) | def function[get_stored_metadata, parameter[self, temp_ver]]:
constant[
Retrieves the metadata for the given template version from the store
Args:
temp_ver (TemplateVersion): template version to retrieve the
metadata for
Returns:
dict: the metadata of the given template version
]
with call[name[open], parameter[call[name[self]._prefixed, parameter[binary_operation[constant[%s.metadata] <ast.Mod object at 0x7da2590d6920> name[temp_ver].name]]]]] begin[:]
return[call[name[json].load, parameter[name[f]]]] | keyword[def] identifier[get_stored_metadata] ( identifier[self] , identifier[temp_ver] ):
literal[string]
keyword[with] identifier[open] ( identifier[self] . identifier[_prefixed] ( literal[string] % identifier[temp_ver] . identifier[name] )) keyword[as] identifier[f] :
keyword[return] identifier[json] . identifier[load] ( identifier[f] ) | def get_stored_metadata(self, temp_ver):
"""
Retrieves the metadata for the given template version from the store
Args:
temp_ver (TemplateVersion): template version to retrieve the
metadata for
Returns:
dict: the metadata of the given template version
"""
with open(self._prefixed('%s.metadata' % temp_ver.name)) as f:
return json.load(f) # depends on [control=['with'], data=['f']] |
def validation_schema(name):
"""Return json schema for json validation."""
schemas = {
'processor': 'processSchema.json',
'descriptor': 'descriptorSchema.json',
'field': 'fieldSchema.json',
'type': 'typeSchema.json',
}
if name not in schemas:
raise ValueError()
field_schema_file = finders.find('flow/{}'.format(schemas['field']), all=True)[0]
with open(field_schema_file, 'r') as fn:
field_schema = fn.read()
if name == 'field':
return json.loads(field_schema.replace('{{PARENT}}', ''))
schema_file = finders.find('flow/{}'.format(schemas[name]), all=True)[0]
with open(schema_file, 'r') as fn:
schema = fn.read()
return json.loads(schema.replace('{{FIELD}}', field_schema).replace('{{PARENT}}', '/field')) | def function[validation_schema, parameter[name]]:
constant[Return json schema for json validation.]
variable[schemas] assign[=] dictionary[[<ast.Constant object at 0x7da1b19b7400>, <ast.Constant object at 0x7da1b19b6e60>, <ast.Constant object at 0x7da1b19b6b90>, <ast.Constant object at 0x7da1b19b7160>], [<ast.Constant object at 0x7da1b19b7b80>, <ast.Constant object at 0x7da1b19b6ef0>, <ast.Constant object at 0x7da1b19b5e70>, <ast.Constant object at 0x7da1b19b7bb0>]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[schemas]] begin[:]
<ast.Raise object at 0x7da1b19b5de0>
variable[field_schema_file] assign[=] call[call[name[finders].find, parameter[call[constant[flow/{}].format, parameter[call[name[schemas]][constant[field]]]]]]][constant[0]]
with call[name[open], parameter[name[field_schema_file], constant[r]]] begin[:]
variable[field_schema] assign[=] call[name[fn].read, parameter[]]
if compare[name[name] equal[==] constant[field]] begin[:]
return[call[name[json].loads, parameter[call[name[field_schema].replace, parameter[constant[{{PARENT}}], constant[]]]]]]
variable[schema_file] assign[=] call[call[name[finders].find, parameter[call[constant[flow/{}].format, parameter[call[name[schemas]][name[name]]]]]]][constant[0]]
with call[name[open], parameter[name[schema_file], constant[r]]] begin[:]
variable[schema] assign[=] call[name[fn].read, parameter[]]
return[call[name[json].loads, parameter[call[call[name[schema].replace, parameter[constant[{{FIELD}}], name[field_schema]]].replace, parameter[constant[{{PARENT}}], constant[/field]]]]]] | keyword[def] identifier[validation_schema] ( identifier[name] ):
literal[string]
identifier[schemas] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
keyword[if] identifier[name] keyword[not] keyword[in] identifier[schemas] :
keyword[raise] identifier[ValueError] ()
identifier[field_schema_file] = identifier[finders] . identifier[find] ( literal[string] . identifier[format] ( identifier[schemas] [ literal[string] ]), identifier[all] = keyword[True] )[ literal[int] ]
keyword[with] identifier[open] ( identifier[field_schema_file] , literal[string] ) keyword[as] identifier[fn] :
identifier[field_schema] = identifier[fn] . identifier[read] ()
keyword[if] identifier[name] == literal[string] :
keyword[return] identifier[json] . identifier[loads] ( identifier[field_schema] . identifier[replace] ( literal[string] , literal[string] ))
identifier[schema_file] = identifier[finders] . identifier[find] ( literal[string] . identifier[format] ( identifier[schemas] [ identifier[name] ]), identifier[all] = keyword[True] )[ literal[int] ]
keyword[with] identifier[open] ( identifier[schema_file] , literal[string] ) keyword[as] identifier[fn] :
identifier[schema] = identifier[fn] . identifier[read] ()
keyword[return] identifier[json] . identifier[loads] ( identifier[schema] . identifier[replace] ( literal[string] , identifier[field_schema] ). identifier[replace] ( literal[string] , literal[string] )) | def validation_schema(name):
"""Return json schema for json validation."""
schemas = {'processor': 'processSchema.json', 'descriptor': 'descriptorSchema.json', 'field': 'fieldSchema.json', 'type': 'typeSchema.json'}
if name not in schemas:
raise ValueError() # depends on [control=['if'], data=[]]
field_schema_file = finders.find('flow/{}'.format(schemas['field']), all=True)[0]
with open(field_schema_file, 'r') as fn:
field_schema = fn.read() # depends on [control=['with'], data=['fn']]
if name == 'field':
return json.loads(field_schema.replace('{{PARENT}}', '')) # depends on [control=['if'], data=[]]
schema_file = finders.find('flow/{}'.format(schemas[name]), all=True)[0]
with open(schema_file, 'r') as fn:
schema = fn.read() # depends on [control=['with'], data=['fn']]
return json.loads(schema.replace('{{FIELD}}', field_schema).replace('{{PARENT}}', '/field')) |
def inverse_distance_to_grid(xp, yp, variable, grid_x, grid_y, r, gamma=None, kappa=None,
min_neighbors=3, kind='cressman'):
r"""Generate an inverse distance interpolation of the given points to a regular grid.
Values are assigned to the given grid using inverse distance weighting based on either
[Cressman1959]_ or [Barnes1964]_. The Barnes implementation used here based on [Koch1983]_.
Parameters
----------
xp: (N, ) ndarray
x-coordinates of observations.
yp: (N, ) ndarray
y-coordinates of observations.
variable: (N, ) ndarray
observation values associated with (xp, yp) pairs.
IE, variable[i] is a unique observation at (xp[i], yp[i]).
grid_x: (M, 2) ndarray
Meshgrid associated with x dimension.
grid_y: (M, 2) ndarray
Meshgrid associated with y dimension.
r: float
Radius from grid center, within which observations
are considered and weighted.
gamma: float
Adjustable smoothing parameter for the barnes interpolation. Default None.
kappa: float
Response parameter for barnes interpolation. Default None.
min_neighbors: int
Minimum number of neighbors needed to perform barnes or cressman interpolation
for a point. Default is 3.
kind: str
Specify what inverse distance weighting interpolation to use.
Options: 'cressman' or 'barnes'. Default 'cressman'
Returns
-------
img: (M, N) ndarray
Interpolated values on a 2-dimensional grid
See Also
--------
inverse_distance_to_points
"""
# Handle grid-to-points conversion, and use function from `interpolation`
points_obs = list(zip(xp, yp))
points_grid = generate_grid_coords(grid_x, grid_y)
img = inverse_distance_to_points(points_obs, variable, points_grid, r, gamma=gamma,
kappa=kappa, min_neighbors=min_neighbors, kind=kind)
return img.reshape(grid_x.shape) | def function[inverse_distance_to_grid, parameter[xp, yp, variable, grid_x, grid_y, r, gamma, kappa, min_neighbors, kind]]:
constant[Generate an inverse distance interpolation of the given points to a regular grid.
Values are assigned to the given grid using inverse distance weighting based on either
[Cressman1959]_ or [Barnes1964]_. The Barnes implementation used here based on [Koch1983]_.
Parameters
----------
xp: (N, ) ndarray
x-coordinates of observations.
yp: (N, ) ndarray
y-coordinates of observations.
variable: (N, ) ndarray
observation values associated with (xp, yp) pairs.
IE, variable[i] is a unique observation at (xp[i], yp[i]).
grid_x: (M, 2) ndarray
Meshgrid associated with x dimension.
grid_y: (M, 2) ndarray
Meshgrid associated with y dimension.
r: float
Radius from grid center, within which observations
are considered and weighted.
gamma: float
Adjustable smoothing parameter for the barnes interpolation. Default None.
kappa: float
Response parameter for barnes interpolation. Default None.
min_neighbors: int
Minimum number of neighbors needed to perform barnes or cressman interpolation
for a point. Default is 3.
kind: str
Specify what inverse distance weighting interpolation to use.
Options: 'cressman' or 'barnes'. Default 'cressman'
Returns
-------
img: (M, N) ndarray
Interpolated values on a 2-dimensional grid
See Also
--------
inverse_distance_to_points
]
variable[points_obs] assign[=] call[name[list], parameter[call[name[zip], parameter[name[xp], name[yp]]]]]
variable[points_grid] assign[=] call[name[generate_grid_coords], parameter[name[grid_x], name[grid_y]]]
variable[img] assign[=] call[name[inverse_distance_to_points], parameter[name[points_obs], name[variable], name[points_grid], name[r]]]
return[call[name[img].reshape, parameter[name[grid_x].shape]]] | keyword[def] identifier[inverse_distance_to_grid] ( identifier[xp] , identifier[yp] , identifier[variable] , identifier[grid_x] , identifier[grid_y] , identifier[r] , identifier[gamma] = keyword[None] , identifier[kappa] = keyword[None] ,
identifier[min_neighbors] = literal[int] , identifier[kind] = literal[string] ):
literal[string]
identifier[points_obs] = identifier[list] ( identifier[zip] ( identifier[xp] , identifier[yp] ))
identifier[points_grid] = identifier[generate_grid_coords] ( identifier[grid_x] , identifier[grid_y] )
identifier[img] = identifier[inverse_distance_to_points] ( identifier[points_obs] , identifier[variable] , identifier[points_grid] , identifier[r] , identifier[gamma] = identifier[gamma] ,
identifier[kappa] = identifier[kappa] , identifier[min_neighbors] = identifier[min_neighbors] , identifier[kind] = identifier[kind] )
keyword[return] identifier[img] . identifier[reshape] ( identifier[grid_x] . identifier[shape] ) | def inverse_distance_to_grid(xp, yp, variable, grid_x, grid_y, r, gamma=None, kappa=None, min_neighbors=3, kind='cressman'):
"""Generate an inverse distance interpolation of the given points to a regular grid.
Values are assigned to the given grid using inverse distance weighting based on either
[Cressman1959]_ or [Barnes1964]_. The Barnes implementation used here based on [Koch1983]_.
Parameters
----------
xp: (N, ) ndarray
x-coordinates of observations.
yp: (N, ) ndarray
y-coordinates of observations.
variable: (N, ) ndarray
observation values associated with (xp, yp) pairs.
IE, variable[i] is a unique observation at (xp[i], yp[i]).
grid_x: (M, 2) ndarray
Meshgrid associated with x dimension.
grid_y: (M, 2) ndarray
Meshgrid associated with y dimension.
r: float
Radius from grid center, within which observations
are considered and weighted.
gamma: float
Adjustable smoothing parameter for the barnes interpolation. Default None.
kappa: float
Response parameter for barnes interpolation. Default None.
min_neighbors: int
Minimum number of neighbors needed to perform barnes or cressman interpolation
for a point. Default is 3.
kind: str
Specify what inverse distance weighting interpolation to use.
Options: 'cressman' or 'barnes'. Default 'cressman'
Returns
-------
img: (M, N) ndarray
Interpolated values on a 2-dimensional grid
See Also
--------
inverse_distance_to_points
"""
# Handle grid-to-points conversion, and use function from `interpolation`
points_obs = list(zip(xp, yp))
points_grid = generate_grid_coords(grid_x, grid_y)
img = inverse_distance_to_points(points_obs, variable, points_grid, r, gamma=gamma, kappa=kappa, min_neighbors=min_neighbors, kind=kind)
return img.reshape(grid_x.shape) |
def load(self):
"""Load table
Keeps only rows with annual average defined (full year data available).
Returns:
pandas.DataFrame: loaded data
"""
df = pd.read_excel(self.input_file, skiprows=11)
df = df.dropna(subset=['Annual'])
return df | def function[load, parameter[self]]:
constant[Load table
Keeps only rows with annual average defined (full year data available).
Returns:
pandas.DataFrame: loaded data
]
variable[df] assign[=] call[name[pd].read_excel, parameter[name[self].input_file]]
variable[df] assign[=] call[name[df].dropna, parameter[]]
return[name[df]] | keyword[def] identifier[load] ( identifier[self] ):
literal[string]
identifier[df] = identifier[pd] . identifier[read_excel] ( identifier[self] . identifier[input_file] , identifier[skiprows] = literal[int] )
identifier[df] = identifier[df] . identifier[dropna] ( identifier[subset] =[ literal[string] ])
keyword[return] identifier[df] | def load(self):
"""Load table
Keeps only rows with annual average defined (full year data available).
Returns:
pandas.DataFrame: loaded data
"""
df = pd.read_excel(self.input_file, skiprows=11)
df = df.dropna(subset=['Annual'])
return df |
def fit_model(ts, sc=None):
"""
Fits an AR(1) + GARCH(1, 1) model to the given time series.
Parameters
----------
ts:
the time series to which we want to fit a AR+GARCH model as a Numpy array
Returns an ARGARCH model
"""
assert sc != None, "Missing SparkContext"
jvm = sc._jvm
jmodel = jvm.com.cloudera.sparkts.models.ARGARCH.fitModel(_py2java(sc, Vectors.dense(ts)))
return ARGARCHModel(jmodel=jmodel, sc=sc) | def function[fit_model, parameter[ts, sc]]:
constant[
Fits an AR(1) + GARCH(1, 1) model to the given time series.
Parameters
----------
ts:
the time series to which we want to fit a AR+GARCH model as a Numpy array
Returns an ARGARCH model
]
assert[compare[name[sc] not_equal[!=] constant[None]]]
variable[jvm] assign[=] name[sc]._jvm
variable[jmodel] assign[=] call[name[jvm].com.cloudera.sparkts.models.ARGARCH.fitModel, parameter[call[name[_py2java], parameter[name[sc], call[name[Vectors].dense, parameter[name[ts]]]]]]]
return[call[name[ARGARCHModel], parameter[]]] | keyword[def] identifier[fit_model] ( identifier[ts] , identifier[sc] = keyword[None] ):
literal[string]
keyword[assert] identifier[sc] != keyword[None] , literal[string]
identifier[jvm] = identifier[sc] . identifier[_jvm]
identifier[jmodel] = identifier[jvm] . identifier[com] . identifier[cloudera] . identifier[sparkts] . identifier[models] . identifier[ARGARCH] . identifier[fitModel] ( identifier[_py2java] ( identifier[sc] , identifier[Vectors] . identifier[dense] ( identifier[ts] )))
keyword[return] identifier[ARGARCHModel] ( identifier[jmodel] = identifier[jmodel] , identifier[sc] = identifier[sc] ) | def fit_model(ts, sc=None):
"""
Fits an AR(1) + GARCH(1, 1) model to the given time series.
Parameters
----------
ts:
the time series to which we want to fit a AR+GARCH model as a Numpy array
Returns an ARGARCH model
"""
assert sc != None, 'Missing SparkContext'
jvm = sc._jvm
jmodel = jvm.com.cloudera.sparkts.models.ARGARCH.fitModel(_py2java(sc, Vectors.dense(ts)))
return ARGARCHModel(jmodel=jmodel, sc=sc) |
def combine_last_two_dimensions(x):
"""Reshape x so that the last two dimension become one.
Args:
x: a Tensor with shape [..., a, b]
Returns:
a Tensor with shape [..., ab]
"""
x_shape = common_layers.shape_list(x)
a, b = x_shape[-2:]
return tf.reshape(x, x_shape[:-2] + [a * b]) | def function[combine_last_two_dimensions, parameter[x]]:
constant[Reshape x so that the last two dimension become one.
Args:
x: a Tensor with shape [..., a, b]
Returns:
a Tensor with shape [..., ab]
]
variable[x_shape] assign[=] call[name[common_layers].shape_list, parameter[name[x]]]
<ast.Tuple object at 0x7da1b1e154b0> assign[=] call[name[x_shape]][<ast.Slice object at 0x7da1b1e16560>]
return[call[name[tf].reshape, parameter[name[x], binary_operation[call[name[x_shape]][<ast.Slice object at 0x7da1b1e17df0>] + list[[<ast.BinOp object at 0x7da1b1e14400>]]]]]] | keyword[def] identifier[combine_last_two_dimensions] ( identifier[x] ):
literal[string]
identifier[x_shape] = identifier[common_layers] . identifier[shape_list] ( identifier[x] )
identifier[a] , identifier[b] = identifier[x_shape] [- literal[int] :]
keyword[return] identifier[tf] . identifier[reshape] ( identifier[x] , identifier[x_shape] [:- literal[int] ]+[ identifier[a] * identifier[b] ]) | def combine_last_two_dimensions(x):
"""Reshape x so that the last two dimension become one.
Args:
x: a Tensor with shape [..., a, b]
Returns:
a Tensor with shape [..., ab]
"""
x_shape = common_layers.shape_list(x)
(a, b) = x_shape[-2:]
return tf.reshape(x, x_shape[:-2] + [a * b]) |
def set_logger(self, logger_name, level=logging.INFO):
"""
Convenience function to quickly configure full debug output
to go to the console.
"""
log = logging.getLogger(logger_name)
log.setLevel(level)
ch = logging.StreamHandler(None)
ch.setLevel(level)
# create formatter
if level == logging.INFO:
formatter = logging.Formatter(InfoFmtString)
else:
formatter = logging.Formatter(DebugFmtString)
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
log.addHandler(ch) | def function[set_logger, parameter[self, logger_name, level]]:
constant[
Convenience function to quickly configure full debug output
to go to the console.
]
variable[log] assign[=] call[name[logging].getLogger, parameter[name[logger_name]]]
call[name[log].setLevel, parameter[name[level]]]
variable[ch] assign[=] call[name[logging].StreamHandler, parameter[constant[None]]]
call[name[ch].setLevel, parameter[name[level]]]
if compare[name[level] equal[==] name[logging].INFO] begin[:]
variable[formatter] assign[=] call[name[logging].Formatter, parameter[name[InfoFmtString]]]
call[name[ch].setFormatter, parameter[name[formatter]]]
call[name[log].addHandler, parameter[name[ch]]] | keyword[def] identifier[set_logger] ( identifier[self] , identifier[logger_name] , identifier[level] = identifier[logging] . identifier[INFO] ):
literal[string]
identifier[log] = identifier[logging] . identifier[getLogger] ( identifier[logger_name] )
identifier[log] . identifier[setLevel] ( identifier[level] )
identifier[ch] = identifier[logging] . identifier[StreamHandler] ( keyword[None] )
identifier[ch] . identifier[setLevel] ( identifier[level] )
keyword[if] identifier[level] == identifier[logging] . identifier[INFO] :
identifier[formatter] = identifier[logging] . identifier[Formatter] ( identifier[InfoFmtString] )
keyword[else] :
identifier[formatter] = identifier[logging] . identifier[Formatter] ( identifier[DebugFmtString] )
identifier[ch] . identifier[setFormatter] ( identifier[formatter] )
identifier[log] . identifier[addHandler] ( identifier[ch] ) | def set_logger(self, logger_name, level=logging.INFO):
"""
Convenience function to quickly configure full debug output
to go to the console.
"""
log = logging.getLogger(logger_name)
log.setLevel(level)
ch = logging.StreamHandler(None)
ch.setLevel(level)
# create formatter
if level == logging.INFO:
formatter = logging.Formatter(InfoFmtString) # depends on [control=['if'], data=[]]
else:
formatter = logging.Formatter(DebugFmtString)
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
log.addHandler(ch) |
def subscribe_topic(self, topics=[], pattern=None):
"""Subscribe to a list of topics, or a topic regex pattern.
- ``topics`` (list): List of topics for subscription.
- ``pattern`` (str): Pattern to match available topics. You must provide either topics or pattern,
but not both.
"""
if not isinstance(topics, list):
topics = [topics]
self.consumer.subscribe(topics, pattern=pattern) | def function[subscribe_topic, parameter[self, topics, pattern]]:
constant[Subscribe to a list of topics, or a topic regex pattern.
- ``topics`` (list): List of topics for subscription.
- ``pattern`` (str): Pattern to match available topics. You must provide either topics or pattern,
but not both.
]
if <ast.UnaryOp object at 0x7da1b0f2e320> begin[:]
variable[topics] assign[=] list[[<ast.Name object at 0x7da1b0f2d630>]]
call[name[self].consumer.subscribe, parameter[name[topics]]] | keyword[def] identifier[subscribe_topic] ( identifier[self] , identifier[topics] =[], identifier[pattern] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[topics] , identifier[list] ):
identifier[topics] =[ identifier[topics] ]
identifier[self] . identifier[consumer] . identifier[subscribe] ( identifier[topics] , identifier[pattern] = identifier[pattern] ) | def subscribe_topic(self, topics=[], pattern=None):
"""Subscribe to a list of topics, or a topic regex pattern.
- ``topics`` (list): List of topics for subscription.
- ``pattern`` (str): Pattern to match available topics. You must provide either topics or pattern,
but not both.
"""
if not isinstance(topics, list):
topics = [topics] # depends on [control=['if'], data=[]]
self.consumer.subscribe(topics, pattern=pattern) |
def get_uri_list(self, **kwargs):
"""
Returns a list of Uris to index
"""
index_status_filter = """
optional {{ ?s dcterm:modified ?modTime }} .
optional {{ ?s kds:esIndexTime ?time }} .
optional {{ ?s kds:esIndexError ?error }}
filter (
!(bound(?time)) ||
?time<?modTime ||
(bound(?error) && ?time < {idx_start_time}))
""".format(idx_start_time=self.idx_start_time.sparql)
items_query_template = """
SELECT DISTINCT ?s ?es_id
{{
VALUES ?rdftypes {{\n\t\t{rdf_types} }} .
?s a ?rdftypes .
BIND(SHA1(STR(?s)) as ?es_id) .
{status_filter}
}}
{order_by}
"""
status_filter = index_status_filter \
if not kwargs.get("no_status") else ""
order_by = kwargs.get("order_by", "")
sparql = items_query_template.format(
rdf_types="\n\t\t".join(self.rdf_types),
status_filter=status_filter,
order_by=order_by)
results = [(Uri(item['s']['value']), item['es_id']['value'],)
for item in self.tstore_conn.query(sparql=sparql)]
return results | def function[get_uri_list, parameter[self]]:
constant[
Returns a list of Uris to index
]
variable[index_status_filter] assign[=] call[constant[
optional {{ ?s dcterm:modified ?modTime }} .
optional {{ ?s kds:esIndexTime ?time }} .
optional {{ ?s kds:esIndexError ?error }}
filter (
!(bound(?time)) ||
?time<?modTime ||
(bound(?error) && ?time < {idx_start_time}))
].format, parameter[]]
variable[items_query_template] assign[=] constant[
SELECT DISTINCT ?s ?es_id
{{
VALUES ?rdftypes {{
{rdf_types} }} .
?s a ?rdftypes .
BIND(SHA1(STR(?s)) as ?es_id) .
{status_filter}
}}
{order_by}
]
variable[status_filter] assign[=] <ast.IfExp object at 0x7da1b1588e50>
variable[order_by] assign[=] call[name[kwargs].get, parameter[constant[order_by], constant[]]]
variable[sparql] assign[=] call[name[items_query_template].format, parameter[]]
variable[results] assign[=] <ast.ListComp object at 0x7da1b15889d0>
return[name[results]] | keyword[def] identifier[get_uri_list] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[index_status_filter] = literal[string] . identifier[format] ( identifier[idx_start_time] = identifier[self] . identifier[idx_start_time] . identifier[sparql] )
identifier[items_query_template] = literal[string]
identifier[status_filter] = identifier[index_status_filter] keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] ) keyword[else] literal[string]
identifier[order_by] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[sparql] = identifier[items_query_template] . identifier[format] (
identifier[rdf_types] = literal[string] . identifier[join] ( identifier[self] . identifier[rdf_types] ),
identifier[status_filter] = identifier[status_filter] ,
identifier[order_by] = identifier[order_by] )
identifier[results] =[( identifier[Uri] ( identifier[item] [ literal[string] ][ literal[string] ]), identifier[item] [ literal[string] ][ literal[string] ],)
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[tstore_conn] . identifier[query] ( identifier[sparql] = identifier[sparql] )]
keyword[return] identifier[results] | def get_uri_list(self, **kwargs):
"""
Returns a list of Uris to index
"""
index_status_filter = '\n optional {{ ?s dcterm:modified ?modTime }} .\n optional {{ ?s kds:esIndexTime ?time }} .\n optional {{ ?s kds:esIndexError ?error }}\n filter (\n !(bound(?time)) ||\n ?time<?modTime ||\n (bound(?error) && ?time < {idx_start_time}))\n '.format(idx_start_time=self.idx_start_time.sparql)
items_query_template = '\n SELECT DISTINCT ?s ?es_id\n {{\n VALUES ?rdftypes {{\n\t\t{rdf_types} }} .\n ?s a ?rdftypes .\n BIND(SHA1(STR(?s)) as ?es_id) .\n {status_filter}\n }}\n {order_by}\n '
status_filter = index_status_filter if not kwargs.get('no_status') else ''
order_by = kwargs.get('order_by', '')
sparql = items_query_template.format(rdf_types='\n\t\t'.join(self.rdf_types), status_filter=status_filter, order_by=order_by)
results = [(Uri(item['s']['value']), item['es_id']['value']) for item in self.tstore_conn.query(sparql=sparql)]
return results |
def _finalize_merge(out_file, bam_files, config):
"""Handle indexes and cleanups of merged BAM and input files.
"""
# Ensure timestamps are up to date on output file and index
# Works around issues on systems with inconsistent times
for ext in ["", ".bai"]:
if os.path.exists(out_file + ext):
subprocess.check_call(["touch", out_file + ext])
for b in bam_files:
utils.save_diskspace(b, "BAM merged to %s" % out_file, config) | def function[_finalize_merge, parameter[out_file, bam_files, config]]:
constant[Handle indexes and cleanups of merged BAM and input files.
]
for taget[name[ext]] in starred[list[[<ast.Constant object at 0x7da1b18bc250>, <ast.Constant object at 0x7da1b18bdae0>]]] begin[:]
if call[name[os].path.exists, parameter[binary_operation[name[out_file] + name[ext]]]] begin[:]
call[name[subprocess].check_call, parameter[list[[<ast.Constant object at 0x7da1b18bef80>, <ast.BinOp object at 0x7da1b18bfe50>]]]]
for taget[name[b]] in starred[name[bam_files]] begin[:]
call[name[utils].save_diskspace, parameter[name[b], binary_operation[constant[BAM merged to %s] <ast.Mod object at 0x7da2590d6920> name[out_file]], name[config]]] | keyword[def] identifier[_finalize_merge] ( identifier[out_file] , identifier[bam_files] , identifier[config] ):
literal[string]
keyword[for] identifier[ext] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[out_file] + identifier[ext] ):
identifier[subprocess] . identifier[check_call] ([ literal[string] , identifier[out_file] + identifier[ext] ])
keyword[for] identifier[b] keyword[in] identifier[bam_files] :
identifier[utils] . identifier[save_diskspace] ( identifier[b] , literal[string] % identifier[out_file] , identifier[config] ) | def _finalize_merge(out_file, bam_files, config):
"""Handle indexes and cleanups of merged BAM and input files.
"""
# Ensure timestamps are up to date on output file and index
# Works around issues on systems with inconsistent times
for ext in ['', '.bai']:
if os.path.exists(out_file + ext):
subprocess.check_call(['touch', out_file + ext]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ext']]
for b in bam_files:
utils.save_diskspace(b, 'BAM merged to %s' % out_file, config) # depends on [control=['for'], data=['b']] |
def cnst_A(self, X, Xf=None):
r"""Compute :math:`A \mathbf{x}` component of ADMM problem
constraint. In this case :math:`A \mathbf{x} = (G_r^T \;\;
G_c^T \;\; H)^T \mathbf{x}`.
"""
if Xf is None:
Xf = sl.rfftn(X, axes=self.axes)
return sl.irfftn(self.GAf*Xf[..., np.newaxis], self.axsz,
axes=self.axes) | def function[cnst_A, parameter[self, X, Xf]]:
constant[Compute :math:`A \mathbf{x}` component of ADMM problem
constraint. In this case :math:`A \mathbf{x} = (G_r^T \;\;
G_c^T \;\; H)^T \mathbf{x}`.
]
if compare[name[Xf] is constant[None]] begin[:]
variable[Xf] assign[=] call[name[sl].rfftn, parameter[name[X]]]
return[call[name[sl].irfftn, parameter[binary_operation[name[self].GAf * call[name[Xf]][tuple[[<ast.Constant object at 0x7da1b074bf70>, <ast.Attribute object at 0x7da1b0749a50>]]]], name[self].axsz]]] | keyword[def] identifier[cnst_A] ( identifier[self] , identifier[X] , identifier[Xf] = keyword[None] ):
literal[string]
keyword[if] identifier[Xf] keyword[is] keyword[None] :
identifier[Xf] = identifier[sl] . identifier[rfftn] ( identifier[X] , identifier[axes] = identifier[self] . identifier[axes] )
keyword[return] identifier[sl] . identifier[irfftn] ( identifier[self] . identifier[GAf] * identifier[Xf] [..., identifier[np] . identifier[newaxis] ], identifier[self] . identifier[axsz] ,
identifier[axes] = identifier[self] . identifier[axes] ) | def cnst_A(self, X, Xf=None):
"""Compute :math:`A \\mathbf{x}` component of ADMM problem
constraint. In this case :math:`A \\mathbf{x} = (G_r^T \\;\\;
G_c^T \\;\\; H)^T \\mathbf{x}`.
"""
if Xf is None:
Xf = sl.rfftn(X, axes=self.axes) # depends on [control=['if'], data=['Xf']]
return sl.irfftn(self.GAf * Xf[..., np.newaxis], self.axsz, axes=self.axes) |
def _construct_update(self, outgoing_route):
"""Construct update message with Outgoing-routes path attribute
appropriately cloned/copied/updated.
"""
update = None
path = outgoing_route.path
# Get copy of path's path attributes.
pathattr_map = path.pathattr_map
new_pathattr = []
if path.is_withdraw:
if isinstance(path, Ipv4Path):
update = BGPUpdate(withdrawn_routes=[path.nlri])
return update
else:
mpunreach_attr = BGPPathAttributeMpUnreachNLRI(
path.route_family.afi, path.route_family.safi, [path.nlri]
)
new_pathattr.append(mpunreach_attr)
elif self.is_route_server_client:
nlri_list = [path.nlri]
new_pathattr.extend(pathattr_map.values())
else:
if self.is_route_reflector_client:
                # Append the ORIGINATOR_ID attribute if it does not already exist.
if BGP_ATTR_TYPE_ORIGINATOR_ID not in pathattr_map:
originator_id = path.source
if originator_id is None:
originator_id = self._common_conf.router_id
elif isinstance(path.source, Peer):
originator_id = path.source.ip_address
new_pathattr.append(
BGPPathAttributeOriginatorId(value=originator_id))
                # Prepend own CLUSTER_ID into CLUSTER_LIST attribute if it exists.
# Otherwise append CLUSTER_LIST attribute.
cluster_lst_attr = pathattr_map.get(BGP_ATTR_TYPE_CLUSTER_LIST)
if cluster_lst_attr:
cluster_list = list(cluster_lst_attr.value)
if self._common_conf.cluster_id not in cluster_list:
cluster_list.insert(0, self._common_conf.cluster_id)
new_pathattr.append(
BGPPathAttributeClusterList(cluster_list))
else:
new_pathattr.append(
BGPPathAttributeClusterList(
[self._common_conf.cluster_id]))
# Supported and un-supported/unknown attributes.
origin_attr = None
nexthop_attr = None
as_path_attr = None
as4_path_attr = None
aggregator_attr = None
as4_aggregator_attr = None
extcomm_attr = None
community_attr = None
localpref_attr = None
pmsi_tunnel_attr = None
unknown_opttrans_attrs = None
nlri_list = [path.nlri]
if path.route_family.safi in (subaddr_family.IP_FLOWSPEC,
subaddr_family.VPN_FLOWSPEC):
# Flow Specification does not have next_hop.
next_hop = []
elif self.is_ebgp_peer():
next_hop = self._session_next_hop(path)
if path.is_local() and path.has_nexthop():
next_hop = path.nexthop
else:
next_hop = path.nexthop
# RFC 4271 allows us to change next_hop
# if configured to announce its own ip address.
# Also if the BGP route is configured without next_hop,
                # we use self._session_next_hop(path) as next_hop.
if (self._neigh_conf.is_next_hop_self
or (path.is_local() and not path.has_nexthop())):
next_hop = self._session_next_hop(path)
LOG.debug('using %s as a next_hop address instead'
' of path.nexthop %s', next_hop, path.nexthop)
nexthop_attr = BGPPathAttributeNextHop(next_hop)
assert nexthop_attr, 'Missing NEXTHOP mandatory attribute.'
if not isinstance(path, Ipv4Path):
# We construct mpreach-nlri attribute.
mpnlri_attr = BGPPathAttributeMpReachNLRI(
path.route_family.afi,
path.route_family.safi,
next_hop,
nlri_list
)
# ORIGIN Attribute.
# According to RFC this attribute value SHOULD NOT be changed by
# any other speaker.
origin_attr = pathattr_map.get(BGP_ATTR_TYPE_ORIGIN)
assert origin_attr, 'Missing ORIGIN mandatory attribute.'
# AS_PATH Attribute.
            # Construct AS-path-attr using the path's AS_PATH attr. with the
            # local AS as the first item.
path_aspath = pathattr_map.get(BGP_ATTR_TYPE_AS_PATH)
assert path_aspath, 'Missing AS_PATH mandatory attribute.'
# Deep copy AS_PATH attr value
as_path_list = path_aspath.path_seg_list
            # If this is an iBGP peer.
if not self.is_ebgp_peer():
# When a given BGP speaker advertises the route to an internal
# peer, the advertising speaker SHALL NOT modify the AS_PATH
# attribute associated with the route.
pass
else:
# When a given BGP speaker advertises the route to an external
# peer, the advertising speaker updates the AS_PATH attribute
# as follows:
# 1) if the first path segment of the AS_PATH is of type
# AS_SEQUENCE, the local system prepends its own AS num as
# the last element of the sequence (put it in the left-most
# position with respect to the position of octets in the
# protocol message). If the act of prepending will cause an
# overflow in the AS_PATH segment (i.e., more than 255
# ASes), it SHOULD prepend a new segment of type AS_SEQUENCE
# and prepend its own AS number to this new segment.
#
# 2) if the first path segment of the AS_PATH is of type AS_SET
# , the local system prepends a new path segment of type
# AS_SEQUENCE to the AS_PATH, including its own AS number in
# that segment.
#
# 3) if the AS_PATH is empty, the local system creates a path
# segment of type AS_SEQUENCE, places its own AS into that
# segment, and places that segment into the AS_PATH.
if (len(as_path_list) > 0 and
isinstance(as_path_list[0], list) and
len(as_path_list[0]) < 255):
as_path_list[0].insert(0, self.local_as)
else:
as_path_list.insert(0, [self.local_as])
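                # Illustration with arbitrary AS numbers: with local AS 64512,
                # an AS_PATH of [[65001, 65002]] becomes
                # [[64512, 65001, 65002]] via case 1 above, while an empty
                # AS_PATH becomes [[64512]] via case 3.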
# Construct AS4_PATH list from AS_PATH list and swap
# non-mappable AS number with AS_TRANS in AS_PATH.
as_path_list, as4_path_list = self._trans_as_path(
as_path_list)
# If the neighbor supports Four-Octet AS number, send AS_PATH
# in Four-Octet.
if self.is_four_octet_as_number_cap_valid():
as_path_attr = BGPPathAttributeAsPath(
as_path_list, as_pack_str='!I') # specify Four-Octet.
# Otherwise, send AS_PATH in Two-Octet.
else:
as_path_attr = BGPPathAttributeAsPath(as_path_list)
# If needed, send AS4_PATH attribute.
if as4_path_list:
as4_path_attr = BGPPathAttributeAs4Path(as4_path_list)
# AGGREGATOR Attribute.
aggregator_attr = pathattr_map.get(BGP_ATTR_TYPE_AGGREGATOR)
# If the neighbor does not support Four-Octet AS number,
# swap non-mappable AS number with AS_TRANS.
if (aggregator_attr and
not self.is_four_octet_as_number_cap_valid()):
# If AS number of AGGREGATOR is Four-Octet AS number,
# swap with AS_TRANS, else do not.
aggregator_as_number = aggregator_attr.as_number
if not is_valid_old_asn(aggregator_as_number):
aggregator_attr = bgp.BGPPathAttributeAggregator(
bgp.AS_TRANS, aggregator_attr.addr)
as4_aggregator_attr = bgp.BGPPathAttributeAs4Aggregator(
aggregator_as_number, aggregator_attr.addr)
# MULTI_EXIT_DISC Attribute.
# For eBGP session we can send multi-exit-disc if configured.
multi_exit_disc = None
if self.is_ebgp_peer():
if self._neigh_conf.multi_exit_disc:
multi_exit_disc = BGPPathAttributeMultiExitDisc(
self._neigh_conf.multi_exit_disc
)
else:
pass
if not self.is_ebgp_peer():
multi_exit_disc = pathattr_map.get(
BGP_ATTR_TYPE_MULTI_EXIT_DISC)
# LOCAL_PREF Attribute.
if not self.is_ebgp_peer():
# For iBGP peers we are required to send local-pref attribute
# for connected or local prefixes. We check if the path matches
# attribute_maps and set local-pref value.
# If the path doesn't match, we set default local-pref given
# from the user. The default value is 100.
localpref_attr = BGPPathAttributeLocalPref(
self._common_conf.local_pref)
key = const.ATTR_MAPS_LABEL_DEFAULT
if isinstance(path, (Vpnv4Path, Vpnv6Path)):
nlri = nlri_list[0]
rf = VRF_RF_IPV4 if isinstance(path, Vpnv4Path)\
else VRF_RF_IPV6
key = ':'.join([nlri.route_dist, rf])
attr_type = AttributeMap.ATTR_LOCAL_PREF
at_maps = self._attribute_maps.get(key, {})
result = self._lookup_attribute_map(at_maps, attr_type, path)
if result:
localpref_attr = result
# COMMUNITY Attribute.
community_attr = pathattr_map.get(BGP_ATTR_TYPE_COMMUNITIES)
# EXTENDED COMMUNITY Attribute.
# Construct ExtCommunity path-attr based on given.
path_extcomm_attr = pathattr_map.get(
BGP_ATTR_TYPE_EXTENDED_COMMUNITIES
)
if path_extcomm_attr:
# SOO list can be configured per VRF and/or per Neighbor.
                # NeighborConf has this setting; we add it to the existing list.
communities = path_extcomm_attr.communities
if self._neigh_conf.soo_list:
# construct extended community
soo_list = self._neigh_conf.soo_list
subtype = 0x03
for soo in soo_list:
first, second = soo.split(':')
if '.' in first:
c = BGPIPv4AddressSpecificExtendedCommunity(
subtype=subtype,
ipv4_address=first,
local_administrator=int(second))
else:
c = BGPTwoOctetAsSpecificExtendedCommunity(
subtype=subtype,
as_number=int(first),
local_administrator=int(second))
communities.append(c)
extcomm_attr = BGPPathAttributeExtendedCommunities(
communities=communities
)
pmsi_tunnel_attr = pathattr_map.get(
BGP_ATTR_TYEP_PMSI_TUNNEL_ATTRIBUTE
)
# UNKNOWN Attributes.
# Get optional transitive path attributes
unknown_opttrans_attrs = bgp_utils.get_unknown_opttrans_attr(path)
# Ordering path attributes according to type as RFC says. We set
# MPReachNLRI first as advised by experts as a new trend in BGP
# implementation.
if isinstance(path, Ipv4Path):
new_pathattr.append(nexthop_attr)
else:
new_pathattr.append(mpnlri_attr)
new_pathattr.append(origin_attr)
new_pathattr.append(as_path_attr)
if as4_path_attr:
new_pathattr.append(as4_path_attr)
if aggregator_attr:
new_pathattr.append(aggregator_attr)
if as4_aggregator_attr:
new_pathattr.append(as4_aggregator_attr)
if multi_exit_disc:
new_pathattr.append(multi_exit_disc)
if localpref_attr:
new_pathattr.append(localpref_attr)
if community_attr:
new_pathattr.append(community_attr)
if extcomm_attr:
new_pathattr.append(extcomm_attr)
if pmsi_tunnel_attr:
new_pathattr.append(pmsi_tunnel_attr)
if unknown_opttrans_attrs:
new_pathattr.extend(unknown_opttrans_attrs.values())
if isinstance(path, Ipv4Path):
update = BGPUpdate(path_attributes=new_pathattr,
nlri=nlri_list)
else:
update = BGPUpdate(path_attributes=new_pathattr)
return update | def function[_construct_update, parameter[self, outgoing_route]]:
constant[Construct update message with Outgoing-routes path attribute
appropriately cloned/copied/updated.
]
variable[update] assign[=] constant[None]
variable[path] assign[=] name[outgoing_route].path
variable[pathattr_map] assign[=] name[path].pathattr_map
variable[new_pathattr] assign[=] list[[]]
if name[path].is_withdraw begin[:]
if call[name[isinstance], parameter[name[path], name[Ipv4Path]]] begin[:]
variable[update] assign[=] call[name[BGPUpdate], parameter[]]
return[name[update]]
if call[name[isinstance], parameter[name[path], name[Ipv4Path]]] begin[:]
variable[update] assign[=] call[name[BGPUpdate], parameter[]]
return[name[update]] | keyword[def] identifier[_construct_update] ( identifier[self] , identifier[outgoing_route] ):
literal[string]
identifier[update] = keyword[None]
identifier[path] = identifier[outgoing_route] . identifier[path]
identifier[pathattr_map] = identifier[path] . identifier[pathattr_map]
identifier[new_pathattr] =[]
keyword[if] identifier[path] . identifier[is_withdraw] :
keyword[if] identifier[isinstance] ( identifier[path] , identifier[Ipv4Path] ):
identifier[update] = identifier[BGPUpdate] ( identifier[withdrawn_routes] =[ identifier[path] . identifier[nlri] ])
keyword[return] identifier[update]
keyword[else] :
identifier[mpunreach_attr] = identifier[BGPPathAttributeMpUnreachNLRI] (
identifier[path] . identifier[route_family] . identifier[afi] , identifier[path] . identifier[route_family] . identifier[safi] ,[ identifier[path] . identifier[nlri] ]
)
identifier[new_pathattr] . identifier[append] ( identifier[mpunreach_attr] )
keyword[elif] identifier[self] . identifier[is_route_server_client] :
identifier[nlri_list] =[ identifier[path] . identifier[nlri] ]
identifier[new_pathattr] . identifier[extend] ( identifier[pathattr_map] . identifier[values] ())
keyword[else] :
keyword[if] identifier[self] . identifier[is_route_reflector_client] :
keyword[if] identifier[BGP_ATTR_TYPE_ORIGINATOR_ID] keyword[not] keyword[in] identifier[pathattr_map] :
identifier[originator_id] = identifier[path] . identifier[source]
keyword[if] identifier[originator_id] keyword[is] keyword[None] :
identifier[originator_id] = identifier[self] . identifier[_common_conf] . identifier[router_id]
keyword[elif] identifier[isinstance] ( identifier[path] . identifier[source] , identifier[Peer] ):
identifier[originator_id] = identifier[path] . identifier[source] . identifier[ip_address]
identifier[new_pathattr] . identifier[append] (
identifier[BGPPathAttributeOriginatorId] ( identifier[value] = identifier[originator_id] ))
identifier[cluster_lst_attr] = identifier[pathattr_map] . identifier[get] ( identifier[BGP_ATTR_TYPE_CLUSTER_LIST] )
keyword[if] identifier[cluster_lst_attr] :
identifier[cluster_list] = identifier[list] ( identifier[cluster_lst_attr] . identifier[value] )
keyword[if] identifier[self] . identifier[_common_conf] . identifier[cluster_id] keyword[not] keyword[in] identifier[cluster_list] :
identifier[cluster_list] . identifier[insert] ( literal[int] , identifier[self] . identifier[_common_conf] . identifier[cluster_id] )
identifier[new_pathattr] . identifier[append] (
identifier[BGPPathAttributeClusterList] ( identifier[cluster_list] ))
keyword[else] :
identifier[new_pathattr] . identifier[append] (
identifier[BGPPathAttributeClusterList] (
[ identifier[self] . identifier[_common_conf] . identifier[cluster_id] ]))
identifier[origin_attr] = keyword[None]
identifier[nexthop_attr] = keyword[None]
identifier[as_path_attr] = keyword[None]
identifier[as4_path_attr] = keyword[None]
identifier[aggregator_attr] = keyword[None]
identifier[as4_aggregator_attr] = keyword[None]
identifier[extcomm_attr] = keyword[None]
identifier[community_attr] = keyword[None]
identifier[localpref_attr] = keyword[None]
identifier[pmsi_tunnel_attr] = keyword[None]
identifier[unknown_opttrans_attrs] = keyword[None]
identifier[nlri_list] =[ identifier[path] . identifier[nlri] ]
keyword[if] identifier[path] . identifier[route_family] . identifier[safi] keyword[in] ( identifier[subaddr_family] . identifier[IP_FLOWSPEC] ,
identifier[subaddr_family] . identifier[VPN_FLOWSPEC] ):
identifier[next_hop] =[]
keyword[elif] identifier[self] . identifier[is_ebgp_peer] ():
identifier[next_hop] = identifier[self] . identifier[_session_next_hop] ( identifier[path] )
keyword[if] identifier[path] . identifier[is_local] () keyword[and] identifier[path] . identifier[has_nexthop] ():
identifier[next_hop] = identifier[path] . identifier[nexthop]
keyword[else] :
identifier[next_hop] = identifier[path] . identifier[nexthop]
keyword[if] ( identifier[self] . identifier[_neigh_conf] . identifier[is_next_hop_self]
keyword[or] ( identifier[path] . identifier[is_local] () keyword[and] keyword[not] identifier[path] . identifier[has_nexthop] ())):
identifier[next_hop] = identifier[self] . identifier[_session_next_hop] ( identifier[path] )
identifier[LOG] . identifier[debug] ( literal[string]
literal[string] , identifier[next_hop] , identifier[path] . identifier[nexthop] )
identifier[nexthop_attr] = identifier[BGPPathAttributeNextHop] ( identifier[next_hop] )
keyword[assert] identifier[nexthop_attr] , literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[path] , identifier[Ipv4Path] ):
identifier[mpnlri_attr] = identifier[BGPPathAttributeMpReachNLRI] (
identifier[path] . identifier[route_family] . identifier[afi] ,
identifier[path] . identifier[route_family] . identifier[safi] ,
identifier[next_hop] ,
identifier[nlri_list]
)
identifier[origin_attr] = identifier[pathattr_map] . identifier[get] ( identifier[BGP_ATTR_TYPE_ORIGIN] )
keyword[assert] identifier[origin_attr] , literal[string]
identifier[path_aspath] = identifier[pathattr_map] . identifier[get] ( identifier[BGP_ATTR_TYPE_AS_PATH] )
keyword[assert] identifier[path_aspath] , literal[string]
identifier[as_path_list] = identifier[path_aspath] . identifier[path_seg_list]
keyword[if] keyword[not] identifier[self] . identifier[is_ebgp_peer] ():
keyword[pass]
keyword[else] :
keyword[if] ( identifier[len] ( identifier[as_path_list] )> literal[int] keyword[and]
identifier[isinstance] ( identifier[as_path_list] [ literal[int] ], identifier[list] ) keyword[and]
identifier[len] ( identifier[as_path_list] [ literal[int] ])< literal[int] ):
identifier[as_path_list] [ literal[int] ]. identifier[insert] ( literal[int] , identifier[self] . identifier[local_as] )
keyword[else] :
identifier[as_path_list] . identifier[insert] ( literal[int] ,[ identifier[self] . identifier[local_as] ])
identifier[as_path_list] , identifier[as4_path_list] = identifier[self] . identifier[_trans_as_path] (
identifier[as_path_list] )
keyword[if] identifier[self] . identifier[is_four_octet_as_number_cap_valid] ():
identifier[as_path_attr] = identifier[BGPPathAttributeAsPath] (
identifier[as_path_list] , identifier[as_pack_str] = literal[string] )
keyword[else] :
identifier[as_path_attr] = identifier[BGPPathAttributeAsPath] ( identifier[as_path_list] )
keyword[if] identifier[as4_path_list] :
identifier[as4_path_attr] = identifier[BGPPathAttributeAs4Path] ( identifier[as4_path_list] )
identifier[aggregator_attr] = identifier[pathattr_map] . identifier[get] ( identifier[BGP_ATTR_TYPE_AGGREGATOR] )
keyword[if] ( identifier[aggregator_attr] keyword[and]
keyword[not] identifier[self] . identifier[is_four_octet_as_number_cap_valid] ()):
identifier[aggregator_as_number] = identifier[aggregator_attr] . identifier[as_number]
keyword[if] keyword[not] identifier[is_valid_old_asn] ( identifier[aggregator_as_number] ):
identifier[aggregator_attr] = identifier[bgp] . identifier[BGPPathAttributeAggregator] (
identifier[bgp] . identifier[AS_TRANS] , identifier[aggregator_attr] . identifier[addr] )
identifier[as4_aggregator_attr] = identifier[bgp] . identifier[BGPPathAttributeAs4Aggregator] (
identifier[aggregator_as_number] , identifier[aggregator_attr] . identifier[addr] )
identifier[multi_exit_disc] = keyword[None]
keyword[if] identifier[self] . identifier[is_ebgp_peer] ():
keyword[if] identifier[self] . identifier[_neigh_conf] . identifier[multi_exit_disc] :
identifier[multi_exit_disc] = identifier[BGPPathAttributeMultiExitDisc] (
identifier[self] . identifier[_neigh_conf] . identifier[multi_exit_disc]
)
keyword[else] :
keyword[pass]
keyword[if] keyword[not] identifier[self] . identifier[is_ebgp_peer] ():
identifier[multi_exit_disc] = identifier[pathattr_map] . identifier[get] (
identifier[BGP_ATTR_TYPE_MULTI_EXIT_DISC] )
keyword[if] keyword[not] identifier[self] . identifier[is_ebgp_peer] ():
identifier[localpref_attr] = identifier[BGPPathAttributeLocalPref] (
identifier[self] . identifier[_common_conf] . identifier[local_pref] )
identifier[key] = identifier[const] . identifier[ATTR_MAPS_LABEL_DEFAULT]
keyword[if] identifier[isinstance] ( identifier[path] ,( identifier[Vpnv4Path] , identifier[Vpnv6Path] )):
identifier[nlri] = identifier[nlri_list] [ literal[int] ]
identifier[rf] = identifier[VRF_RF_IPV4] keyword[if] identifier[isinstance] ( identifier[path] , identifier[Vpnv4Path] ) keyword[else] identifier[VRF_RF_IPV6]
identifier[key] = literal[string] . identifier[join] ([ identifier[nlri] . identifier[route_dist] , identifier[rf] ])
identifier[attr_type] = identifier[AttributeMap] . identifier[ATTR_LOCAL_PREF]
identifier[at_maps] = identifier[self] . identifier[_attribute_maps] . identifier[get] ( identifier[key] ,{})
identifier[result] = identifier[self] . identifier[_lookup_attribute_map] ( identifier[at_maps] , identifier[attr_type] , identifier[path] )
keyword[if] identifier[result] :
identifier[localpref_attr] = identifier[result]
identifier[community_attr] = identifier[pathattr_map] . identifier[get] ( identifier[BGP_ATTR_TYPE_COMMUNITIES] )
identifier[path_extcomm_attr] = identifier[pathattr_map] . identifier[get] (
identifier[BGP_ATTR_TYPE_EXTENDED_COMMUNITIES]
)
keyword[if] identifier[path_extcomm_attr] :
identifier[communities] = identifier[path_extcomm_attr] . identifier[communities]
keyword[if] identifier[self] . identifier[_neigh_conf] . identifier[soo_list] :
identifier[soo_list] = identifier[self] . identifier[_neigh_conf] . identifier[soo_list]
identifier[subtype] = literal[int]
keyword[for] identifier[soo] keyword[in] identifier[soo_list] :
identifier[first] , identifier[second] = identifier[soo] . identifier[split] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[first] :
identifier[c] = identifier[BGPIPv4AddressSpecificExtendedCommunity] (
identifier[subtype] = identifier[subtype] ,
identifier[ipv4_address] = identifier[first] ,
identifier[local_administrator] = identifier[int] ( identifier[second] ))
keyword[else] :
identifier[c] = identifier[BGPTwoOctetAsSpecificExtendedCommunity] (
identifier[subtype] = identifier[subtype] ,
identifier[as_number] = identifier[int] ( identifier[first] ),
identifier[local_administrator] = identifier[int] ( identifier[second] ))
identifier[communities] . identifier[append] ( identifier[c] )
identifier[extcomm_attr] = identifier[BGPPathAttributeExtendedCommunities] (
identifier[communities] = identifier[communities]
)
identifier[pmsi_tunnel_attr] = identifier[pathattr_map] . identifier[get] (
identifier[BGP_ATTR_TYEP_PMSI_TUNNEL_ATTRIBUTE]
)
identifier[unknown_opttrans_attrs] = identifier[bgp_utils] . identifier[get_unknown_opttrans_attr] ( identifier[path] )
keyword[if] identifier[isinstance] ( identifier[path] , identifier[Ipv4Path] ):
identifier[new_pathattr] . identifier[append] ( identifier[nexthop_attr] )
keyword[else] :
identifier[new_pathattr] . identifier[append] ( identifier[mpnlri_attr] )
identifier[new_pathattr] . identifier[append] ( identifier[origin_attr] )
identifier[new_pathattr] . identifier[append] ( identifier[as_path_attr] )
keyword[if] identifier[as4_path_attr] :
identifier[new_pathattr] . identifier[append] ( identifier[as4_path_attr] )
keyword[if] identifier[aggregator_attr] :
identifier[new_pathattr] . identifier[append] ( identifier[aggregator_attr] )
keyword[if] identifier[as4_aggregator_attr] :
identifier[new_pathattr] . identifier[append] ( identifier[as4_aggregator_attr] )
keyword[if] identifier[multi_exit_disc] :
identifier[new_pathattr] . identifier[append] ( identifier[multi_exit_disc] )
keyword[if] identifier[localpref_attr] :
identifier[new_pathattr] . identifier[append] ( identifier[localpref_attr] )
keyword[if] identifier[community_attr] :
identifier[new_pathattr] . identifier[append] ( identifier[community_attr] )
keyword[if] identifier[extcomm_attr] :
identifier[new_pathattr] . identifier[append] ( identifier[extcomm_attr] )
keyword[if] identifier[pmsi_tunnel_attr] :
identifier[new_pathattr] . identifier[append] ( identifier[pmsi_tunnel_attr] )
keyword[if] identifier[unknown_opttrans_attrs] :
identifier[new_pathattr] . identifier[extend] ( identifier[unknown_opttrans_attrs] . identifier[values] ())
keyword[if] identifier[isinstance] ( identifier[path] , identifier[Ipv4Path] ):
identifier[update] = identifier[BGPUpdate] ( identifier[path_attributes] = identifier[new_pathattr] ,
identifier[nlri] = identifier[nlri_list] )
keyword[else] :
identifier[update] = identifier[BGPUpdate] ( identifier[path_attributes] = identifier[new_pathattr] )
keyword[return] identifier[update] | def _construct_update(self, outgoing_route):
"""Construct update message with Outgoing-routes path attribute
appropriately cloned/copied/updated.
"""
update = None
path = outgoing_route.path
# Get copy of path's path attributes.
pathattr_map = path.pathattr_map
new_pathattr = []
if path.is_withdraw:
if isinstance(path, Ipv4Path):
update = BGPUpdate(withdrawn_routes=[path.nlri])
return update # depends on [control=['if'], data=[]]
else:
mpunreach_attr = BGPPathAttributeMpUnreachNLRI(path.route_family.afi, path.route_family.safi, [path.nlri])
new_pathattr.append(mpunreach_attr) # depends on [control=['if'], data=[]]
elif self.is_route_server_client:
nlri_list = [path.nlri]
new_pathattr.extend(pathattr_map.values()) # depends on [control=['if'], data=[]]
else:
if self.is_route_reflector_client:
                # Append the ORIGINATOR_ID attribute if it does not already exist.
if BGP_ATTR_TYPE_ORIGINATOR_ID not in pathattr_map:
originator_id = path.source
if originator_id is None:
originator_id = self._common_conf.router_id # depends on [control=['if'], data=['originator_id']]
elif isinstance(path.source, Peer):
originator_id = path.source.ip_address # depends on [control=['if'], data=[]]
new_pathattr.append(BGPPathAttributeOriginatorId(value=originator_id)) # depends on [control=['if'], data=[]]
                # Prepend own CLUSTER_ID into CLUSTER_LIST attribute if it exists.
# Otherwise append CLUSTER_LIST attribute.
cluster_lst_attr = pathattr_map.get(BGP_ATTR_TYPE_CLUSTER_LIST)
if cluster_lst_attr:
cluster_list = list(cluster_lst_attr.value)
if self._common_conf.cluster_id not in cluster_list:
cluster_list.insert(0, self._common_conf.cluster_id) # depends on [control=['if'], data=['cluster_list']]
new_pathattr.append(BGPPathAttributeClusterList(cluster_list)) # depends on [control=['if'], data=[]]
else:
new_pathattr.append(BGPPathAttributeClusterList([self._common_conf.cluster_id])) # depends on [control=['if'], data=[]]
# Supported and un-supported/unknown attributes.
origin_attr = None
nexthop_attr = None
as_path_attr = None
as4_path_attr = None
aggregator_attr = None
as4_aggregator_attr = None
extcomm_attr = None
community_attr = None
localpref_attr = None
pmsi_tunnel_attr = None
unknown_opttrans_attrs = None
nlri_list = [path.nlri]
if path.route_family.safi in (subaddr_family.IP_FLOWSPEC, subaddr_family.VPN_FLOWSPEC):
# Flow Specification does not have next_hop.
next_hop = [] # depends on [control=['if'], data=[]]
elif self.is_ebgp_peer():
next_hop = self._session_next_hop(path)
if path.is_local() and path.has_nexthop():
next_hop = path.nexthop # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
next_hop = path.nexthop
# RFC 4271 allows us to change next_hop
# if configured to announce its own ip address.
# Also if the BGP route is configured without next_hop,
                # we use self._session_next_hop(path) as next_hop.
if self._neigh_conf.is_next_hop_self or (path.is_local() and (not path.has_nexthop())):
next_hop = self._session_next_hop(path)
LOG.debug('using %s as a next_hop address instead of path.nexthop %s', next_hop, path.nexthop) # depends on [control=['if'], data=[]]
nexthop_attr = BGPPathAttributeNextHop(next_hop)
assert nexthop_attr, 'Missing NEXTHOP mandatory attribute.'
if not isinstance(path, Ipv4Path):
# We construct mpreach-nlri attribute.
mpnlri_attr = BGPPathAttributeMpReachNLRI(path.route_family.afi, path.route_family.safi, next_hop, nlri_list) # depends on [control=['if'], data=[]]
# ORIGIN Attribute.
# According to RFC this attribute value SHOULD NOT be changed by
# any other speaker.
origin_attr = pathattr_map.get(BGP_ATTR_TYPE_ORIGIN)
assert origin_attr, 'Missing ORIGIN mandatory attribute.'
# AS_PATH Attribute.
            # Construct AS-path-attr using the path's AS_PATH attr. with the
            # local AS as the first item.
path_aspath = pathattr_map.get(BGP_ATTR_TYPE_AS_PATH)
assert path_aspath, 'Missing AS_PATH mandatory attribute.'
# Deep copy AS_PATH attr value
as_path_list = path_aspath.path_seg_list
            # If this is an iBGP peer.
if not self.is_ebgp_peer():
# When a given BGP speaker advertises the route to an internal
# peer, the advertising speaker SHALL NOT modify the AS_PATH
# attribute associated with the route.
pass # depends on [control=['if'], data=[]]
# When a given BGP speaker advertises the route to an external
# peer, the advertising speaker updates the AS_PATH attribute
# as follows:
# 1) if the first path segment of the AS_PATH is of type
# AS_SEQUENCE, the local system prepends its own AS num as
# the last element of the sequence (put it in the left-most
# position with respect to the position of octets in the
# protocol message). If the act of prepending will cause an
# overflow in the AS_PATH segment (i.e., more than 255
# ASes), it SHOULD prepend a new segment of type AS_SEQUENCE
# and prepend its own AS number to this new segment.
#
# 2) if the first path segment of the AS_PATH is of type AS_SET
# , the local system prepends a new path segment of type
# AS_SEQUENCE to the AS_PATH, including its own AS number in
# that segment.
#
# 3) if the AS_PATH is empty, the local system creates a path
# segment of type AS_SEQUENCE, places its own AS into that
# segment, and places that segment into the AS_PATH.
elif len(as_path_list) > 0 and isinstance(as_path_list[0], list) and (len(as_path_list[0]) < 255):
as_path_list[0].insert(0, self.local_as) # depends on [control=['if'], data=[]]
else:
as_path_list.insert(0, [self.local_as])
# Construct AS4_PATH list from AS_PATH list and swap
# non-mappable AS number with AS_TRANS in AS_PATH.
(as_path_list, as4_path_list) = self._trans_as_path(as_path_list)
# If the neighbor supports Four-Octet AS number, send AS_PATH
# in Four-Octet.
if self.is_four_octet_as_number_cap_valid():
as_path_attr = BGPPathAttributeAsPath(as_path_list, as_pack_str='!I') # specify Four-Octet. # depends on [control=['if'], data=[]]
else:
# Otherwise, send AS_PATH in Two-Octet.
as_path_attr = BGPPathAttributeAsPath(as_path_list)
# If needed, send AS4_PATH attribute.
if as4_path_list:
as4_path_attr = BGPPathAttributeAs4Path(as4_path_list) # depends on [control=['if'], data=[]]
# AGGREGATOR Attribute.
aggregator_attr = pathattr_map.get(BGP_ATTR_TYPE_AGGREGATOR)
# If the neighbor does not support Four-Octet AS number,
# swap non-mappable AS number with AS_TRANS.
if aggregator_attr and (not self.is_four_octet_as_number_cap_valid()):
# If AS number of AGGREGATOR is Four-Octet AS number,
# swap with AS_TRANS, else do not.
aggregator_as_number = aggregator_attr.as_number
if not is_valid_old_asn(aggregator_as_number):
aggregator_attr = bgp.BGPPathAttributeAggregator(bgp.AS_TRANS, aggregator_attr.addr)
as4_aggregator_attr = bgp.BGPPathAttributeAs4Aggregator(aggregator_as_number, aggregator_attr.addr) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# MULTI_EXIT_DISC Attribute.
# For eBGP session we can send multi-exit-disc if configured.
multi_exit_disc = None
if self.is_ebgp_peer():
if self._neigh_conf.multi_exit_disc:
multi_exit_disc = BGPPathAttributeMultiExitDisc(self._neigh_conf.multi_exit_disc) # depends on [control=['if'], data=[]]
else:
pass # depends on [control=['if'], data=[]]
if not self.is_ebgp_peer():
multi_exit_disc = pathattr_map.get(BGP_ATTR_TYPE_MULTI_EXIT_DISC) # depends on [control=['if'], data=[]]
# LOCAL_PREF Attribute.
if not self.is_ebgp_peer():
# For iBGP peers we are required to send local-pref attribute
# for connected or local prefixes. We check if the path matches
# attribute_maps and set local-pref value.
# If the path doesn't match, we set default local-pref given
# from the user. The default value is 100.
localpref_attr = BGPPathAttributeLocalPref(self._common_conf.local_pref)
key = const.ATTR_MAPS_LABEL_DEFAULT
if isinstance(path, (Vpnv4Path, Vpnv6Path)):
nlri = nlri_list[0]
rf = VRF_RF_IPV4 if isinstance(path, Vpnv4Path) else VRF_RF_IPV6
key = ':'.join([nlri.route_dist, rf]) # depends on [control=['if'], data=[]]
attr_type = AttributeMap.ATTR_LOCAL_PREF
at_maps = self._attribute_maps.get(key, {})
result = self._lookup_attribute_map(at_maps, attr_type, path)
if result:
localpref_attr = result # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# COMMUNITY Attribute.
community_attr = pathattr_map.get(BGP_ATTR_TYPE_COMMUNITIES)
# EXTENDED COMMUNITY Attribute.
# Construct ExtCommunity path-attr based on given.
path_extcomm_attr = pathattr_map.get(BGP_ATTR_TYPE_EXTENDED_COMMUNITIES)
if path_extcomm_attr:
# SOO list can be configured per VRF and/or per Neighbor.
                # NeighborConf has this setting; we add it to the existing list.
communities = path_extcomm_attr.communities
if self._neigh_conf.soo_list:
# construct extended community
soo_list = self._neigh_conf.soo_list
subtype = 3
for soo in soo_list:
(first, second) = soo.split(':')
if '.' in first:
c = BGPIPv4AddressSpecificExtendedCommunity(subtype=subtype, ipv4_address=first, local_administrator=int(second)) # depends on [control=['if'], data=['first']]
else:
c = BGPTwoOctetAsSpecificExtendedCommunity(subtype=subtype, as_number=int(first), local_administrator=int(second))
communities.append(c) # depends on [control=['for'], data=['soo']] # depends on [control=['if'], data=[]]
extcomm_attr = BGPPathAttributeExtendedCommunities(communities=communities)
pmsi_tunnel_attr = pathattr_map.get(BGP_ATTR_TYEP_PMSI_TUNNEL_ATTRIBUTE) # depends on [control=['if'], data=[]]
# UNKNOWN Attributes.
# Get optional transitive path attributes
unknown_opttrans_attrs = bgp_utils.get_unknown_opttrans_attr(path)
# Ordering path attributes according to type as RFC says. We set
# MPReachNLRI first as advised by experts as a new trend in BGP
# implementation.
if isinstance(path, Ipv4Path):
new_pathattr.append(nexthop_attr) # depends on [control=['if'], data=[]]
else:
new_pathattr.append(mpnlri_attr)
new_pathattr.append(origin_attr)
new_pathattr.append(as_path_attr)
if as4_path_attr:
new_pathattr.append(as4_path_attr) # depends on [control=['if'], data=[]]
if aggregator_attr:
new_pathattr.append(aggregator_attr) # depends on [control=['if'], data=[]]
if as4_aggregator_attr:
new_pathattr.append(as4_aggregator_attr) # depends on [control=['if'], data=[]]
if multi_exit_disc:
new_pathattr.append(multi_exit_disc) # depends on [control=['if'], data=[]]
if localpref_attr:
new_pathattr.append(localpref_attr) # depends on [control=['if'], data=[]]
if community_attr:
new_pathattr.append(community_attr) # depends on [control=['if'], data=[]]
if extcomm_attr:
new_pathattr.append(extcomm_attr) # depends on [control=['if'], data=[]]
if pmsi_tunnel_attr:
new_pathattr.append(pmsi_tunnel_attr) # depends on [control=['if'], data=[]]
if unknown_opttrans_attrs:
new_pathattr.extend(unknown_opttrans_attrs.values()) # depends on [control=['if'], data=[]]
if isinstance(path, Ipv4Path):
update = BGPUpdate(path_attributes=new_pathattr, nlri=nlri_list) # depends on [control=['if'], data=[]]
else:
update = BGPUpdate(path_attributes=new_pathattr)
return update |
def format_currency_field(__, prec, number, locale):
"""Formats a currency field."""
locale = Locale.parse(locale)
currency = get_territory_currencies(locale.territory)[0]
if prec is None:
pattern, currency_digits = None, True
else:
prec = int(prec)
pattern = locale.currency_formats['standard']
pattern = modify_number_pattern(pattern, frac_prec=(prec, prec))
currency_digits = False
return format_currency(number, currency, pattern, locale=locale,
currency_digits=currency_digits) | def function[format_currency_field, parameter[__, prec, number, locale]]:
constant[Formats a currency field.]
variable[locale] assign[=] call[name[Locale].parse, parameter[name[locale]]]
variable[currency] assign[=] call[call[name[get_territory_currencies], parameter[name[locale].territory]]][constant[0]]
if compare[name[prec] is constant[None]] begin[:]
<ast.Tuple object at 0x7da18c4ccaf0> assign[=] tuple[[<ast.Constant object at 0x7da18c4cc850>, <ast.Constant object at 0x7da18c4cd6f0>]]
return[call[name[format_currency], parameter[name[number], name[currency], name[pattern]]]] | keyword[def] identifier[format_currency_field] ( identifier[__] , identifier[prec] , identifier[number] , identifier[locale] ):
literal[string]
identifier[locale] = identifier[Locale] . identifier[parse] ( identifier[locale] )
identifier[currency] = identifier[get_territory_currencies] ( identifier[locale] . identifier[territory] )[ literal[int] ]
keyword[if] identifier[prec] keyword[is] keyword[None] :
identifier[pattern] , identifier[currency_digits] = keyword[None] , keyword[True]
keyword[else] :
identifier[prec] = identifier[int] ( identifier[prec] )
identifier[pattern] = identifier[locale] . identifier[currency_formats] [ literal[string] ]
identifier[pattern] = identifier[modify_number_pattern] ( identifier[pattern] , identifier[frac_prec] =( identifier[prec] , identifier[prec] ))
identifier[currency_digits] = keyword[False]
keyword[return] identifier[format_currency] ( identifier[number] , identifier[currency] , identifier[pattern] , identifier[locale] = identifier[locale] ,
identifier[currency_digits] = identifier[currency_digits] ) | def format_currency_field(__, prec, number, locale):
"""Formats a currency field."""
locale = Locale.parse(locale)
currency = get_territory_currencies(locale.territory)[0]
if prec is None:
(pattern, currency_digits) = (None, True) # depends on [control=['if'], data=[]]
else:
prec = int(prec)
pattern = locale.currency_formats['standard']
pattern = modify_number_pattern(pattern, frac_prec=(prec, prec))
currency_digits = False
return format_currency(number, currency, pattern, locale=locale, currency_digits=currency_digits) |
def get_parameter(self):
"""Obtain list parameter object from the current widget state.
:returns: A DefaultValueParameter from the current state of widget
:rtype: DefaultValueParameter
"""
radio_button_checked_id = self.input_button_group.checkedId()
# No radio button checked, then default value = None
if radio_button_checked_id == -1:
self._parameter.value = None
# The last radio button (custom) is checked, get the value from the
# line edit
elif radio_button_checked_id == len(self._parameter.options) - 1:
self._parameter.options[radio_button_checked_id] = \
self.custom_value.value()
self._parameter.value = self.custom_value.value()
else:
self._parameter.value = self._parameter.options[
radio_button_checked_id]
return self._parameter | def function[get_parameter, parameter[self]]:
constant[Obtain list parameter object from the current widget state.
:returns: A DefaultValueParameter from the current state of widget
:rtype: DefaultValueParameter
]
variable[radio_button_checked_id] assign[=] call[name[self].input_button_group.checkedId, parameter[]]
if compare[name[radio_button_checked_id] equal[==] <ast.UnaryOp object at 0x7da18ede5c90>] begin[:]
name[self]._parameter.value assign[=] constant[None]
return[name[self]._parameter] | keyword[def] identifier[get_parameter] ( identifier[self] ):
literal[string]
identifier[radio_button_checked_id] = identifier[self] . identifier[input_button_group] . identifier[checkedId] ()
keyword[if] identifier[radio_button_checked_id] ==- literal[int] :
identifier[self] . identifier[_parameter] . identifier[value] = keyword[None]
keyword[elif] identifier[radio_button_checked_id] == identifier[len] ( identifier[self] . identifier[_parameter] . identifier[options] )- literal[int] :
identifier[self] . identifier[_parameter] . identifier[options] [ identifier[radio_button_checked_id] ]= identifier[self] . identifier[custom_value] . identifier[value] ()
identifier[self] . identifier[_parameter] . identifier[value] = identifier[self] . identifier[custom_value] . identifier[value] ()
keyword[else] :
identifier[self] . identifier[_parameter] . identifier[value] = identifier[self] . identifier[_parameter] . identifier[options] [
identifier[radio_button_checked_id] ]
keyword[return] identifier[self] . identifier[_parameter] | def get_parameter(self):
"""Obtain list parameter object from the current widget state.
:returns: A DefaultValueParameter from the current state of widget
:rtype: DefaultValueParameter
"""
radio_button_checked_id = self.input_button_group.checkedId()
# No radio button checked, then default value = None
if radio_button_checked_id == -1:
self._parameter.value = None # depends on [control=['if'], data=[]]
# The last radio button (custom) is checked, get the value from the
# line edit
elif radio_button_checked_id == len(self._parameter.options) - 1:
self._parameter.options[radio_button_checked_id] = self.custom_value.value()
self._parameter.value = self.custom_value.value() # depends on [control=['if'], data=['radio_button_checked_id']]
else:
self._parameter.value = self._parameter.options[radio_button_checked_id]
return self._parameter |
def long_press(self, on_element):
"""
Long press on an element.
:Args:
- on_element: The element to long press.
"""
self._actions.append(lambda: self._driver.execute(
Command.LONG_PRESS, {'element': on_element.id}))
return self | def function[long_press, parameter[self, on_element]]:
constant[
Long press on an element.
:Args:
- on_element: The element to long press.
]
call[name[self]._actions.append, parameter[<ast.Lambda object at 0x7da1b2029a50>]]
return[name[self]] | keyword[def] identifier[long_press] ( identifier[self] , identifier[on_element] ):
literal[string]
identifier[self] . identifier[_actions] . identifier[append] ( keyword[lambda] : identifier[self] . identifier[_driver] . identifier[execute] (
identifier[Command] . identifier[LONG_PRESS] ,{ literal[string] : identifier[on_element] . identifier[id] }))
keyword[return] identifier[self] | def long_press(self, on_element):
"""
Long press on an element.
:Args:
- on_element: The element to long press.
"""
self._actions.append(lambda : self._driver.execute(Command.LONG_PRESS, {'element': on_element.id}))
return self |
def _make_leading_paths(self, src, mode=0o700):
"""Create leading path components
The standard python `os.makedirs` is insufficient for our
needs: it will only create directories, and ignores the fact
that some path components may be symbolic links.
:param src: The source path in the host file system for which
leading components should be created, or the path
to an sos_* virtual directory inside the archive.
Host paths must be absolute (initial '/'), and
sos_* directory paths must be a path relative to
the root of the archive.
:param mode: An optional mode to be used when creating path
components.
:returns: A rewritten destination path in the case that one
or more symbolic links in intermediate components
of the path have altered the path destination.
"""
self.log_debug("Making leading paths for %s" % src)
root = self._archive_root
dest = src
def in_archive(path):
"""Test whether path ``path`` is inside the archive.
"""
return path.startswith(os.path.join(root, ""))
if not src.startswith("/"):
# Sos archive path (sos_commands, sos_logs etc.)
src_dir = src
else:
# Host file path
src_dir = src if os.path.isdir(src) else os.path.split(src)[0]
# Build a list of path components in root-to-leaf order.
path = src_dir
path_comps = []
while path != '/' and path != '':
head, tail = os.path.split(path)
path_comps.append(tail)
path = head
path_comps.reverse()
abs_path = root
src_path = "/"
# Check and create components as needed
for comp in path_comps:
abs_path = os.path.join(abs_path, comp)
# Do not create components that are above the archive root.
if not in_archive(abs_path):
continue
src_path = os.path.join(src_path, comp)
if not os.path.exists(abs_path):
self.log_debug("Making path %s" % abs_path)
if os.path.islink(src_path) and os.path.isdir(src_path):
target = os.readlink(src_path)
# The directory containing the source in the host fs,
# adjusted for the current level of path creation.
target_dir = os.path.split(src_path)[0]
# The source path of the target in the host fs to be
# recursively copied.
target_src = os.path.join(target_dir, target)
# Recursively create leading components of target
dest = self._make_leading_paths(target_src, mode=mode)
dest = os.path.normpath(dest)
self.log_debug("Making symlink '%s' -> '%s'" %
(abs_path, target))
os.symlink(target, abs_path)
else:
self.log_debug("Making directory %s" % abs_path)
os.mkdir(abs_path, mode)
dest = src_path
return dest | def function[_make_leading_paths, parameter[self, src, mode]]:
constant[Create leading path components
The standard python `os.makedirs` is insufficient for our
needs: it will only create directories, and ignores the fact
that some path components may be symbolic links.
:param src: The source path in the host file system for which
leading components should be created, or the path
to an sos_* virtual directory inside the archive.
Host paths must be absolute (initial '/'), and
sos_* directory paths must be a path relative to
the root of the archive.
:param mode: An optional mode to be used when creating path
components.
:returns: A rewritten destination path in the case that one
or more symbolic links in intermediate components
of the path have altered the path destination.
]
call[name[self].log_debug, parameter[binary_operation[constant[Making leading paths for %s] <ast.Mod object at 0x7da2590d6920> name[src]]]]
variable[root] assign[=] name[self]._archive_root
variable[dest] assign[=] name[src]
def function[in_archive, parameter[path]]:
constant[Test whether path ``path`` is inside the archive.
]
return[call[name[path].startswith, parameter[call[name[os].path.join, parameter[name[root], constant[]]]]]]
if <ast.UnaryOp object at 0x7da18f8122f0> begin[:]
variable[src_dir] assign[=] name[src]
variable[path] assign[=] name[src_dir]
variable[path_comps] assign[=] list[[]]
while <ast.BoolOp object at 0x7da18f8109d0> begin[:]
<ast.Tuple object at 0x7da18f813a00> assign[=] call[name[os].path.split, parameter[name[path]]]
call[name[path_comps].append, parameter[name[tail]]]
variable[path] assign[=] name[head]
call[name[path_comps].reverse, parameter[]]
variable[abs_path] assign[=] name[root]
variable[src_path] assign[=] constant[/]
for taget[name[comp]] in starred[name[path_comps]] begin[:]
variable[abs_path] assign[=] call[name[os].path.join, parameter[name[abs_path], name[comp]]]
if <ast.UnaryOp object at 0x7da18f813a30> begin[:]
continue
variable[src_path] assign[=] call[name[os].path.join, parameter[name[src_path], name[comp]]]
if <ast.UnaryOp object at 0x7da18f811fc0> begin[:]
call[name[self].log_debug, parameter[binary_operation[constant[Making path %s] <ast.Mod object at 0x7da2590d6920> name[abs_path]]]]
if <ast.BoolOp object at 0x7da1b17a8220> begin[:]
variable[target] assign[=] call[name[os].readlink, parameter[name[src_path]]]
variable[target_dir] assign[=] call[call[name[os].path.split, parameter[name[src_path]]]][constant[0]]
variable[target_src] assign[=] call[name[os].path.join, parameter[name[target_dir], name[target]]]
variable[dest] assign[=] call[name[self]._make_leading_paths, parameter[name[target_src]]]
variable[dest] assign[=] call[name[os].path.normpath, parameter[name[dest]]]
call[name[self].log_debug, parameter[binary_operation[constant[Making symlink '%s' -> '%s'] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f810e50>, <ast.Name object at 0x7da18f813790>]]]]]
call[name[os].symlink, parameter[name[target], name[abs_path]]]
return[name[dest]] | keyword[def] identifier[_make_leading_paths] ( identifier[self] , identifier[src] , identifier[mode] = literal[int] ):
literal[string]
identifier[self] . identifier[log_debug] ( literal[string] % identifier[src] )
identifier[root] = identifier[self] . identifier[_archive_root]
identifier[dest] = identifier[src]
keyword[def] identifier[in_archive] ( identifier[path] ):
literal[string]
keyword[return] identifier[path] . identifier[startswith] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root] , literal[string] ))
keyword[if] keyword[not] identifier[src] . identifier[startswith] ( literal[string] ):
identifier[src_dir] = identifier[src]
keyword[else] :
identifier[src_dir] = identifier[src] keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[src] ) keyword[else] identifier[os] . identifier[path] . identifier[split] ( identifier[src] )[ literal[int] ]
identifier[path] = identifier[src_dir]
identifier[path_comps] =[]
keyword[while] identifier[path] != literal[string] keyword[and] identifier[path] != literal[string] :
identifier[head] , identifier[tail] = identifier[os] . identifier[path] . identifier[split] ( identifier[path] )
identifier[path_comps] . identifier[append] ( identifier[tail] )
identifier[path] = identifier[head]
identifier[path_comps] . identifier[reverse] ()
identifier[abs_path] = identifier[root]
identifier[src_path] = literal[string]
keyword[for] identifier[comp] keyword[in] identifier[path_comps] :
identifier[abs_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[abs_path] , identifier[comp] )
keyword[if] keyword[not] identifier[in_archive] ( identifier[abs_path] ):
keyword[continue]
identifier[src_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[src_path] , identifier[comp] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[abs_path] ):
identifier[self] . identifier[log_debug] ( literal[string] % identifier[abs_path] )
keyword[if] identifier[os] . identifier[path] . identifier[islink] ( identifier[src_path] ) keyword[and] identifier[os] . identifier[path] . identifier[isdir] ( identifier[src_path] ):
identifier[target] = identifier[os] . identifier[readlink] ( identifier[src_path] )
identifier[target_dir] = identifier[os] . identifier[path] . identifier[split] ( identifier[src_path] )[ literal[int] ]
identifier[target_src] = identifier[os] . identifier[path] . identifier[join] ( identifier[target_dir] , identifier[target] )
identifier[dest] = identifier[self] . identifier[_make_leading_paths] ( identifier[target_src] , identifier[mode] = identifier[mode] )
identifier[dest] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[dest] )
identifier[self] . identifier[log_debug] ( literal[string] %
( identifier[abs_path] , identifier[target] ))
identifier[os] . identifier[symlink] ( identifier[target] , identifier[abs_path] )
keyword[else] :
identifier[self] . identifier[log_debug] ( literal[string] % identifier[abs_path] )
identifier[os] . identifier[mkdir] ( identifier[abs_path] , identifier[mode] )
identifier[dest] = identifier[src_path]
keyword[return] identifier[dest] | def _make_leading_paths(self, src, mode=448):
"""Create leading path components
The standard python `os.makedirs` is insufficient for our
needs: it will only create directories, and ignores the fact
that some path components may be symbolic links.
:param src: The source path in the host file system for which
leading components should be created, or the path
to an sos_* virtual directory inside the archive.
Host paths must be absolute (initial '/'), and
sos_* directory paths must be a path relative to
the root of the archive.
:param mode: An optional mode to be used when creating path
components.
:returns: A rewritten destination path in the case that one
or more symbolic links in intermediate components
of the path have altered the path destination.
"""
self.log_debug('Making leading paths for %s' % src)
root = self._archive_root
dest = src
def in_archive(path):
"""Test whether path ``path`` is inside the archive.
"""
return path.startswith(os.path.join(root, ''))
if not src.startswith('/'):
# Sos archive path (sos_commands, sos_logs etc.)
src_dir = src # depends on [control=['if'], data=[]]
else:
# Host file path
src_dir = src if os.path.isdir(src) else os.path.split(src)[0]
# Build a list of path components in root-to-leaf order.
path = src_dir
path_comps = []
while path != '/' and path != '':
(head, tail) = os.path.split(path)
path_comps.append(tail)
path = head # depends on [control=['while'], data=[]]
path_comps.reverse()
abs_path = root
src_path = '/'
# Check and create components as needed
for comp in path_comps:
abs_path = os.path.join(abs_path, comp)
# Do not create components that are above the archive root.
if not in_archive(abs_path):
continue # depends on [control=['if'], data=[]]
src_path = os.path.join(src_path, comp)
if not os.path.exists(abs_path):
self.log_debug('Making path %s' % abs_path)
if os.path.islink(src_path) and os.path.isdir(src_path):
target = os.readlink(src_path)
# The directory containing the source in the host fs,
# adjusted for the current level of path creation.
target_dir = os.path.split(src_path)[0]
# The source path of the target in the host fs to be
# recursively copied.
target_src = os.path.join(target_dir, target)
# Recursively create leading components of target
dest = self._make_leading_paths(target_src, mode=mode)
dest = os.path.normpath(dest)
self.log_debug("Making symlink '%s' -> '%s'" % (abs_path, target))
os.symlink(target, abs_path) # depends on [control=['if'], data=[]]
else:
self.log_debug('Making directory %s' % abs_path)
os.mkdir(abs_path, mode)
dest = src_path # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['comp']]
return dest |
def progress_stats(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Compute progress stats for a result.
:param id: Result ID as an int.
:return: :class:`results.Progress <results.Progress>` object
:rtype: results.Progress
"""
schema = ProgressSchema()
resp = self.service.get(self.base+str(id)+'/', params={'stats': 'progress'})
return self.service.decode(schema, resp) | def function[progress_stats, parameter[self, id]]:
constant[Compute progress stats for a result.
:param id: Result ID as an int.
:return: :class:`results.Progress <results.Progress>` object
:rtype: results.Progress
]
variable[schema] assign[=] call[name[ProgressSchema], parameter[]]
variable[resp] assign[=] call[name[self].service.get, parameter[binary_operation[binary_operation[name[self].base + call[name[str], parameter[name[id]]]] + constant[/]]]]
return[call[name[self].service.decode, parameter[name[schema], name[resp]]]] | keyword[def] identifier[progress_stats] ( identifier[self] , identifier[id] ):
literal[string]
identifier[schema] = identifier[ProgressSchema] ()
identifier[resp] = identifier[self] . identifier[service] . identifier[get] ( identifier[self] . identifier[base] + identifier[str] ( identifier[id] )+ literal[string] , identifier[params] ={ literal[string] : literal[string] })
keyword[return] identifier[self] . identifier[service] . identifier[decode] ( identifier[schema] , identifier[resp] ) | def progress_stats(self, id): # pylint: disable=invalid-name,redefined-builtin
'Compute progress stats for a result.\n\n :param id: Result ID as an int.\n :return: :class:`results.Progress <results.Progress>` object\n :rtype: results.Progress\n '
schema = ProgressSchema()
resp = self.service.get(self.base + str(id) + '/', params={'stats': 'progress'})
return self.service.decode(schema, resp) |
def create_action(self):
"""Create actions associated with this widget."""
actions = {}
act = QAction(QIcon(ICON['step_prev']), 'Previous Step', self)
act.setShortcut('[')
act.triggered.connect(self.step_prev)
actions['step_prev'] = act
act = QAction(QIcon(ICON['step_next']), 'Next Step', self)
act.setShortcut(']')
act.triggered.connect(self.step_next)
actions['step_next'] = act
act = QAction(QIcon(ICON['page_prev']), 'Previous Page', self)
act.setShortcut(QKeySequence.MoveToPreviousChar)
act.triggered.connect(self.page_prev)
actions['page_prev'] = act
act = QAction(QIcon(ICON['page_next']), 'Next Page', self)
act.setShortcut(QKeySequence.MoveToNextChar)
act.triggered.connect(self.page_next)
actions['page_next'] = act
act = QAction('Go to Epoch', self)
act.setShortcut(QKeySequence.FindNext)
act.triggered.connect(self.go_to_epoch)
actions['go_to_epoch'] = act
act = QAction('Line Up with Epoch', self)
act.setShortcut('F4')
act.triggered.connect(self.line_up_with_epoch)
actions['line_up_with_epoch'] = act
act = QAction(QIcon(ICON['zoomprev']), 'Wider Time Window', self)
act.setShortcut(QKeySequence.ZoomIn)
act.triggered.connect(self.X_more)
actions['X_more'] = act
act = QAction(QIcon(ICON['zoomnext']), 'Narrower Time Window', self)
act.setShortcut(QKeySequence.ZoomOut)
act.triggered.connect(self.X_less)
actions['X_less'] = act
act = QAction(QIcon(ICON['zoomin']), 'Larger Scaling', self)
act.setShortcut(QKeySequence.MoveToPreviousLine)
act.triggered.connect(self.Y_more)
actions['Y_less'] = act
act = QAction(QIcon(ICON['zoomout']), 'Smaller Scaling', self)
act.setShortcut(QKeySequence.MoveToNextLine)
act.triggered.connect(self.Y_less)
actions['Y_more'] = act
act = QAction(QIcon(ICON['ydist_more']), 'Larger Y Distance', self)
act.triggered.connect(self.Y_wider)
actions['Y_wider'] = act
act = QAction(QIcon(ICON['ydist_less']), 'Smaller Y Distance', self)
act.triggered.connect(self.Y_tighter)
actions['Y_tighter'] = act
act = QAction(QIcon(ICON['chronometer']), '6 Hours Earlier', self)
act.triggered.connect(partial(self.add_time, -6 * 60 * 60))
actions['addtime_-6h'] = act
act = QAction(QIcon(ICON['chronometer']), '1 Hour Earlier', self)
act.triggered.connect(partial(self.add_time, -60 * 60))
actions['addtime_-1h'] = act
act = QAction(QIcon(ICON['chronometer']), '10 Minutes Earlier', self)
act.triggered.connect(partial(self.add_time, -10 * 60))
actions['addtime_-10min'] = act
act = QAction(QIcon(ICON['chronometer']), '10 Minutes Later', self)
act.triggered.connect(partial(self.add_time, 10 * 60))
actions['addtime_10min'] = act
act = QAction(QIcon(ICON['chronometer']), '1 Hour Later', self)
act.triggered.connect(partial(self.add_time, 60 * 60))
actions['addtime_1h'] = act
act = QAction(QIcon(ICON['chronometer']), '6 Hours Later', self)
act.triggered.connect(partial(self.add_time, 6 * 60 * 60))
actions['addtime_6h'] = act
act = QAction('Go to Next Event', self)
act.setShortcut('s')
act.triggered.connect(self.next_event)
actions['next_event'] = act
act = QAction('Delete Event and Go to Next', self)
act.setShortcut('d')
act.triggered.connect(partial(self.next_event, True))
actions['del_and_next_event'] = act
act = QAction('Next Event of Same Type', self)
act.setCheckable(True)
act.setChecked(True)
actions['next_of_same_type'] = act
act = QAction('Change Event Type', self)
act.setShortcut('e')
act.triggered.connect(self.change_event_type)
actions['change_event_type'] = act
act = QAction('Centre Window Around Event', self)
act.setCheckable(True)
act.setChecked(True)
actions['centre_event'] = act
act = QAction('Full-length Markers', self)
act.setCheckable(True)
act.setChecked(True)
act.triggered.connect(self.display_annotations)
actions['cross_chan_mrk'] = act
# Misc
act = QAction('Export to svg...', self)
act.triggered.connect(partial(export_graphics, MAIN=self.parent))
actions['export_svg'] = act
self.action = actions | def function[create_action, parameter[self]]:
constant[Create actions associated with this widget.]
variable[actions] assign[=] dictionary[[], []]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[step_prev]]]], constant[Previous Step], name[self]]]
call[name[act].setShortcut, parameter[constant[[]]]
call[name[act].triggered.connect, parameter[name[self].step_prev]]
call[name[actions]][constant[step_prev]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[step_next]]]], constant[Next Step], name[self]]]
call[name[act].setShortcut, parameter[constant[]]]]
call[name[act].triggered.connect, parameter[name[self].step_next]]
call[name[actions]][constant[step_next]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[page_prev]]]], constant[Previous Page], name[self]]]
call[name[act].setShortcut, parameter[name[QKeySequence].MoveToPreviousChar]]
call[name[act].triggered.connect, parameter[name[self].page_prev]]
call[name[actions]][constant[page_prev]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[page_next]]]], constant[Next Page], name[self]]]
call[name[act].setShortcut, parameter[name[QKeySequence].MoveToNextChar]]
call[name[act].triggered.connect, parameter[name[self].page_next]]
call[name[actions]][constant[page_next]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[constant[Go to Epoch], name[self]]]
call[name[act].setShortcut, parameter[name[QKeySequence].FindNext]]
call[name[act].triggered.connect, parameter[name[self].go_to_epoch]]
call[name[actions]][constant[go_to_epoch]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[constant[Line Up with Epoch], name[self]]]
call[name[act].setShortcut, parameter[constant[F4]]]
call[name[act].triggered.connect, parameter[name[self].line_up_with_epoch]]
call[name[actions]][constant[line_up_with_epoch]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[zoomprev]]]], constant[Wider Time Window], name[self]]]
call[name[act].setShortcut, parameter[name[QKeySequence].ZoomIn]]
call[name[act].triggered.connect, parameter[name[self].X_more]]
call[name[actions]][constant[X_more]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[zoomnext]]]], constant[Narrower Time Window], name[self]]]
call[name[act].setShortcut, parameter[name[QKeySequence].ZoomOut]]
call[name[act].triggered.connect, parameter[name[self].X_less]]
call[name[actions]][constant[X_less]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[zoomin]]]], constant[Larger Scaling], name[self]]]
call[name[act].setShortcut, parameter[name[QKeySequence].MoveToPreviousLine]]
call[name[act].triggered.connect, parameter[name[self].Y_more]]
call[name[actions]][constant[Y_less]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[zoomout]]]], constant[Smaller Scaling], name[self]]]
call[name[act].setShortcut, parameter[name[QKeySequence].MoveToNextLine]]
call[name[act].triggered.connect, parameter[name[self].Y_less]]
call[name[actions]][constant[Y_more]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[ydist_more]]]], constant[Larger Y Distance], name[self]]]
call[name[act].triggered.connect, parameter[name[self].Y_wider]]
call[name[actions]][constant[Y_wider]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[ydist_less]]]], constant[Smaller Y Distance], name[self]]]
call[name[act].triggered.connect, parameter[name[self].Y_tighter]]
call[name[actions]][constant[Y_tighter]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[chronometer]]]], constant[6 Hours Earlier], name[self]]]
call[name[act].triggered.connect, parameter[call[name[partial], parameter[name[self].add_time, binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b0dee260> * constant[60]] * constant[60]]]]]]
call[name[actions]][constant[addtime_-6h]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[chronometer]]]], constant[1 Hour Earlier], name[self]]]
call[name[act].triggered.connect, parameter[call[name[partial], parameter[name[self].add_time, binary_operation[<ast.UnaryOp object at 0x7da1b0deeef0> * constant[60]]]]]]
call[name[actions]][constant[addtime_-1h]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[chronometer]]]], constant[10 Minutes Earlier], name[self]]]
call[name[act].triggered.connect, parameter[call[name[partial], parameter[name[self].add_time, binary_operation[<ast.UnaryOp object at 0x7da1b0e04f10> * constant[60]]]]]]
call[name[actions]][constant[addtime_-10min]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[chronometer]]]], constant[10 Minutes Later], name[self]]]
call[name[act].triggered.connect, parameter[call[name[partial], parameter[name[self].add_time, binary_operation[constant[10] * constant[60]]]]]]
call[name[actions]][constant[addtime_10min]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[chronometer]]]], constant[1 Hour Later], name[self]]]
call[name[act].triggered.connect, parameter[call[name[partial], parameter[name[self].add_time, binary_operation[constant[60] * constant[60]]]]]]
call[name[actions]][constant[addtime_1h]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[call[name[ICON]][constant[chronometer]]]], constant[6 Hours Later], name[self]]]
call[name[act].triggered.connect, parameter[call[name[partial], parameter[name[self].add_time, binary_operation[binary_operation[constant[6] * constant[60]] * constant[60]]]]]]
call[name[actions]][constant[addtime_6h]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[constant[Go to Next Event], name[self]]]
call[name[act].setShortcut, parameter[constant[s]]]
call[name[act].triggered.connect, parameter[name[self].next_event]]
call[name[actions]][constant[next_event]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[constant[Delete Event and Go to Next], name[self]]]
call[name[act].setShortcut, parameter[constant[d]]]
call[name[act].triggered.connect, parameter[call[name[partial], parameter[name[self].next_event, constant[True]]]]]
call[name[actions]][constant[del_and_next_event]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[constant[Next Event of Same Type], name[self]]]
call[name[act].setCheckable, parameter[constant[True]]]
call[name[act].setChecked, parameter[constant[True]]]
call[name[actions]][constant[next_of_same_type]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[constant[Change Event Type], name[self]]]
call[name[act].setShortcut, parameter[constant[e]]]
call[name[act].triggered.connect, parameter[name[self].change_event_type]]
call[name[actions]][constant[change_event_type]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[constant[Centre Window Around Event], name[self]]]
call[name[act].setCheckable, parameter[constant[True]]]
call[name[act].setChecked, parameter[constant[True]]]
call[name[actions]][constant[centre_event]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[constant[Full-length Markers], name[self]]]
call[name[act].setCheckable, parameter[constant[True]]]
call[name[act].setChecked, parameter[constant[True]]]
call[name[act].triggered.connect, parameter[name[self].display_annotations]]
call[name[actions]][constant[cross_chan_mrk]] assign[=] name[act]
variable[act] assign[=] call[name[QAction], parameter[constant[Export to svg...], name[self]]]
call[name[act].triggered.connect, parameter[call[name[partial], parameter[name[export_graphics]]]]]
call[name[actions]][constant[export_svg]] assign[=] name[act]
name[self].action assign[=] name[actions] | keyword[def] identifier[create_action] ( identifier[self] ):
literal[string]
identifier[actions] ={}
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( literal[string] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[step_prev] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( literal[string] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[step_next] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( identifier[QKeySequence] . identifier[MoveToPreviousChar] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[page_prev] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( identifier[QKeySequence] . identifier[MoveToNextChar] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[page_next] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( identifier[QKeySequence] . identifier[FindNext] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[go_to_epoch] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( literal[string] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[line_up_with_epoch] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( identifier[QKeySequence] . identifier[ZoomIn] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[X_more] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( identifier[QKeySequence] . identifier[ZoomOut] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[X_less] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( identifier[QKeySequence] . identifier[MoveToPreviousLine] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[Y_more] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( identifier[QKeySequence] . identifier[MoveToNextLine] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[Y_less] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[Y_wider] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[Y_tighter] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[partial] ( identifier[self] . identifier[add_time] ,- literal[int] * literal[int] * literal[int] ))
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[partial] ( identifier[self] . identifier[add_time] ,- literal[int] * literal[int] ))
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[partial] ( identifier[self] . identifier[add_time] ,- literal[int] * literal[int] ))
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[partial] ( identifier[self] . identifier[add_time] , literal[int] * literal[int] ))
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[partial] ( identifier[self] . identifier[add_time] , literal[int] * literal[int] ))
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( identifier[QIcon] ( identifier[ICON] [ literal[string] ]), literal[string] , identifier[self] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[partial] ( identifier[self] . identifier[add_time] , literal[int] * literal[int] * literal[int] ))
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( literal[string] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[next_event] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( literal[string] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[partial] ( identifier[self] . identifier[next_event] , keyword[True] ))
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( literal[string] , identifier[self] )
identifier[act] . identifier[setCheckable] ( keyword[True] )
identifier[act] . identifier[setChecked] ( keyword[True] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( literal[string] , identifier[self] )
identifier[act] . identifier[setShortcut] ( literal[string] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[change_event_type] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( literal[string] , identifier[self] )
identifier[act] . identifier[setCheckable] ( keyword[True] )
identifier[act] . identifier[setChecked] ( keyword[True] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( literal[string] , identifier[self] )
identifier[act] . identifier[setCheckable] ( keyword[True] )
identifier[act] . identifier[setChecked] ( keyword[True] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[display_annotations] )
identifier[actions] [ literal[string] ]= identifier[act]
identifier[act] = identifier[QAction] ( literal[string] , identifier[self] )
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[partial] ( identifier[export_graphics] , identifier[MAIN] = identifier[self] . identifier[parent] ))
identifier[actions] [ literal[string] ]= identifier[act]
identifier[self] . identifier[action] = identifier[actions] | def create_action(self):
"""Create actions associated with this widget."""
actions = {}
act = QAction(QIcon(ICON['step_prev']), 'Previous Step', self)
act.setShortcut('[')
act.triggered.connect(self.step_prev)
actions['step_prev'] = act
act = QAction(QIcon(ICON['step_next']), 'Next Step', self)
act.setShortcut(']')
act.triggered.connect(self.step_next)
actions['step_next'] = act
act = QAction(QIcon(ICON['page_prev']), 'Previous Page', self)
act.setShortcut(QKeySequence.MoveToPreviousChar)
act.triggered.connect(self.page_prev)
actions['page_prev'] = act
act = QAction(QIcon(ICON['page_next']), 'Next Page', self)
act.setShortcut(QKeySequence.MoveToNextChar)
act.triggered.connect(self.page_next)
actions['page_next'] = act
act = QAction('Go to Epoch', self)
act.setShortcut(QKeySequence.FindNext)
act.triggered.connect(self.go_to_epoch)
actions['go_to_epoch'] = act
act = QAction('Line Up with Epoch', self)
act.setShortcut('F4')
act.triggered.connect(self.line_up_with_epoch)
actions['line_up_with_epoch'] = act
act = QAction(QIcon(ICON['zoomprev']), 'Wider Time Window', self)
act.setShortcut(QKeySequence.ZoomIn)
act.triggered.connect(self.X_more)
actions['X_more'] = act
act = QAction(QIcon(ICON['zoomnext']), 'Narrower Time Window', self)
act.setShortcut(QKeySequence.ZoomOut)
act.triggered.connect(self.X_less)
actions['X_less'] = act
act = QAction(QIcon(ICON['zoomin']), 'Larger Scaling', self)
act.setShortcut(QKeySequence.MoveToPreviousLine)
act.triggered.connect(self.Y_more)
actions['Y_less'] = act
act = QAction(QIcon(ICON['zoomout']), 'Smaller Scaling', self)
act.setShortcut(QKeySequence.MoveToNextLine)
act.triggered.connect(self.Y_less)
actions['Y_more'] = act
act = QAction(QIcon(ICON['ydist_more']), 'Larger Y Distance', self)
act.triggered.connect(self.Y_wider)
actions['Y_wider'] = act
act = QAction(QIcon(ICON['ydist_less']), 'Smaller Y Distance', self)
act.triggered.connect(self.Y_tighter)
actions['Y_tighter'] = act
act = QAction(QIcon(ICON['chronometer']), '6 Hours Earlier', self)
act.triggered.connect(partial(self.add_time, -6 * 60 * 60))
actions['addtime_-6h'] = act
act = QAction(QIcon(ICON['chronometer']), '1 Hour Earlier', self)
act.triggered.connect(partial(self.add_time, -60 * 60))
actions['addtime_-1h'] = act
act = QAction(QIcon(ICON['chronometer']), '10 Minutes Earlier', self)
act.triggered.connect(partial(self.add_time, -10 * 60))
actions['addtime_-10min'] = act
act = QAction(QIcon(ICON['chronometer']), '10 Minutes Later', self)
act.triggered.connect(partial(self.add_time, 10 * 60))
actions['addtime_10min'] = act
act = QAction(QIcon(ICON['chronometer']), '1 Hour Later', self)
act.triggered.connect(partial(self.add_time, 60 * 60))
actions['addtime_1h'] = act
act = QAction(QIcon(ICON['chronometer']), '6 Hours Later', self)
act.triggered.connect(partial(self.add_time, 6 * 60 * 60))
actions['addtime_6h'] = act
act = QAction('Go to Next Event', self)
act.setShortcut('s')
act.triggered.connect(self.next_event)
actions['next_event'] = act
act = QAction('Delete Event and Go to Next', self)
act.setShortcut('d')
act.triggered.connect(partial(self.next_event, True))
actions['del_and_next_event'] = act
act = QAction('Next Event of Same Type', self)
act.setCheckable(True)
act.setChecked(True)
actions['next_of_same_type'] = act
act = QAction('Change Event Type', self)
act.setShortcut('e')
act.triggered.connect(self.change_event_type)
actions['change_event_type'] = act
act = QAction('Centre Window Around Event', self)
act.setCheckable(True)
act.setChecked(True)
actions['centre_event'] = act
act = QAction('Full-length Markers', self)
act.setCheckable(True)
act.setChecked(True)
act.triggered.connect(self.display_annotations)
actions['cross_chan_mrk'] = act
# Misc
act = QAction('Export to svg...', self)
act.triggered.connect(partial(export_graphics, MAIN=self.parent))
actions['export_svg'] = act
self.action = actions |
def displayValue(self, vocab, value, widget):
"""Overwrite the Script (Python) `displayValue.py` located at
`Products.Archetypes.skins.archetypes` to handle the references
of our Picklist Widget (Methods) gracefully.
This method gets called by the `picklist.pt` template like this:
display python:context.displayValue(vocab, value, widget);"
"""
# Taken from the Script (Python)
t = self.restrictedTraverse('@@at_utils').translate
# ensure we have strings, otherwise the `getValue` method of
# Products.Archetypes.utils will raise a TypeError
def to_string(v):
if isinstance(v, basestring):
return v
return api.get_title(v)
if isinstance(value, (list, tuple)):
value = map(to_string, value)
return t(vocab, value, widget) | def function[displayValue, parameter[self, vocab, value, widget]]:
constant[Overwrite the Script (Python) `displayValue.py` located at
`Products.Archetypes.skins.archetypes` to handle the references
of our Picklist Widget (Methods) gracefully.
This method gets called by the `picklist.pt` template like this:
display python:context.displayValue(vocab, value, widget);"
]
variable[t] assign[=] call[name[self].restrictedTraverse, parameter[constant[@@at_utils]]].translate
def function[to_string, parameter[v]]:
if call[name[isinstance], parameter[name[v], name[basestring]]] begin[:]
return[name[v]]
return[call[name[api].get_title, parameter[name[v]]]]
if call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da18bcca3b0>, <ast.Name object at 0x7da18bccab30>]]]] begin[:]
variable[value] assign[=] call[name[map], parameter[name[to_string], name[value]]]
return[call[name[t], parameter[name[vocab], name[value], name[widget]]]] | keyword[def] identifier[displayValue] ( identifier[self] , identifier[vocab] , identifier[value] , identifier[widget] ):
literal[string]
identifier[t] = identifier[self] . identifier[restrictedTraverse] ( literal[string] ). identifier[translate]
keyword[def] identifier[to_string] ( identifier[v] ):
keyword[if] identifier[isinstance] ( identifier[v] , identifier[basestring] ):
keyword[return] identifier[v]
keyword[return] identifier[api] . identifier[get_title] ( identifier[v] )
keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )):
identifier[value] = identifier[map] ( identifier[to_string] , identifier[value] )
keyword[return] identifier[t] ( identifier[vocab] , identifier[value] , identifier[widget] ) | def displayValue(self, vocab, value, widget):
"""Overwrite the Script (Python) `displayValue.py` located at
`Products.Archetypes.skins.archetypes` to handle the references
of our Picklist Widget (Methods) gracefully.
This method gets called by the `picklist.pt` template like this:
display python:context.displayValue(vocab, value, widget);"
"""
# Taken from the Script (Python)
t = self.restrictedTraverse('@@at_utils').translate
# ensure we have strings, otherwise the `getValue` method of
# Products.Archetypes.utils will raise a TypeError
def to_string(v):
if isinstance(v, basestring):
return v # depends on [control=['if'], data=[]]
return api.get_title(v)
if isinstance(value, (list, tuple)):
value = map(to_string, value) # depends on [control=['if'], data=[]]
return t(vocab, value, widget) |
def last_post_on(self):
""" Returns the latest post date associated with the node or one of its descendants. """
dates = [n.last_post_on for n in self.children if n.last_post_on is not None]
children_last_post_on = max(dates) if dates else None
if children_last_post_on and self.obj.last_post_on:
return max(self.obj.last_post_on, children_last_post_on)
return children_last_post_on or self.obj.last_post_on | def function[last_post_on, parameter[self]]:
constant[ Returns the latest post date associated with the node or one of its descendants. ]
variable[dates] assign[=] <ast.ListComp object at 0x7da18f810cd0>
variable[children_last_post_on] assign[=] <ast.IfExp object at 0x7da18f810940>
if <ast.BoolOp object at 0x7da1b2344a90> begin[:]
return[call[name[max], parameter[name[self].obj.last_post_on, name[children_last_post_on]]]]
return[<ast.BoolOp object at 0x7da1b2346f20>] | keyword[def] identifier[last_post_on] ( identifier[self] ):
literal[string]
identifier[dates] =[ identifier[n] . identifier[last_post_on] keyword[for] identifier[n] keyword[in] identifier[self] . identifier[children] keyword[if] identifier[n] . identifier[last_post_on] keyword[is] keyword[not] keyword[None] ]
identifier[children_last_post_on] = identifier[max] ( identifier[dates] ) keyword[if] identifier[dates] keyword[else] keyword[None]
keyword[if] identifier[children_last_post_on] keyword[and] identifier[self] . identifier[obj] . identifier[last_post_on] :
keyword[return] identifier[max] ( identifier[self] . identifier[obj] . identifier[last_post_on] , identifier[children_last_post_on] )
keyword[return] identifier[children_last_post_on] keyword[or] identifier[self] . identifier[obj] . identifier[last_post_on] | def last_post_on(self):
""" Returns the latest post date associated with the node or one of its descendants. """
dates = [n.last_post_on for n in self.children if n.last_post_on is not None]
children_last_post_on = max(dates) if dates else None
if children_last_post_on and self.obj.last_post_on:
return max(self.obj.last_post_on, children_last_post_on) # depends on [control=['if'], data=[]]
return children_last_post_on or self.obj.last_post_on |
def to_json(value, pretty=False):
"""
Serializes the given value to JSON.
:param value: the value to serialize
:param pretty:
whether or not to format the output in a more human-readable way; if
not specified, defaults to ``False``
:type pretty: bool
:rtype: str
"""
options = {
'sort_keys': False,
'cls': BasicJSONEncoder,
}
if pretty:
options['indent'] = 2
options['separators'] = (',', ': ')
return json.dumps(value, **options) | def function[to_json, parameter[value, pretty]]:
constant[
Serializes the given value to JSON.
:param value: the value to serialize
:param pretty:
whether or not to format the output in a more human-readable way; if
not specified, defaults to ``False``
:type pretty: bool
:rtype: str
]
variable[options] assign[=] dictionary[[<ast.Constant object at 0x7da207f03df0>, <ast.Constant object at 0x7da207f00dc0>], [<ast.Constant object at 0x7da207f02e00>, <ast.Name object at 0x7da207f02920>]]
if name[pretty] begin[:]
call[name[options]][constant[indent]] assign[=] constant[2]
call[name[options]][constant[separators]] assign[=] tuple[[<ast.Constant object at 0x7da207f010c0>, <ast.Constant object at 0x7da207f03610>]]
return[call[name[json].dumps, parameter[name[value]]]] | keyword[def] identifier[to_json] ( identifier[value] , identifier[pretty] = keyword[False] ):
literal[string]
identifier[options] ={
literal[string] : keyword[False] ,
literal[string] : identifier[BasicJSONEncoder] ,
}
keyword[if] identifier[pretty] :
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]=( literal[string] , literal[string] )
keyword[return] identifier[json] . identifier[dumps] ( identifier[value] ,** identifier[options] ) | def to_json(value, pretty=False):
"""
Serializes the given value to JSON.
:param value: the value to serialize
:param pretty:
whether or not to format the output in a more human-readable way; if
not specified, defaults to ``False``
:type pretty: bool
:rtype: str
"""
options = {'sort_keys': False, 'cls': BasicJSONEncoder}
if pretty:
options['indent'] = 2
options['separators'] = (',', ': ') # depends on [control=['if'], data=[]]
return json.dumps(value, **options) |
def IOR(type, nr, size):
"""
An ioctl with read parameters.
size (ctype type or instance)
Type/structure of the argument passed to ioctl's "arg" argument.
"""
return IOC(IOC_READ, type, nr, IOC_TYPECHECK(size)) | def function[IOR, parameter[type, nr, size]]:
constant[
An ioctl with read parameters.
size (ctype type or instance)
Type/structure of the argument passed to ioctl's "arg" argument.
]
return[call[name[IOC], parameter[name[IOC_READ], name[type], name[nr], call[name[IOC_TYPECHECK], parameter[name[size]]]]]] | keyword[def] identifier[IOR] ( identifier[type] , identifier[nr] , identifier[size] ):
literal[string]
keyword[return] identifier[IOC] ( identifier[IOC_READ] , identifier[type] , identifier[nr] , identifier[IOC_TYPECHECK] ( identifier[size] )) | def IOR(type, nr, size):
"""
An ioctl with read parameters.
size (ctype type or instance)
Type/structure of the argument passed to ioctl's "arg" argument.
"""
return IOC(IOC_READ, type, nr, IOC_TYPECHECK(size)) |
def get_app_state():
"""Get current status of application in context
Returns:
:obj:`dict` of application status
"""
if not hasattr(g, 'app_state'):
model = get_model()
g.app_state = {
'app_title': APP_TITLE,
'model_name': type(model).__name__,
'latest_ckpt_name': model.latest_ckpt_name,
'latest_ckpt_time': model.latest_ckpt_time
}
return g.app_state | def function[get_app_state, parameter[]]:
constant[Get current status of application in context
Returns:
:obj:`dict` of application status
]
if <ast.UnaryOp object at 0x7da20c7cb190> begin[:]
variable[model] assign[=] call[name[get_model], parameter[]]
name[g].app_state assign[=] dictionary[[<ast.Constant object at 0x7da20c7c8a60>, <ast.Constant object at 0x7da20c7c8fd0>, <ast.Constant object at 0x7da20c7c9270>, <ast.Constant object at 0x7da20c7c9ae0>], [<ast.Name object at 0x7da20c7cbe20>, <ast.Attribute object at 0x7da20c7c8bb0>, <ast.Attribute object at 0x7da20c7ca110>, <ast.Attribute object at 0x7da20c7cb520>]]
return[name[g].app_state] | keyword[def] identifier[get_app_state] ():
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[g] , literal[string] ):
identifier[model] = identifier[get_model] ()
identifier[g] . identifier[app_state] ={
literal[string] : identifier[APP_TITLE] ,
literal[string] : identifier[type] ( identifier[model] ). identifier[__name__] ,
literal[string] : identifier[model] . identifier[latest_ckpt_name] ,
literal[string] : identifier[model] . identifier[latest_ckpt_time]
}
keyword[return] identifier[g] . identifier[app_state] | def get_app_state():
"""Get current status of application in context
Returns:
:obj:`dict` of application status
"""
if not hasattr(g, 'app_state'):
model = get_model()
g.app_state = {'app_title': APP_TITLE, 'model_name': type(model).__name__, 'latest_ckpt_name': model.latest_ckpt_name, 'latest_ckpt_time': model.latest_ckpt_time} # depends on [control=['if'], data=[]]
return g.app_state |
def collect_all_bucket_keys(self):
"""
Just collects all bucket keys from the subtree
"""
if len(self.childs) == 0:
# This is a leaf so just return the bucket key (we reached the bucket leaf)
#print 'Returning (collect) leaf bucket key %s with %d vectors' % (self.bucket_key, self.vector_count)
return [self.bucket_key]
# Not leaf, return results of childs
result = []
for child in self.childs.values():
result = result + child.collect_all_bucket_keys()
return result | def function[collect_all_bucket_keys, parameter[self]]:
constant[
Just collects all bucket keys from the subtree
]
if compare[call[name[len], parameter[name[self].childs]] equal[==] constant[0]] begin[:]
return[list[[<ast.Attribute object at 0x7da1b08bf2e0>]]]
variable[result] assign[=] list[[]]
for taget[name[child]] in starred[call[name[self].childs.values, parameter[]]] begin[:]
variable[result] assign[=] binary_operation[name[result] + call[name[child].collect_all_bucket_keys, parameter[]]]
return[name[result]] | keyword[def] identifier[collect_all_bucket_keys] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[childs] )== literal[int] :
keyword[return] [ identifier[self] . identifier[bucket_key] ]
identifier[result] =[]
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[childs] . identifier[values] ():
identifier[result] = identifier[result] + identifier[child] . identifier[collect_all_bucket_keys] ()
keyword[return] identifier[result] | def collect_all_bucket_keys(self):
"""
Just collects all bucket keys from the subtree
"""
if len(self.childs) == 0:
# This is a leaf so just return the bucket key (we reached the bucket leaf)
#print 'Returning (collect) leaf bucket key %s with %d vectors' % (self.bucket_key, self.vector_count)
return [self.bucket_key] # depends on [control=['if'], data=[]]
# Not leaf, return results of childs
result = []
for child in self.childs.values():
result = result + child.collect_all_bucket_keys() # depends on [control=['for'], data=['child']]
return result |
def _get_arrays(self, wavelengths, **kwargs):
"""Get sampled spectrum or bandpass in user units."""
x = self._validate_wavelengths(wavelengths)
y = self(x, **kwargs)
if isinstance(wavelengths, u.Quantity):
w = x.to(wavelengths.unit, u.spectral())
else:
w = x
return w, y | def function[_get_arrays, parameter[self, wavelengths]]:
constant[Get sampled spectrum or bandpass in user units.]
variable[x] assign[=] call[name[self]._validate_wavelengths, parameter[name[wavelengths]]]
variable[y] assign[=] call[name[self], parameter[name[x]]]
if call[name[isinstance], parameter[name[wavelengths], name[u].Quantity]] begin[:]
variable[w] assign[=] call[name[x].to, parameter[name[wavelengths].unit, call[name[u].spectral, parameter[]]]]
return[tuple[[<ast.Name object at 0x7da1b26adea0>, <ast.Name object at 0x7da1b26ade10>]]] | keyword[def] identifier[_get_arrays] ( identifier[self] , identifier[wavelengths] ,** identifier[kwargs] ):
literal[string]
identifier[x] = identifier[self] . identifier[_validate_wavelengths] ( identifier[wavelengths] )
identifier[y] = identifier[self] ( identifier[x] ,** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[wavelengths] , identifier[u] . identifier[Quantity] ):
identifier[w] = identifier[x] . identifier[to] ( identifier[wavelengths] . identifier[unit] , identifier[u] . identifier[spectral] ())
keyword[else] :
identifier[w] = identifier[x]
keyword[return] identifier[w] , identifier[y] | def _get_arrays(self, wavelengths, **kwargs):
"""Get sampled spectrum or bandpass in user units."""
x = self._validate_wavelengths(wavelengths)
y = self(x, **kwargs)
if isinstance(wavelengths, u.Quantity):
w = x.to(wavelengths.unit, u.spectral()) # depends on [control=['if'], data=[]]
else:
w = x
return (w, y) |
def enrich_internal_unqualified_edges(graph, subgraph):
"""Add the missing unqualified edges between entities in the subgraph that are contained within the full graph.
:param pybel.BELGraph graph: The full BEL graph
:param pybel.BELGraph subgraph: The query BEL subgraph
"""
for u, v in itt.combinations(subgraph, 2):
if not graph.has_edge(u, v):
continue
for k in graph[u][v]:
if k < 0:
subgraph.add_edge(u, v, key=k, **graph[u][v][k]) | def function[enrich_internal_unqualified_edges, parameter[graph, subgraph]]:
constant[Add the missing unqualified edges between entities in the subgraph that are contained within the full graph.
:param pybel.BELGraph graph: The full BEL graph
:param pybel.BELGraph subgraph: The query BEL subgraph
]
for taget[tuple[[<ast.Name object at 0x7da1aff76770>, <ast.Name object at 0x7da1aff75fc0>]]] in starred[call[name[itt].combinations, parameter[name[subgraph], constant[2]]]] begin[:]
if <ast.UnaryOp object at 0x7da1aff768c0> begin[:]
continue
for taget[name[k]] in starred[call[call[name[graph]][name[u]]][name[v]]] begin[:]
if compare[name[k] less[<] constant[0]] begin[:]
call[name[subgraph].add_edge, parameter[name[u], name[v]]] | keyword[def] identifier[enrich_internal_unqualified_edges] ( identifier[graph] , identifier[subgraph] ):
literal[string]
keyword[for] identifier[u] , identifier[v] keyword[in] identifier[itt] . identifier[combinations] ( identifier[subgraph] , literal[int] ):
keyword[if] keyword[not] identifier[graph] . identifier[has_edge] ( identifier[u] , identifier[v] ):
keyword[continue]
keyword[for] identifier[k] keyword[in] identifier[graph] [ identifier[u] ][ identifier[v] ]:
keyword[if] identifier[k] < literal[int] :
identifier[subgraph] . identifier[add_edge] ( identifier[u] , identifier[v] , identifier[key] = identifier[k] ,** identifier[graph] [ identifier[u] ][ identifier[v] ][ identifier[k] ]) | def enrich_internal_unqualified_edges(graph, subgraph):
"""Add the missing unqualified edges between entities in the subgraph that are contained within the full graph.
:param pybel.BELGraph graph: The full BEL graph
:param pybel.BELGraph subgraph: The query BEL subgraph
"""
for (u, v) in itt.combinations(subgraph, 2):
if not graph.has_edge(u, v):
continue # depends on [control=['if'], data=[]]
for k in graph[u][v]:
if k < 0:
subgraph.add_edge(u, v, key=k, **graph[u][v][k]) # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=[]] |
def g_coil(FlowPlant, IDTube, RadiusCoil, Temp):
"""We need a reference for this.
Karen's thesis likely has this equation and the reference.
"""
return (g_straight(FlowPlant, IDTube).magnitude
* (1 + 0.033 *
np.log10(dean_number(FlowPlant, IDTube, RadiusCoil, Temp)
) ** 4
) ** (1/2)
) | def function[g_coil, parameter[FlowPlant, IDTube, RadiusCoil, Temp]]:
constant[We need a reference for this.
Karen's thesis likely has this equation and the reference.
]
return[binary_operation[call[name[g_straight], parameter[name[FlowPlant], name[IDTube]]].magnitude * binary_operation[binary_operation[constant[1] + binary_operation[constant[0.033] * binary_operation[call[name[np].log10, parameter[call[name[dean_number], parameter[name[FlowPlant], name[IDTube], name[RadiusCoil], name[Temp]]]]] ** constant[4]]]] ** binary_operation[constant[1] / constant[2]]]]] | keyword[def] identifier[g_coil] ( identifier[FlowPlant] , identifier[IDTube] , identifier[RadiusCoil] , identifier[Temp] ):
literal[string]
keyword[return] ( identifier[g_straight] ( identifier[FlowPlant] , identifier[IDTube] ). identifier[magnitude]
*( literal[int] + literal[int] *
identifier[np] . identifier[log10] ( identifier[dean_number] ( identifier[FlowPlant] , identifier[IDTube] , identifier[RadiusCoil] , identifier[Temp] )
)** literal[int]
)**( literal[int] / literal[int] )
) | def g_coil(FlowPlant, IDTube, RadiusCoil, Temp):
"""We need a reference for this.
Karen's thesis likely has this equation and the reference.
"""
return g_straight(FlowPlant, IDTube).magnitude * (1 + 0.033 * np.log10(dean_number(FlowPlant, IDTube, RadiusCoil, Temp)) ** 4) ** (1 / 2) |
def addmag(self, magval):
"""Add a scalar magnitude to existing flux values.
.. math::
\\textnormal{flux}_{\\textnormal{new}} = 10^{-0.4 \\; \\textnormal{magval}} \\; \\textnormal{flux}
Parameters
----------
magval : number
Magnitude value.
Returns
-------
sp : `CompositeSourceSpectrum`
New source spectrum with adjusted flux values.
Raises
------
TypeError
Magnitude value is not a scalar number.
"""
if N.isscalar(magval):
factor = 10**(-0.4*magval)
return self*factor
else:
raise TypeError(".addmag() only takes a constant scalar argument") | def function[addmag, parameter[self, magval]]:
constant[Add a scalar magnitude to existing flux values.
.. math::
\textnormal{flux}_{\textnormal{new}} = 10^{-0.4 \; \textnormal{magval}} \; \textnormal{flux}
Parameters
----------
magval : number
Magnitude value.
Returns
-------
sp : `CompositeSourceSpectrum`
New source spectrum with adjusted flux values.
Raises
------
TypeError
Magnitude value is not a scalar number.
]
if call[name[N].isscalar, parameter[name[magval]]] begin[:]
variable[factor] assign[=] binary_operation[constant[10] ** binary_operation[<ast.UnaryOp object at 0x7da18f722950> * name[magval]]]
return[binary_operation[name[self] * name[factor]]] | keyword[def] identifier[addmag] ( identifier[self] , identifier[magval] ):
literal[string]
keyword[if] identifier[N] . identifier[isscalar] ( identifier[magval] ):
identifier[factor] = literal[int] **(- literal[int] * identifier[magval] )
keyword[return] identifier[self] * identifier[factor]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] ) | def addmag(self, magval):
"""Add a scalar magnitude to existing flux values.
.. math::
\\textnormal{flux}_{\\textnormal{new}} = 10^{-0.4 \\; \\textnormal{magval}} \\; \\textnormal{flux}
Parameters
----------
magval : number
Magnitude value.
Returns
-------
sp : `CompositeSourceSpectrum`
New source spectrum with adjusted flux values.
Raises
------
TypeError
Magnitude value is not a scalar number.
"""
if N.isscalar(magval):
factor = 10 ** (-0.4 * magval)
return self * factor # depends on [control=['if'], data=[]]
else:
raise TypeError('.addmag() only takes a constant scalar argument') |
def find_permission_view_menu(self, permission_name, view_menu_name):
"""
Finds and returns a PermissionView by names
"""
permission = self.find_permission(permission_name)
view_menu = self.find_view_menu(view_menu_name)
if permission and view_menu:
return (
self.get_session.query(self.permissionview_model)
.filter_by(permission=permission, view_menu=view_menu)
.first()
) | def function[find_permission_view_menu, parameter[self, permission_name, view_menu_name]]:
constant[
Finds and returns a PermissionView by names
]
variable[permission] assign[=] call[name[self].find_permission, parameter[name[permission_name]]]
variable[view_menu] assign[=] call[name[self].find_view_menu, parameter[name[view_menu_name]]]
if <ast.BoolOp object at 0x7da18f09e3b0> begin[:]
return[call[call[call[name[self].get_session.query, parameter[name[self].permissionview_model]].filter_by, parameter[]].first, parameter[]]] | keyword[def] identifier[find_permission_view_menu] ( identifier[self] , identifier[permission_name] , identifier[view_menu_name] ):
literal[string]
identifier[permission] = identifier[self] . identifier[find_permission] ( identifier[permission_name] )
identifier[view_menu] = identifier[self] . identifier[find_view_menu] ( identifier[view_menu_name] )
keyword[if] identifier[permission] keyword[and] identifier[view_menu] :
keyword[return] (
identifier[self] . identifier[get_session] . identifier[query] ( identifier[self] . identifier[permissionview_model] )
. identifier[filter_by] ( identifier[permission] = identifier[permission] , identifier[view_menu] = identifier[view_menu] )
. identifier[first] ()
) | def find_permission_view_menu(self, permission_name, view_menu_name):
"""
Finds and returns a PermissionView by names
"""
permission = self.find_permission(permission_name)
view_menu = self.find_view_menu(view_menu_name)
if permission and view_menu:
return self.get_session.query(self.permissionview_model).filter_by(permission=permission, view_menu=view_menu).first() # depends on [control=['if'], data=[]] |
def _clean_query_string(q):
"""Clean up a query string for searching.
Removes unmatched parentheses and joining operators.
Arguments:
q (str): Query string to be cleaned
Returns:
str: The clean query string.
"""
q = q.replace("()", "").strip()
if q.endswith("("):
q = q[:-1].strip()
# Remove misplaced AND/OR/NOT at end
if q[-3:] == "AND" or q[-3:] == "NOT":
q = q[:-3]
elif q[-2:] == "OR":
q = q[:-2]
# Balance parentheses
while q.count("(") > q.count(")"):
q += ")"
while q.count(")") > q.count("("):
q = "(" + q
return q.strip() | def function[_clean_query_string, parameter[q]]:
constant[Clean up a query string for searching.
Removes unmatched parentheses and joining operators.
Arguments:
q (str): Query string to be cleaned
Returns:
str: The clean query string.
]
variable[q] assign[=] call[call[name[q].replace, parameter[constant[()], constant[]]].strip, parameter[]]
if call[name[q].endswith, parameter[constant[(]]] begin[:]
variable[q] assign[=] call[call[name[q]][<ast.Slice object at 0x7da1b24b77f0>].strip, parameter[]]
if <ast.BoolOp object at 0x7da1b24b6bf0> begin[:]
variable[q] assign[=] call[name[q]][<ast.Slice object at 0x7da1b24b4730>]
while compare[call[name[q].count, parameter[constant[(]]] greater[>] call[name[q].count, parameter[constant[)]]]] begin[:]
<ast.AugAssign object at 0x7da1b24b6da0>
while compare[call[name[q].count, parameter[constant[)]]] greater[>] call[name[q].count, parameter[constant[(]]]] begin[:]
variable[q] assign[=] binary_operation[constant[(] + name[q]]
return[call[name[q].strip, parameter[]]] | keyword[def] identifier[_clean_query_string] ( identifier[q] ):
literal[string]
identifier[q] = identifier[q] . identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ()
keyword[if] identifier[q] . identifier[endswith] ( literal[string] ):
identifier[q] = identifier[q] [:- literal[int] ]. identifier[strip] ()
keyword[if] identifier[q] [- literal[int] :]== literal[string] keyword[or] identifier[q] [- literal[int] :]== literal[string] :
identifier[q] = identifier[q] [:- literal[int] ]
keyword[elif] identifier[q] [- literal[int] :]== literal[string] :
identifier[q] = identifier[q] [:- literal[int] ]
keyword[while] identifier[q] . identifier[count] ( literal[string] )> identifier[q] . identifier[count] ( literal[string] ):
identifier[q] += literal[string]
keyword[while] identifier[q] . identifier[count] ( literal[string] )> identifier[q] . identifier[count] ( literal[string] ):
identifier[q] = literal[string] + identifier[q]
keyword[return] identifier[q] . identifier[strip] () | def _clean_query_string(q):
"""Clean up a query string for searching.
Removes unmatched parentheses and joining operators.
Arguments:
q (str): Query string to be cleaned
Returns:
str: The clean query string.
"""
q = q.replace('()', '').strip()
if q.endswith('('):
q = q[:-1].strip() # depends on [control=['if'], data=[]]
# Remove misplaced AND/OR/NOT at end
if q[-3:] == 'AND' or q[-3:] == 'NOT':
q = q[:-3] # depends on [control=['if'], data=[]]
elif q[-2:] == 'OR':
q = q[:-2] # depends on [control=['if'], data=[]]
# Balance parentheses
while q.count('(') > q.count(')'):
q += ')' # depends on [control=['while'], data=[]]
while q.count(')') > q.count('('):
q = '(' + q # depends on [control=['while'], data=[]]
return q.strip() |
def _parse_scalars(scalars):
"""Parse the scalars from the YAML file content to a dictionary of ScalarType(s).
:return: A dictionary { 'full.scalar.label': ScalarType }
"""
scalar_dict = {}
# Scalars are defined in a fixed two-level hierarchy within the definition file.
# The first level contains the category name, while the second level contains the
# probe name (e.g. "category.name: probe: ...").
for category_name in scalars:
category = scalars[category_name]
for probe_name in category:
# We found a scalar type. Go ahead and parse it.
scalar_definition = category[probe_name]
# We pass |strict_type_checks=False| as we don't want to do any check
# server side. This includes skipping the checks for the required keys.
scalar_info = ScalarType(category_name, probe_name, scalar_definition,
strict_type_checks=False)
scalar_dict[scalar_info.label] = scalar_info
return scalar_dict | def function[_parse_scalars, parameter[scalars]]:
constant[Parse the scalars from the YAML file content to a dictionary of ScalarType(s).
:return: A dictionary { 'full.scalar.label': ScalarType }
]
variable[scalar_dict] assign[=] dictionary[[], []]
for taget[name[category_name]] in starred[name[scalars]] begin[:]
variable[category] assign[=] call[name[scalars]][name[category_name]]
for taget[name[probe_name]] in starred[name[category]] begin[:]
variable[scalar_definition] assign[=] call[name[category]][name[probe_name]]
variable[scalar_info] assign[=] call[name[ScalarType], parameter[name[category_name], name[probe_name], name[scalar_definition]]]
call[name[scalar_dict]][name[scalar_info].label] assign[=] name[scalar_info]
return[name[scalar_dict]] | keyword[def] identifier[_parse_scalars] ( identifier[scalars] ):
literal[string]
identifier[scalar_dict] ={}
keyword[for] identifier[category_name] keyword[in] identifier[scalars] :
identifier[category] = identifier[scalars] [ identifier[category_name] ]
keyword[for] identifier[probe_name] keyword[in] identifier[category] :
identifier[scalar_definition] = identifier[category] [ identifier[probe_name] ]
identifier[scalar_info] = identifier[ScalarType] ( identifier[category_name] , identifier[probe_name] , identifier[scalar_definition] ,
identifier[strict_type_checks] = keyword[False] )
identifier[scalar_dict] [ identifier[scalar_info] . identifier[label] ]= identifier[scalar_info]
keyword[return] identifier[scalar_dict] | def _parse_scalars(scalars):
"""Parse the scalars from the YAML file content to a dictionary of ScalarType(s).
:return: A dictionary { 'full.scalar.label': ScalarType }
"""
scalar_dict = {}
# Scalars are defined in a fixed two-level hierarchy within the definition file.
# The first level contains the category name, while the second level contains the
# probe name (e.g. "category.name: probe: ...").
for category_name in scalars:
category = scalars[category_name]
for probe_name in category:
# We found a scalar type. Go ahead and parse it.
scalar_definition = category[probe_name]
# We pass |strict_type_checks=False| as we don't want to do any check
# server side. This includes skipping the checks for the required keys.
scalar_info = ScalarType(category_name, probe_name, scalar_definition, strict_type_checks=False)
scalar_dict[scalar_info.label] = scalar_info # depends on [control=['for'], data=['probe_name']] # depends on [control=['for'], data=['category_name']]
return scalar_dict |
def main():
"""
NAME
plot_map_pts.py
DESCRIPTION
plots points on map
SYNTAX
plot_map_pts.py [command line options]
OPTIONS
-h prints help and quits
-sym [ro, bs, g^, r., b-, etc.] [1,5,10] symbol and size for points
colors are r=red,b=blue,g=green, etc.
symbols are '.' for points, ^, for triangle, s for square, etc.
-, for lines, -- for dotted lines, see matplotlib online documentation for plot()
-eye ELAT ELON [specify eyeball location]
-etp put on topography
-cmap color map [default is jet]
-f FILE, specify input file
-o color ocean blue/land green (default is not)
-res [c,l,i,h] specify resolution (crude, low, intermediate, high)
-fmt [pdf,eps, png] specify output format (default is pdf)
-R don't plot details of rivers
-B don't plot national/state boundaries, etc.
-pad [LAT LON] pad bounding box by LAT/LON (default is not)
-grd SPACE specify grid spacing
-sav save plot and quit
-prj PROJ, specify one of the supported projections:
pc = Plate Carree
aea = Albers Equal Area
aeqd = Azimuthal Equidistant
lcc = Lambert Conformal
lcyl = Lambert Cylindrical
merc = Mercator
mill = Miller Cylindrical
moll = Mollweide [default]
ortho = Orthographic
robin = Robinson
sinu = Sinusoidal
stere = Stereographic
tmerc = Transverse Mercator
utm = UTM
laea = Lambert Azimuthal Equal Area
geos = Geostationary
npstere = North-Polar Stereographic
spstere = South-Polar Stereographic
Special codes for MagIC formatted input files:
-n
-l
INPUTS
space or tab delimited LON LAT data
OR:
standard MagIC formatted er_sites or pmag_results table
DEFAULTS
res: c
prj: mollweide; lcc for MagIC format files
ELAT,ELON = 0,0
pad LAT,LON=0,0
NB: high resolution or lines can be very slow
"""
dir_path='.'
plot=0
ocean=0
res='c'
proj='moll'
Lats,Lons=[],[]
fmt='pdf'
sym='ro'
symsize=5
fancy=0
rivers,boundaries,ocean=1,1,0
latmin,latmax,lonmin,lonmax,lat_0,lon_0=-90,90,0.,360.,0.,0.
padlat,padlon,gridspace=0,0,30
lat_0,lon_0="",""
basemap=1
prn_name,prn_loc,names,locs=0,0,[],[]
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-res' in sys.argv:
ind = sys.argv.index('-res')
res=sys.argv[ind+1]
if res!= 'c' and res!='l':
print('this resolution will take a while - be patient')
if '-etp' in sys.argv:
fancy=1
print ('-W- plotting will require patience!')
if '-ctp' in sys.argv: basemap=0
if '-sav' in sys.argv: plot=1
if '-R' in sys.argv:rivers=0
if '-B' in sys.argv:boundaries=0
if '-o' in sys.argv:ocean=1
if '-cmap' in sys.argv:
ind = sys.argv.index('-cmap')
cmap=float(sys.argv[ind+1])
else:
cmap='jet'
if '-grd' in sys.argv:
ind = sys.argv.index('-grd')
gridspace=float(sys.argv[ind+1])
if '-eye' in sys.argv:
ind = sys.argv.index('-eye')
lat_0=float(sys.argv[ind+1])
lon_0=float(sys.argv[ind+2])
if '-sym' in sys.argv:
ind = sys.argv.index('-sym')
sym=sys.argv[ind+1]
symsize=int(sys.argv[ind+2])
if '-pad' in sys.argv:
ind = sys.argv.index('-pad')
padlat=float(sys.argv[ind+1])
padlon=float(sys.argv[ind+2])
if '-f' in sys.argv:
ind = sys.argv.index('-f')
file=dir_path+'/'+sys.argv[ind+1]
header=open(file,'r').readlines()[0].split('\t')
if 'tab' in header[0]:
proj='lcc'
if 'sites' in header[1]:
latkey='lat'
lonkey='lon'
namekey='site'
lockey=''
else:
print('file type not supported')
print(main.__doc__)
sys.exit()
Sites,file_type=pmag.magic_read(file)
Lats=pmag.get_dictkey(Sites,latkey,'f')
Lons=pmag.get_dictkey(Sites,lonkey,'f')
if prn_name==1:names=pmag.get_dictkey(Sites,namekey,'')
if prn_loc==1:names=pmag.get_dictkey(Sites,lockey,'')
else:
ptdata=numpy.loadtxt(file)
Lons=ptdata.transpose()[0]
Lats=ptdata.transpose()[1]
latmin=numpy.min(Lats)-padlat
lonmin=numpy.min(Lons)-padlon
latmax=numpy.max(Lats)+padlat
lonmax=numpy.max(Lons)+padlon
if lon_0=="":
lon_0=0.5*(lonmin+lonmax)
lat_0=0.5*(latmin+latmax)
else:
print("input file must be specified")
sys.exit()
if '-prj' in sys.argv:
ind = sys.argv.index('-prj')
proj=sys.argv[ind+1]
FIG={'map':1}
pmagplotlib.plot_init(FIG['map'],6,6)
cnt=0
Opts={'latmin':latmin,'latmax':latmax,'lonmin':lonmin,'lonmax':lonmax,'lat_0':lat_0,'lon_0':lon_0,'proj':proj,'sym':sym,'symsize':3,'pltgrid':1,'res':res,'boundinglat':0.,'padlon':padlon,'padlat':padlat,'gridspace':gridspace,'cmap':cmap}
Opts['details']={}
Opts['details']['coasts']=1
Opts['details']['rivers']=rivers
Opts['details']['states']=boundaries
Opts['details']['countries']=boundaries
Opts['details']['ocean']=ocean
Opts['details']['fancy']=fancy
if len(names)>0:Opts['names']=names
if len(locs)>0:Opts['loc_name']=locs
if proj=='merc':
Opts['latmin']=-70
Opts['latmax']=70
Opts['lonmin']=-180
Opts['lonmax']=180
print('please wait to draw points')
Opts['sym']=sym
Opts['symsize']=symsize
if basemap:
pmagplotlib.plot_map(FIG['map'],Lats,Lons,Opts)
else:
pmagplotlib.plot_map(FIG['map'],Lats,Lons,Opts)
files={}
titles={}
titles['map']='PT Map'
for key in list(FIG.keys()):
files[key]='map_pts'+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
FIG = pmagplotlib.add_borders(FIG,titles,black,purple)
pmagplotlib.save_plots(FIG,files)
if plot==1:
pmagplotlib.save_plots(FIG,files)
else:
pmagplotlib.draw_figs(FIG)
ans=input(" S[a]ve to save plot, Return to quit: ")
if ans=="a": pmagplotlib.save_plots(FIG,files) | def function[main, parameter[]]:
constant[
NAME
plot_map_pts.py
DESCRIPTION
plots points on map
SYNTAX
plot_map_pts.py [command line options]
OPTIONS
-h prints help and quits
-sym [ro, bs, g^, r., b-, etc.] [1,5,10] symbol and size for points
colors are r=red,b=blue,g=green, etc.
symbols are '.' for points, ^, for triangle, s for square, etc.
-, for lines, -- for dotted lines, see matplotlib online documentation for plot()
-eye ELAT ELON [specify eyeball location]
-etp put on topography
-cmap color map [default is jet]
-f FILE, specify input file
-o color ocean blue/land green (default is not)
        -res [c,l,i,h] specify resolution (crude, low, intermediate, high)
-fmt [pdf,eps, png] specify output format (default is pdf)
-R don't plot details of rivers
-B don't plot national/state boundaries, etc.
-pad [LAT LON] pad bounding box by LAT/LON (default is not)
-grd SPACE specify grid spacing
-sav save plot and quit
-prj PROJ, specify one of the supported projections:
pc = Plate Carree
aea = Albers Equal Area
aeqd = Azimuthal Equidistant
lcc = Lambert Conformal
lcyl = Lambert Cylindrical
merc = Mercator
mill = Miller Cylindrical
moll = Mollweide [default]
ortho = Orthographic
robin = Robinson
sinu = Sinusoidal
stere = Stereographic
tmerc = Transverse Mercator
utm = UTM
laea = Lambert Azimuthal Equal Area
geos = Geostationary
npstere = North-Polar Stereographic
spstere = South-Polar Stereographic
Special codes for MagIC formatted input files:
-n
-l
INPUTS
space or tab delimited LON LAT data
OR:
standard MagIC formatted er_sites or pmag_results table
DEFAULTS
res: c
prj: mollweide; lcc for MagIC format files
ELAT,ELON = 0,0
pad LAT,LON=0,0
NB: high resolution or lines can be very slow
]
variable[dir_path] assign[=] constant[.]
variable[plot] assign[=] constant[0]
variable[ocean] assign[=] constant[0]
variable[res] assign[=] constant[c]
variable[proj] assign[=] constant[moll]
<ast.Tuple object at 0x7da2046210c0> assign[=] tuple[[<ast.List object at 0x7da2046214e0>, <ast.List object at 0x7da2046208b0>]]
variable[fmt] assign[=] constant[pdf]
variable[sym] assign[=] constant[ro]
variable[symsize] assign[=] constant[5]
variable[fancy] assign[=] constant[0]
<ast.Tuple object at 0x7da2046208e0> assign[=] tuple[[<ast.Constant object at 0x7da204621fc0>, <ast.Constant object at 0x7da204623d90>, <ast.Constant object at 0x7da204622080>]]
<ast.Tuple object at 0x7da204623850> assign[=] tuple[[<ast.UnaryOp object at 0x7da2046205b0>, <ast.Constant object at 0x7da204620040>, <ast.Constant object at 0x7da204623940>, <ast.Constant object at 0x7da2046230d0>, <ast.Constant object at 0x7da2046218a0>, <ast.Constant object at 0x7da204621720>]]
<ast.Tuple object at 0x7da1b0404280> assign[=] tuple[[<ast.Constant object at 0x7da1b0406e90>, <ast.Constant object at 0x7da1b04042b0>, <ast.Constant object at 0x7da1b0406e60>]]
<ast.Tuple object at 0x7da1b0405000> assign[=] tuple[[<ast.Constant object at 0x7da1b04053f0>, <ast.Constant object at 0x7da1b04043d0>]]
variable[basemap] assign[=] constant[1]
<ast.Tuple object at 0x7da1b0405360> assign[=] tuple[[<ast.Constant object at 0x7da1b0406ec0>, <ast.Constant object at 0x7da1b0406c50>, <ast.List object at 0x7da1b0406c80>, <ast.List object at 0x7da1b04052a0>]]
if compare[constant[-WD] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-WD]]]
variable[dir_path] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-h] in name[sys].argv] begin[:]
call[name[print], parameter[name[main].__doc__]]
call[name[sys].exit, parameter[]]
if compare[constant[-fmt] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-fmt]]]
variable[fmt] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-res] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-res]]]
variable[res] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if <ast.BoolOp object at 0x7da1b0405990> begin[:]
call[name[print], parameter[constant[this resolution will take a while - be patient]]]
if compare[constant[-etp] in name[sys].argv] begin[:]
variable[fancy] assign[=] constant[1]
call[name[print], parameter[constant[-W- plotting will require patience!]]]
if compare[constant[-ctp] in name[sys].argv] begin[:]
variable[basemap] assign[=] constant[0]
if compare[constant[-sav] in name[sys].argv] begin[:]
variable[plot] assign[=] constant[1]
if compare[constant[-R] in name[sys].argv] begin[:]
variable[rivers] assign[=] constant[0]
if compare[constant[-B] in name[sys].argv] begin[:]
variable[boundaries] assign[=] constant[0]
if compare[constant[-o] in name[sys].argv] begin[:]
variable[ocean] assign[=] constant[1]
if compare[constant[-cmap] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-cmap]]]
variable[cmap] assign[=] call[name[float], parameter[call[name[sys].argv][binary_operation[name[ind] + constant[1]]]]]
if compare[constant[-grd] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-grd]]]
variable[gridspace] assign[=] call[name[float], parameter[call[name[sys].argv][binary_operation[name[ind] + constant[1]]]]]
if compare[constant[-eye] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-eye]]]
variable[lat_0] assign[=] call[name[float], parameter[call[name[sys].argv][binary_operation[name[ind] + constant[1]]]]]
variable[lon_0] assign[=] call[name[float], parameter[call[name[sys].argv][binary_operation[name[ind] + constant[2]]]]]
if compare[constant[-sym] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-sym]]]
variable[sym] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
variable[symsize] assign[=] call[name[int], parameter[call[name[sys].argv][binary_operation[name[ind] + constant[2]]]]]
if compare[constant[-pad] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-pad]]]
variable[padlat] assign[=] call[name[float], parameter[call[name[sys].argv][binary_operation[name[ind] + constant[1]]]]]
variable[padlon] assign[=] call[name[float], parameter[call[name[sys].argv][binary_operation[name[ind] + constant[2]]]]]
if compare[constant[-f] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-f]]]
variable[file] assign[=] binary_operation[binary_operation[name[dir_path] + constant[/]] + call[name[sys].argv][binary_operation[name[ind] + constant[1]]]]
variable[header] assign[=] call[call[call[call[name[open], parameter[name[file], constant[r]]].readlines, parameter[]]][constant[0]].split, parameter[constant[ ]]]
if compare[constant[tab] in call[name[header]][constant[0]]] begin[:]
variable[proj] assign[=] constant[lcc]
if compare[constant[sites] in call[name[header]][constant[1]]] begin[:]
variable[latkey] assign[=] constant[lat]
variable[lonkey] assign[=] constant[lon]
variable[namekey] assign[=] constant[site]
variable[lockey] assign[=] constant[]
<ast.Tuple object at 0x7da20c76efe0> assign[=] call[name[pmag].magic_read, parameter[name[file]]]
variable[Lats] assign[=] call[name[pmag].get_dictkey, parameter[name[Sites], name[latkey], constant[f]]]
variable[Lons] assign[=] call[name[pmag].get_dictkey, parameter[name[Sites], name[lonkey], constant[f]]]
if compare[name[prn_name] equal[==] constant[1]] begin[:]
variable[names] assign[=] call[name[pmag].get_dictkey, parameter[name[Sites], name[namekey], constant[]]]
if compare[name[prn_loc] equal[==] constant[1]] begin[:]
variable[names] assign[=] call[name[pmag].get_dictkey, parameter[name[Sites], name[lockey], constant[]]]
variable[latmin] assign[=] binary_operation[call[name[numpy].min, parameter[name[Lats]]] - name[padlat]]
variable[lonmin] assign[=] binary_operation[call[name[numpy].min, parameter[name[Lons]]] - name[padlon]]
variable[latmax] assign[=] binary_operation[call[name[numpy].max, parameter[name[Lats]]] + name[padlat]]
variable[lonmax] assign[=] binary_operation[call[name[numpy].max, parameter[name[Lons]]] + name[padlon]]
if compare[name[lon_0] equal[==] constant[]] begin[:]
variable[lon_0] assign[=] binary_operation[constant[0.5] * binary_operation[name[lonmin] + name[lonmax]]]
variable[lat_0] assign[=] binary_operation[constant[0.5] * binary_operation[name[latmin] + name[latmax]]]
if compare[constant[-prj] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-prj]]]
variable[proj] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
variable[FIG] assign[=] dictionary[[<ast.Constant object at 0x7da20c76f4c0>], [<ast.Constant object at 0x7da20c76fb80>]]
call[name[pmagplotlib].plot_init, parameter[call[name[FIG]][constant[map]], constant[6], constant[6]]]
variable[cnt] assign[=] constant[0]
variable[Opts] assign[=] dictionary[[<ast.Constant object at 0x7da20c76ecb0>, <ast.Constant object at 0x7da20c76ea10>, <ast.Constant object at 0x7da20c76e260>, <ast.Constant object at 0x7da20c76c700>, <ast.Constant object at 0x7da20c76ee60>, <ast.Constant object at 0x7da20c76f9a0>, <ast.Constant object at 0x7da20c76fd60>, <ast.Constant object at 0x7da20c76f8b0>, <ast.Constant object at 0x7da20c76e6b0>, <ast.Constant object at 0x7da20c76f760>, <ast.Constant object at 0x7da20c76c3d0>, <ast.Constant object at 0x7da20c76c610>, <ast.Constant object at 0x7da20c76e6e0>, <ast.Constant object at 0x7da20c76c5b0>, <ast.Constant object at 0x7da20c76db40>, <ast.Constant object at 0x7da20c76d2a0>], [<ast.Name object at 0x7da20c76d2d0>, <ast.Name object at 0x7da20c76c310>, <ast.Name object at 0x7da20c76c580>, <ast.Name object at 0x7da20c76fa90>, <ast.Name object at 0x7da20c76d720>, <ast.Name object at 0x7da20c76f220>, <ast.Name object at 0x7da20c76d9f0>, <ast.Name object at 0x7da20c76f520>, <ast.Constant object at 0x7da20c76f4f0>, <ast.Constant object at 0x7da20c76f2b0>, <ast.Name object at 0x7da20c76c250>, <ast.Constant object at 0x7da20c76e320>, <ast.Name object at 0x7da20c76d7e0>, <ast.Name object at 0x7da20c76fc70>, <ast.Name object at 0x7da20c76e1a0>, <ast.Name object at 0x7da20c76c0a0>]]
call[name[Opts]][constant[details]] assign[=] dictionary[[], []]
call[call[name[Opts]][constant[details]]][constant[coasts]] assign[=] constant[1]
call[call[name[Opts]][constant[details]]][constant[rivers]] assign[=] name[rivers]
call[call[name[Opts]][constant[details]]][constant[states]] assign[=] name[boundaries]
call[call[name[Opts]][constant[details]]][constant[countries]] assign[=] name[boundaries]
call[call[name[Opts]][constant[details]]][constant[ocean]] assign[=] name[ocean]
call[call[name[Opts]][constant[details]]][constant[fancy]] assign[=] name[fancy]
if compare[call[name[len], parameter[name[names]]] greater[>] constant[0]] begin[:]
call[name[Opts]][constant[names]] assign[=] name[names]
if compare[call[name[len], parameter[name[locs]]] greater[>] constant[0]] begin[:]
call[name[Opts]][constant[loc_name]] assign[=] name[locs]
if compare[name[proj] equal[==] constant[merc]] begin[:]
call[name[Opts]][constant[latmin]] assign[=] <ast.UnaryOp object at 0x7da20c76d1b0>
call[name[Opts]][constant[latmax]] assign[=] constant[70]
call[name[Opts]][constant[lonmin]] assign[=] <ast.UnaryOp object at 0x7da20c76e020>
call[name[Opts]][constant[lonmax]] assign[=] constant[180]
call[name[print], parameter[constant[please wait to draw points]]]
call[name[Opts]][constant[sym]] assign[=] name[sym]
call[name[Opts]][constant[symsize]] assign[=] name[symsize]
if name[basemap] begin[:]
call[name[pmagplotlib].plot_map, parameter[call[name[FIG]][constant[map]], name[Lats], name[Lons], name[Opts]]]
variable[files] assign[=] dictionary[[], []]
variable[titles] assign[=] dictionary[[], []]
call[name[titles]][constant[map]] assign[=] constant[PT Map]
for taget[name[key]] in starred[call[name[list], parameter[call[name[FIG].keys, parameter[]]]]] begin[:]
call[name[files]][name[key]] assign[=] binary_operation[binary_operation[constant[map_pts] + constant[.]] + name[fmt]]
if name[pmagplotlib].isServer begin[:]
variable[black] assign[=] constant[#000000]
variable[purple] assign[=] constant[#800080]
variable[FIG] assign[=] call[name[pmagplotlib].add_borders, parameter[name[FIG], name[titles], name[black], name[purple]]]
call[name[pmagplotlib].save_plots, parameter[name[FIG], name[files]]]
if compare[name[plot] equal[==] constant[1]] begin[:]
call[name[pmagplotlib].save_plots, parameter[name[FIG], name[files]]] | keyword[def] identifier[main] ():
literal[string]
identifier[dir_path] = literal[string]
identifier[plot] = literal[int]
identifier[ocean] = literal[int]
identifier[res] = literal[string]
identifier[proj] = literal[string]
identifier[Lats] , identifier[Lons] =[],[]
identifier[fmt] = literal[string]
identifier[sym] = literal[string]
identifier[symsize] = literal[int]
identifier[fancy] = literal[int]
identifier[rivers] , identifier[boundaries] , identifier[ocean] = literal[int] , literal[int] , literal[int]
identifier[latmin] , identifier[latmax] , identifier[lonmin] , identifier[lonmax] , identifier[lat_0] , identifier[lon_0] =- literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int]
identifier[padlat] , identifier[padlon] , identifier[gridspace] = literal[int] , literal[int] , literal[int]
identifier[lat_0] , identifier[lon_0] = literal[string] , literal[string]
identifier[basemap] = literal[int]
identifier[prn_name] , identifier[prn_loc] , identifier[names] , identifier[locs] = literal[int] , literal[int] ,[],[]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[dir_path] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[print] ( identifier[main] . identifier[__doc__] )
identifier[sys] . identifier[exit] ()
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[fmt] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[res] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] identifier[res] != literal[string] keyword[and] identifier[res] != literal[string] :
identifier[print] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[fancy] = literal[int]
identifier[print] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[basemap] = literal[int]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[plot] = literal[int]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[rivers] = literal[int]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[boundaries] = literal[int]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[ocean] = literal[int]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[cmap] = identifier[float] ( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
keyword[else] :
identifier[cmap] = literal[string]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[gridspace] = identifier[float] ( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[lat_0] = identifier[float] ( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
identifier[lon_0] = identifier[float] ( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[sym] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
identifier[symsize] = identifier[int] ( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[padlat] = identifier[float] ( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
identifier[padlon] = identifier[float] ( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[file] = identifier[dir_path] + literal[string] + identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
identifier[header] = identifier[open] ( identifier[file] , literal[string] ). identifier[readlines] ()[ literal[int] ]. identifier[split] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[header] [ literal[int] ]:
identifier[proj] = literal[string]
keyword[if] literal[string] keyword[in] identifier[header] [ literal[int] ]:
identifier[latkey] = literal[string]
identifier[lonkey] = literal[string]
identifier[namekey] = literal[string]
identifier[lockey] = literal[string]
keyword[else] :
identifier[print] ( literal[string] )
identifier[print] ( identifier[main] . identifier[__doc__] )
identifier[sys] . identifier[exit] ()
identifier[Sites] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[file] )
identifier[Lats] = identifier[pmag] . identifier[get_dictkey] ( identifier[Sites] , identifier[latkey] , literal[string] )
identifier[Lons] = identifier[pmag] . identifier[get_dictkey] ( identifier[Sites] , identifier[lonkey] , literal[string] )
keyword[if] identifier[prn_name] == literal[int] : identifier[names] = identifier[pmag] . identifier[get_dictkey] ( identifier[Sites] , identifier[namekey] , literal[string] )
keyword[if] identifier[prn_loc] == literal[int] : identifier[names] = identifier[pmag] . identifier[get_dictkey] ( identifier[Sites] , identifier[lockey] , literal[string] )
keyword[else] :
identifier[ptdata] = identifier[numpy] . identifier[loadtxt] ( identifier[file] )
identifier[Lons] = identifier[ptdata] . identifier[transpose] ()[ literal[int] ]
identifier[Lats] = identifier[ptdata] . identifier[transpose] ()[ literal[int] ]
identifier[latmin] = identifier[numpy] . identifier[min] ( identifier[Lats] )- identifier[padlat]
identifier[lonmin] = identifier[numpy] . identifier[min] ( identifier[Lons] )- identifier[padlon]
identifier[latmax] = identifier[numpy] . identifier[max] ( identifier[Lats] )+ identifier[padlat]
identifier[lonmax] = identifier[numpy] . identifier[max] ( identifier[Lons] )+ identifier[padlon]
keyword[if] identifier[lon_0] == literal[string] :
identifier[lon_0] = literal[int] *( identifier[lonmin] + identifier[lonmax] )
identifier[lat_0] = literal[int] *( identifier[latmin] + identifier[latmax] )
keyword[else] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ()
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[proj] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
identifier[FIG] ={ literal[string] : literal[int] }
identifier[pmagplotlib] . identifier[plot_init] ( identifier[FIG] [ literal[string] ], literal[int] , literal[int] )
identifier[cnt] = literal[int]
identifier[Opts] ={ literal[string] : identifier[latmin] , literal[string] : identifier[latmax] , literal[string] : identifier[lonmin] , literal[string] : identifier[lonmax] , literal[string] : identifier[lat_0] , literal[string] : identifier[lon_0] , literal[string] : identifier[proj] , literal[string] : identifier[sym] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : identifier[res] , literal[string] : literal[int] , literal[string] : identifier[padlon] , literal[string] : identifier[padlat] , literal[string] : identifier[gridspace] , literal[string] : identifier[cmap] }
identifier[Opts] [ literal[string] ]={}
identifier[Opts] [ literal[string] ][ literal[string] ]= literal[int]
identifier[Opts] [ literal[string] ][ literal[string] ]= identifier[rivers]
identifier[Opts] [ literal[string] ][ literal[string] ]= identifier[boundaries]
identifier[Opts] [ literal[string] ][ literal[string] ]= identifier[boundaries]
identifier[Opts] [ literal[string] ][ literal[string] ]= identifier[ocean]
identifier[Opts] [ literal[string] ][ literal[string] ]= identifier[fancy]
keyword[if] identifier[len] ( identifier[names] )> literal[int] : identifier[Opts] [ literal[string] ]= identifier[names]
keyword[if] identifier[len] ( identifier[locs] )> literal[int] : identifier[Opts] [ literal[string] ]= identifier[locs]
keyword[if] identifier[proj] == literal[string] :
identifier[Opts] [ literal[string] ]=- literal[int]
identifier[Opts] [ literal[string] ]= literal[int]
identifier[Opts] [ literal[string] ]=- literal[int]
identifier[Opts] [ literal[string] ]= literal[int]
identifier[print] ( literal[string] )
identifier[Opts] [ literal[string] ]= identifier[sym]
identifier[Opts] [ literal[string] ]= identifier[symsize]
keyword[if] identifier[basemap] :
identifier[pmagplotlib] . identifier[plot_map] ( identifier[FIG] [ literal[string] ], identifier[Lats] , identifier[Lons] , identifier[Opts] )
keyword[else] :
identifier[pmagplotlib] . identifier[plot_map] ( identifier[FIG] [ literal[string] ], identifier[Lats] , identifier[Lons] , identifier[Opts] )
identifier[files] ={}
identifier[titles] ={}
identifier[titles] [ literal[string] ]= literal[string]
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[FIG] . identifier[keys] ()):
identifier[files] [ identifier[key] ]= literal[string] + literal[string] + identifier[fmt]
keyword[if] identifier[pmagplotlib] . identifier[isServer] :
identifier[black] = literal[string]
identifier[purple] = literal[string]
identifier[FIG] = identifier[pmagplotlib] . identifier[add_borders] ( identifier[FIG] , identifier[titles] , identifier[black] , identifier[purple] )
identifier[pmagplotlib] . identifier[save_plots] ( identifier[FIG] , identifier[files] )
keyword[if] identifier[plot] == literal[int] :
identifier[pmagplotlib] . identifier[save_plots] ( identifier[FIG] , identifier[files] )
keyword[else] :
identifier[pmagplotlib] . identifier[draw_figs] ( identifier[FIG] )
identifier[ans] = identifier[input] ( literal[string] )
keyword[if] identifier[ans] == literal[string] : identifier[pmagplotlib] . identifier[save_plots] ( identifier[FIG] , identifier[files] ) | def main():
"""
NAME
plot_map_pts.py
DESCRIPTION
plots points on map
SYNTAX
plot_map_pts.py [command line options]
OPTIONS
-h prints help and quits
-sym [ro, bs, g^, r., b-, etc.] [1,5,10] symbol and size for points
colors are r=red,b=blue,g=green, etc.
symbols are '.' for points, ^, for triangle, s for square, etc.
-, for lines, -- for dotted lines, see matplotlib online documentation for plot()
-eye ELAT ELON [specify eyeball location]
-etp put on topography
-cmap color map [default is jet]
-f FILE, specify input file
-o color ocean blue/land green (default is not)
        -res [c,l,i,h] specify resolution (crude, low, intermediate, high)
-fmt [pdf,eps, png] specify output format (default is pdf)
-R don't plot details of rivers
-B don't plot national/state boundaries, etc.
-pad [LAT LON] pad bounding box by LAT/LON (default is not)
-grd SPACE specify grid spacing
-sav save plot and quit
-prj PROJ, specify one of the supported projections:
pc = Plate Carree
aea = Albers Equal Area
aeqd = Azimuthal Equidistant
lcc = Lambert Conformal
lcyl = Lambert Cylindrical
merc = Mercator
mill = Miller Cylindrical
moll = Mollweide [default]
ortho = Orthographic
robin = Robinson
sinu = Sinusoidal
stere = Stereographic
tmerc = Transverse Mercator
utm = UTM
laea = Lambert Azimuthal Equal Area
geos = Geostationary
npstere = North-Polar Stereographic
spstere = South-Polar Stereographic
Special codes for MagIC formatted input files:
-n
-l
INPUTS
space or tab delimited LON LAT data
OR:
standard MagIC formatted er_sites or pmag_results table
DEFAULTS
res: c
prj: mollweide; lcc for MagIC format files
ELAT,ELON = 0,0
pad LAT,LON=0,0
NB: high resolution or lines can be very slow
"""
dir_path = '.'
plot = 0
ocean = 0
res = 'c'
proj = 'moll'
(Lats, Lons) = ([], [])
fmt = 'pdf'
sym = 'ro'
symsize = 5
fancy = 0
(rivers, boundaries, ocean) = (1, 1, 0)
(latmin, latmax, lonmin, lonmax, lat_0, lon_0) = (-90, 90, 0.0, 360.0, 0.0, 0.0)
(padlat, padlon, gridspace) = (0, 0, 30)
(lat_0, lon_0) = ('', '')
basemap = 1
(prn_name, prn_loc, names, locs) = (0, 0, [], [])
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path = sys.argv[ind + 1] # depends on [control=['if'], data=[]]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit() # depends on [control=['if'], data=[]]
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt = sys.argv[ind + 1] # depends on [control=['if'], data=[]]
if '-res' in sys.argv:
ind = sys.argv.index('-res')
res = sys.argv[ind + 1]
if res != 'c' and res != 'l':
print('this resolution will take a while - be patient') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if '-etp' in sys.argv:
fancy = 1
print('-W- plotting will require patience!') # depends on [control=['if'], data=[]]
if '-ctp' in sys.argv:
basemap = 0 # depends on [control=['if'], data=[]]
if '-sav' in sys.argv:
plot = 1 # depends on [control=['if'], data=[]]
if '-R' in sys.argv:
rivers = 0 # depends on [control=['if'], data=[]]
if '-B' in sys.argv:
boundaries = 0 # depends on [control=['if'], data=[]]
if '-o' in sys.argv:
ocean = 1 # depends on [control=['if'], data=[]]
if '-cmap' in sys.argv:
ind = sys.argv.index('-cmap')
cmap = float(sys.argv[ind + 1]) # depends on [control=['if'], data=[]]
else:
cmap = 'jet'
if '-grd' in sys.argv:
ind = sys.argv.index('-grd')
gridspace = float(sys.argv[ind + 1]) # depends on [control=['if'], data=[]]
if '-eye' in sys.argv:
ind = sys.argv.index('-eye')
lat_0 = float(sys.argv[ind + 1])
lon_0 = float(sys.argv[ind + 2]) # depends on [control=['if'], data=[]]
if '-sym' in sys.argv:
ind = sys.argv.index('-sym')
sym = sys.argv[ind + 1]
symsize = int(sys.argv[ind + 2]) # depends on [control=['if'], data=[]]
if '-pad' in sys.argv:
ind = sys.argv.index('-pad')
padlat = float(sys.argv[ind + 1])
padlon = float(sys.argv[ind + 2]) # depends on [control=['if'], data=[]]
if '-f' in sys.argv:
ind = sys.argv.index('-f')
file = dir_path + '/' + sys.argv[ind + 1]
header = open(file, 'r').readlines()[0].split('\t')
if 'tab' in header[0]:
proj = 'lcc'
if 'sites' in header[1]:
latkey = 'lat'
lonkey = 'lon'
namekey = 'site'
lockey = '' # depends on [control=['if'], data=[]]
else:
print('file type not supported')
print(main.__doc__)
sys.exit()
(Sites, file_type) = pmag.magic_read(file)
Lats = pmag.get_dictkey(Sites, latkey, 'f')
Lons = pmag.get_dictkey(Sites, lonkey, 'f')
if prn_name == 1:
names = pmag.get_dictkey(Sites, namekey, '') # depends on [control=['if'], data=[]]
if prn_loc == 1:
names = pmag.get_dictkey(Sites, lockey, '') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
ptdata = numpy.loadtxt(file)
Lons = ptdata.transpose()[0]
Lats = ptdata.transpose()[1]
latmin = numpy.min(Lats) - padlat
lonmin = numpy.min(Lons) - padlon
latmax = numpy.max(Lats) + padlat
lonmax = numpy.max(Lons) + padlon
if lon_0 == '':
lon_0 = 0.5 * (lonmin + lonmax)
lat_0 = 0.5 * (latmin + latmax) # depends on [control=['if'], data=['lon_0']] # depends on [control=['if'], data=[]]
else:
print('input file must be specified')
sys.exit()
if '-prj' in sys.argv:
ind = sys.argv.index('-prj')
proj = sys.argv[ind + 1] # depends on [control=['if'], data=[]]
FIG = {'map': 1}
pmagplotlib.plot_init(FIG['map'], 6, 6)
cnt = 0
Opts = {'latmin': latmin, 'latmax': latmax, 'lonmin': lonmin, 'lonmax': lonmax, 'lat_0': lat_0, 'lon_0': lon_0, 'proj': proj, 'sym': sym, 'symsize': 3, 'pltgrid': 1, 'res': res, 'boundinglat': 0.0, 'padlon': padlon, 'padlat': padlat, 'gridspace': gridspace, 'cmap': cmap}
Opts['details'] = {}
Opts['details']['coasts'] = 1
Opts['details']['rivers'] = rivers
Opts['details']['states'] = boundaries
Opts['details']['countries'] = boundaries
Opts['details']['ocean'] = ocean
Opts['details']['fancy'] = fancy
if len(names) > 0:
Opts['names'] = names # depends on [control=['if'], data=[]]
if len(locs) > 0:
Opts['loc_name'] = locs # depends on [control=['if'], data=[]]
if proj == 'merc':
Opts['latmin'] = -70
Opts['latmax'] = 70
Opts['lonmin'] = -180
Opts['lonmax'] = 180 # depends on [control=['if'], data=[]]
print('please wait to draw points')
Opts['sym'] = sym
Opts['symsize'] = symsize
if basemap:
pmagplotlib.plot_map(FIG['map'], Lats, Lons, Opts) # depends on [control=['if'], data=[]]
else:
pmagplotlib.plot_map(FIG['map'], Lats, Lons, Opts)
files = {}
titles = {}
titles['map'] = 'PT Map'
for key in list(FIG.keys()):
files[key] = 'map_pts' + '.' + fmt # depends on [control=['for'], data=['key']]
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
FIG = pmagplotlib.add_borders(FIG, titles, black, purple)
pmagplotlib.save_plots(FIG, files) # depends on [control=['if'], data=[]]
if plot == 1:
pmagplotlib.save_plots(FIG, files) # depends on [control=['if'], data=[]]
else:
pmagplotlib.draw_figs(FIG)
ans = input(' S[a]ve to save plot, Return to quit: ')
if ans == 'a':
pmagplotlib.save_plots(FIG, files) # depends on [control=['if'], data=[]] |
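
Since `main()` above is entirely flag-driven, a concrete invocation may help. The snippet below only writes the plain whitespace-delimited "LON LAT" file the docstring describes (coordinates and file names are made up); the command in the comment uses flags taken from that same docstring.

```python
# Write a minimal "LON LAT" input file for the -f option (coordinates are made up).
with open('pts.txt', 'w') as f:
    f.write('230.5 42.1\n120.0 -33.7\n15.25 61.0\n')

# A typical run would then look something like:
#   plot_map_pts.py -f pts.txt -prj merc -sym bs 10 -pad 5 5 -grd 15 -sav -fmt png
# With -sav and -fmt png the figure is written to map_pts.png, since the code above
# builds the output name as 'map_pts' + '.' + fmt.
```
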
def transform_login(config):
"""
Parse login data as dict. Called from load_from_file and
also can be used when collecting information from other
sources as well.
    :param dict config: data representing the valid key/value pairs
from smcrc
    :return: dict of settings that can be sent into session.login
"""
verify = True
if config.pop('smc_ssl', None):
scheme = 'https'
verify = config.pop('ssl_cert_file', None)
if config.pop('verify_ssl', None):
# Get cert path to verify
if not verify: # Setting omitted or already False
verify = False
else:
verify = False
else:
scheme = 'http'
config.pop('verify_ssl', None)
config.pop('ssl_cert_file', None)
verify = False
transformed = {}
url = '{}://{}:{}'.format(
scheme,
config.pop('smc_address', None),
config.pop('smc_port', None))
timeout = config.pop('timeout', None)
if timeout:
try:
timeout = int(timeout)
except ValueError:
timeout = None
api_version = config.pop('api_version', None)
if api_version:
try:
float(api_version)
except ValueError:
api_version = None
transformed.update(
url=url,
api_key=config.pop('smc_apikey', None),
api_version=api_version,
verify=verify,
timeout=timeout,
domain=config.pop('domain', None))
if config:
transformed.update(kwargs=config) # Any remaining args
return transformed | def function[transform_login, parameter[config]]:
constant[
Parse login data as dict. Called from load_from_file and
also can be used when collecting information from other
sources as well.
    :param dict config: data representing the valid key/value pairs
from smcrc
    :return: dict of settings that can be sent into session.login
]
variable[verify] assign[=] constant[True]
if call[name[config].pop, parameter[constant[smc_ssl], constant[None]]] begin[:]
variable[scheme] assign[=] constant[https]
variable[verify] assign[=] call[name[config].pop, parameter[constant[ssl_cert_file], constant[None]]]
if call[name[config].pop, parameter[constant[verify_ssl], constant[None]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1bc2da0> begin[:]
variable[verify] assign[=] constant[False]
variable[transformed] assign[=] dictionary[[], []]
variable[url] assign[=] call[constant[{}://{}:{}].format, parameter[name[scheme], call[name[config].pop, parameter[constant[smc_address], constant[None]]], call[name[config].pop, parameter[constant[smc_port], constant[None]]]]]
variable[timeout] assign[=] call[name[config].pop, parameter[constant[timeout], constant[None]]]
if name[timeout] begin[:]
<ast.Try object at 0x7da1b1a2a290>
variable[api_version] assign[=] call[name[config].pop, parameter[constant[api_version], constant[None]]]
if name[api_version] begin[:]
<ast.Try object at 0x7da1b1a28220>
call[name[transformed].update, parameter[]]
if name[config] begin[:]
call[name[transformed].update, parameter[]]
return[name[transformed]] | keyword[def] identifier[transform_login] ( identifier[config] ):
literal[string]
identifier[verify] = keyword[True]
keyword[if] identifier[config] . identifier[pop] ( literal[string] , keyword[None] ):
identifier[scheme] = literal[string]
identifier[verify] = identifier[config] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[config] . identifier[pop] ( literal[string] , keyword[None] ):
keyword[if] keyword[not] identifier[verify] :
identifier[verify] = keyword[False]
keyword[else] :
identifier[verify] = keyword[False]
keyword[else] :
identifier[scheme] = literal[string]
identifier[config] . identifier[pop] ( literal[string] , keyword[None] )
identifier[config] . identifier[pop] ( literal[string] , keyword[None] )
identifier[verify] = keyword[False]
identifier[transformed] ={}
identifier[url] = literal[string] . identifier[format] (
identifier[scheme] ,
identifier[config] . identifier[pop] ( literal[string] , keyword[None] ),
identifier[config] . identifier[pop] ( literal[string] , keyword[None] ))
identifier[timeout] = identifier[config] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[timeout] :
keyword[try] :
identifier[timeout] = identifier[int] ( identifier[timeout] )
keyword[except] identifier[ValueError] :
identifier[timeout] = keyword[None]
identifier[api_version] = identifier[config] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[api_version] :
keyword[try] :
identifier[float] ( identifier[api_version] )
keyword[except] identifier[ValueError] :
identifier[api_version] = keyword[None]
identifier[transformed] . identifier[update] (
identifier[url] = identifier[url] ,
identifier[api_key] = identifier[config] . identifier[pop] ( literal[string] , keyword[None] ),
identifier[api_version] = identifier[api_version] ,
identifier[verify] = identifier[verify] ,
identifier[timeout] = identifier[timeout] ,
identifier[domain] = identifier[config] . identifier[pop] ( literal[string] , keyword[None] ))
keyword[if] identifier[config] :
identifier[transformed] . identifier[update] ( identifier[kwargs] = identifier[config] )
keyword[return] identifier[transformed] | def transform_login(config):
"""
Parse login data as dict. Called from load_from_file and
also can be used when collecting information from other
sources as well.
    :param dict config: data representing the valid key/value pairs
from smcrc
    :return: dict of settings that can be sent into session.login
"""
verify = True
if config.pop('smc_ssl', None):
scheme = 'https'
verify = config.pop('ssl_cert_file', None)
if config.pop('verify_ssl', None):
# Get cert path to verify
if not verify: # Setting omitted or already False
verify = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
verify = False # depends on [control=['if'], data=[]]
else:
scheme = 'http'
config.pop('verify_ssl', None)
config.pop('ssl_cert_file', None)
verify = False
transformed = {}
url = '{}://{}:{}'.format(scheme, config.pop('smc_address', None), config.pop('smc_port', None))
timeout = config.pop('timeout', None)
if timeout:
try:
timeout = int(timeout) # depends on [control=['try'], data=[]]
except ValueError:
timeout = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
api_version = config.pop('api_version', None)
if api_version:
try:
float(api_version) # depends on [control=['try'], data=[]]
except ValueError:
api_version = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
transformed.update(url=url, api_key=config.pop('smc_apikey', None), api_version=api_version, verify=verify, timeout=timeout, domain=config.pop('domain', None))
if config:
transformed.update(kwargs=config) # Any remaining args # depends on [control=['if'], data=[]]
return transformed |
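
`transform_login` is a pure dict-to-dict transformation, so its behaviour can be traced straight from the code above. A small illustrative call, assuming the function is in scope; the address, key, certificate path and domain below are placeholders, not real SMC settings.

```python
config = {
    'smc_address': '192.168.1.1',       # placeholder management-server address
    'smc_port': '8082',
    'smc_apikey': 'abc123',             # placeholder API key
    'smc_ssl': 'True',
    'verify_ssl': 'True',
    'ssl_cert_file': '/path/to/cert.pem',
    'timeout': '30',
    'domain': 'Shared Domain',
}
settings = transform_login(config)
# Following the code above, the result should contain:
#   settings['url']     == 'https://192.168.1.1:8082'
#   settings['verify']  == '/path/to/cert.pem'   (kept because verify_ssl was set)
#   settings['timeout'] == 30                    (coerced to int)
#   settings['api_key'] == 'abc123'
# Any keys that were not popped would be passed through under settings['kwargs'].
```
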
def follower_num(self):
"""获取关注此收藏夹的人数.
:return: 关注此收藏夹的人数
:rtype: int
"""
href = re_collection_url_split.match(self.url).group(1)
return int(self.soup.find('a', href=href + 'followers').text) | def function[follower_num, parameter[self]]:
    constant[Get the number of people following this collection.
    :return: the number of people following this collection
:rtype: int
]
variable[href] assign[=] call[call[name[re_collection_url_split].match, parameter[name[self].url]].group, parameter[constant[1]]]
return[call[name[int], parameter[call[name[self].soup.find, parameter[constant[a]]].text]]] | keyword[def] identifier[follower_num] ( identifier[self] ):
literal[string]
identifier[href] = identifier[re_collection_url_split] . identifier[match] ( identifier[self] . identifier[url] ). identifier[group] ( literal[int] )
keyword[return] identifier[int] ( identifier[self] . identifier[soup] . identifier[find] ( literal[string] , identifier[href] = identifier[href] + literal[string] ). identifier[text] ) | def follower_num(self):
"""获取关注此收藏夹的人数.
:return: 关注此收藏夹的人数
:rtype: int
"""
href = re_collection_url_split.match(self.url).group(1)
return int(self.soup.find('a', href=href + 'followers').text) |
def parsehours (hrstr):
"""Parse a string formatted as sexagesimal hours into an angle.
This function converts a textual representation of an angle, measured in
hours, into a floating point value measured in radians. The format of
*hrstr* is very limited: it may not have leading or trailing whitespace,
and the components of the sexagesimal representation must be separated by
colons. The input must therefore resemble something like
``"12:34:56.78"``. A :exc:`ValueError` will be raised if the input does
not resemble this template. Hours greater than 24 are not allowed, but
negative values are.
"""
hr = _parsesexagesimal (hrstr, 'hours', False)
if hr >= 24:
raise ValueError ('illegal hour specification: ' + hrstr)
return hr * H2R | def function[parsehours, parameter[hrstr]]:
constant[Parse a string formatted as sexagesimal hours into an angle.
This function converts a textual representation of an angle, measured in
hours, into a floating point value measured in radians. The format of
*hrstr* is very limited: it may not have leading or trailing whitespace,
and the components of the sexagesimal representation must be separated by
colons. The input must therefore resemble something like
``"12:34:56.78"``. A :exc:`ValueError` will be raised if the input does
not resemble this template. Hours greater than 24 are not allowed, but
negative values are.
]
variable[hr] assign[=] call[name[_parsesexagesimal], parameter[name[hrstr], constant[hours], constant[False]]]
if compare[name[hr] greater_or_equal[>=] constant[24]] begin[:]
<ast.Raise object at 0x7da1b27a6560>
return[binary_operation[name[hr] * name[H2R]]] | keyword[def] identifier[parsehours] ( identifier[hrstr] ):
literal[string]
identifier[hr] = identifier[_parsesexagesimal] ( identifier[hrstr] , literal[string] , keyword[False] )
keyword[if] identifier[hr] >= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[hrstr] )
keyword[return] identifier[hr] * identifier[H2R] | def parsehours(hrstr):
"""Parse a string formatted as sexagesimal hours into an angle.
This function converts a textual representation of an angle, measured in
hours, into a floating point value measured in radians. The format of
*hrstr* is very limited: it may not have leading or trailing whitespace,
and the components of the sexagesimal representation must be separated by
colons. The input must therefore resemble something like
``"12:34:56.78"``. A :exc:`ValueError` will be raised if the input does
not resemble this template. Hours greater than 24 are not allowed, but
negative values are.
"""
hr = _parsesexagesimal(hrstr, 'hours', False)
if hr >= 24:
raise ValueError('illegal hour specification: ' + hrstr) # depends on [control=['if'], data=[]]
return hr * H2R |
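
`parsehours` leans on the module-level `_parsesexagesimal` helper and `H2R` constant, neither of which is shown here. As a sanity check of the arithmetic only: 24 hours correspond to a full circle, so six hours should map to pi/2. The stand-in below assumes `H2R == pi/12` and a full "H:M:S" string; it is an illustration, not the module's implementation.

```python
import math

def hours_to_radians(hrstr):
    # Illustrative stand-in: split "HH:MM:SS.SS" on colons and convert to radians.
    sign = -1.0 if hrstr.startswith('-') else 1.0
    h, m, s = (float(part) for part in hrstr.lstrip('-').split(':'))
    return sign * (h + m / 60.0 + s / 3600.0) * (math.pi / 12.0)  # assumes H2R == pi/12

print(hours_to_radians('6:00:00'))      # ~1.5708 (pi/2)
print(hours_to_radians('12:34:56.78'))  # ~3.2941
```
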
def info(self):
"""
tuple of the start_pc, end_pc, handler_pc and catch_type_ref
"""
return (self.start_pc, self.end_pc,
self.handler_pc, self.get_catch_type()) | def function[info, parameter[self]]:
constant[
tuple of the start_pc, end_pc, handler_pc and catch_type_ref
]
return[tuple[[<ast.Attribute object at 0x7da1b0b1bdf0>, <ast.Attribute object at 0x7da1b0b1a890>, <ast.Attribute object at 0x7da1b0b190f0>, <ast.Call object at 0x7da1b0b19840>]]] | keyword[def] identifier[info] ( identifier[self] ):
literal[string]
keyword[return] ( identifier[self] . identifier[start_pc] , identifier[self] . identifier[end_pc] ,
identifier[self] . identifier[handler_pc] , identifier[self] . identifier[get_catch_type] ()) | def info(self):
"""
tuple of the start_pc, end_pc, handler_pc and catch_type_ref
"""
return (self.start_pc, self.end_pc, self.handler_pc, self.get_catch_type()) |
def connect(self):
""" Todo connect """
self.transport = Transport(self.token, on_connect=self.on_connect, on_message=self.on_message) | def function[connect, parameter[self]]:
constant[ Todo connect ]
name[self].transport assign[=] call[name[Transport], parameter[name[self].token]] | keyword[def] identifier[connect] ( identifier[self] ):
literal[string]
identifier[self] . identifier[transport] = identifier[Transport] ( identifier[self] . identifier[token] , identifier[on_connect] = identifier[self] . identifier[on_connect] , identifier[on_message] = identifier[self] . identifier[on_message] ) | def connect(self):
""" Todo connect """
self.transport = Transport(self.token, on_connect=self.on_connect, on_message=self.on_message) |
def filter_by_analysis_period(self, analysis_period):
"""Filter the Data Collection based on an analysis period.
Args:
analysis period: A Ladybug analysis period
Return:
A new Data Collection with filtered data
"""
self._check_analysis_period(analysis_period)
analysis_period = self._get_analysis_period_subset(analysis_period)
if analysis_period.st_hour == 0 and analysis_period.end_hour == 23:
# We can still return an Hourly Continuous Data Collection
t_s = 60 / analysis_period.timestep
st_ind = int((analysis_period.st_time.moy / t_s) -
(self.header.analysis_period.st_time.moy / t_s))
end_ind = int((analysis_period.end_time.moy / t_s) -
(analysis_period.st_time.moy / t_s) + st_ind + 1)
if end_ind > st_ind:
_filt_values = self._values[st_ind:end_ind]
else:
_filt_values = self._values[st_ind:] + self._values[:end_ind]
_filt_header = self.header.duplicate()
_filt_header._analysis_period = analysis_period
return HourlyContinuousCollection(_filt_header, _filt_values)
else:
# Filter using HOYs and the result cannot be continuous
_filtered_data = self.filter_by_moys(analysis_period.moys)
_filtered_data.header._analysis_period = analysis_period
return _filtered_data | def function[filter_by_analysis_period, parameter[self, analysis_period]]:
constant[Filter the Data Collection based on an analysis period.
Args:
analysis period: A Ladybug analysis period
Return:
A new Data Collection with filtered data
]
call[name[self]._check_analysis_period, parameter[name[analysis_period]]]
variable[analysis_period] assign[=] call[name[self]._get_analysis_period_subset, parameter[name[analysis_period]]]
if <ast.BoolOp object at 0x7da1b1214f10> begin[:]
variable[t_s] assign[=] binary_operation[constant[60] / name[analysis_period].timestep]
variable[st_ind] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[analysis_period].st_time.moy / name[t_s]] - binary_operation[name[self].header.analysis_period.st_time.moy / name[t_s]]]]]
variable[end_ind] assign[=] call[name[int], parameter[binary_operation[binary_operation[binary_operation[binary_operation[name[analysis_period].end_time.moy / name[t_s]] - binary_operation[name[analysis_period].st_time.moy / name[t_s]]] + name[st_ind]] + constant[1]]]]
if compare[name[end_ind] greater[>] name[st_ind]] begin[:]
variable[_filt_values] assign[=] call[name[self]._values][<ast.Slice object at 0x7da1b1215e10>]
variable[_filt_header] assign[=] call[name[self].header.duplicate, parameter[]]
name[_filt_header]._analysis_period assign[=] name[analysis_period]
return[call[name[HourlyContinuousCollection], parameter[name[_filt_header], name[_filt_values]]]] | keyword[def] identifier[filter_by_analysis_period] ( identifier[self] , identifier[analysis_period] ):
literal[string]
identifier[self] . identifier[_check_analysis_period] ( identifier[analysis_period] )
identifier[analysis_period] = identifier[self] . identifier[_get_analysis_period_subset] ( identifier[analysis_period] )
keyword[if] identifier[analysis_period] . identifier[st_hour] == literal[int] keyword[and] identifier[analysis_period] . identifier[end_hour] == literal[int] :
identifier[t_s] = literal[int] / identifier[analysis_period] . identifier[timestep]
identifier[st_ind] = identifier[int] (( identifier[analysis_period] . identifier[st_time] . identifier[moy] / identifier[t_s] )-
( identifier[self] . identifier[header] . identifier[analysis_period] . identifier[st_time] . identifier[moy] / identifier[t_s] ))
identifier[end_ind] = identifier[int] (( identifier[analysis_period] . identifier[end_time] . identifier[moy] / identifier[t_s] )-
( identifier[analysis_period] . identifier[st_time] . identifier[moy] / identifier[t_s] )+ identifier[st_ind] + literal[int] )
keyword[if] identifier[end_ind] > identifier[st_ind] :
identifier[_filt_values] = identifier[self] . identifier[_values] [ identifier[st_ind] : identifier[end_ind] ]
keyword[else] :
identifier[_filt_values] = identifier[self] . identifier[_values] [ identifier[st_ind] :]+ identifier[self] . identifier[_values] [: identifier[end_ind] ]
identifier[_filt_header] = identifier[self] . identifier[header] . identifier[duplicate] ()
identifier[_filt_header] . identifier[_analysis_period] = identifier[analysis_period]
keyword[return] identifier[HourlyContinuousCollection] ( identifier[_filt_header] , identifier[_filt_values] )
keyword[else] :
identifier[_filtered_data] = identifier[self] . identifier[filter_by_moys] ( identifier[analysis_period] . identifier[moys] )
identifier[_filtered_data] . identifier[header] . identifier[_analysis_period] = identifier[analysis_period]
keyword[return] identifier[_filtered_data] | def filter_by_analysis_period(self, analysis_period):
"""Filter the Data Collection based on an analysis period.
Args:
analysis period: A Ladybug analysis period
Return:
A new Data Collection with filtered data
"""
self._check_analysis_period(analysis_period)
analysis_period = self._get_analysis_period_subset(analysis_period)
if analysis_period.st_hour == 0 and analysis_period.end_hour == 23:
# We can still return an Hourly Continuous Data Collection
t_s = 60 / analysis_period.timestep
st_ind = int(analysis_period.st_time.moy / t_s - self.header.analysis_period.st_time.moy / t_s)
end_ind = int(analysis_period.end_time.moy / t_s - analysis_period.st_time.moy / t_s + st_ind + 1)
if end_ind > st_ind:
_filt_values = self._values[st_ind:end_ind] # depends on [control=['if'], data=['end_ind', 'st_ind']]
else:
_filt_values = self._values[st_ind:] + self._values[:end_ind]
_filt_header = self.header.duplicate()
_filt_header._analysis_period = analysis_period
return HourlyContinuousCollection(_filt_header, _filt_values) # depends on [control=['if'], data=[]]
else:
# Filter using HOYs and the result cannot be continuous
_filtered_data = self.filter_by_moys(analysis_period.moys)
_filtered_data.header._analysis_period = analysis_period
return _filtered_data |
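
The hourly-continuous branch above is plain minute-of-year slicing. A worked example of the index arithmetic, assuming an hourly collection (timestep 1, so t_s = 60) whose header period starts at minute-of-year 0 (Jan 1, 00:00) and a filter period of Feb 1 00:00 through Feb 28 23:00 in a non-leap year:

```python
t_s = 60 / 1                                         # 60 / timestep
header_st_moy = 0                                    # collection starts Jan 1, 00:00
filter_st_moy = 31 * 24 * 60                         # Feb 1, 00:00  -> 44640
filter_end_moy = filter_st_moy + (28 * 24 - 1) * 60  # Feb 28, 23:00 -> 84900

st_ind = int(filter_st_moy / t_s - header_st_moy / t_s)                 # 744
end_ind = int(filter_end_moy / t_s - filter_st_moy / t_s + st_ind + 1)  # 1416
print(st_ind, end_ind, end_ind - st_ind)  # 744 1416 672 -> 28 days of hourly values
```

Because `end_ind > st_ind` here, the simple `self._values[st_ind:end_ind]` slice is taken; the wrap-around branch only matters for filter periods that cross the end of the year.
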
def visit_assert(self, node):
"""check the use of an assert statement on a tuple."""
if (
node.fail is None
and isinstance(node.test, astroid.Tuple)
and len(node.test.elts) == 2
):
self.add_message("assert-on-tuple", node=node) | def function[visit_assert, parameter[self, node]]:
constant[check the use of an assert statement on a tuple.]
if <ast.BoolOp object at 0x7da1b0317b80> begin[:]
call[name[self].add_message, parameter[constant[assert-on-tuple]]] | keyword[def] identifier[visit_assert] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] (
identifier[node] . identifier[fail] keyword[is] keyword[None]
keyword[and] identifier[isinstance] ( identifier[node] . identifier[test] , identifier[astroid] . identifier[Tuple] )
keyword[and] identifier[len] ( identifier[node] . identifier[test] . identifier[elts] )== literal[int]
):
identifier[self] . identifier[add_message] ( literal[string] , identifier[node] = identifier[node] ) | def visit_assert(self, node):
"""check the use of an assert statement on a tuple."""
if node.fail is None and isinstance(node.test, astroid.Tuple) and (len(node.test.elts) == 2):
self.add_message('assert-on-tuple', node=node) # depends on [control=['if'], data=[]] |
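
The checker above flags the classic always-true assertion: a parenthesised condition plus message parses as a two-element tuple with no separate failure message, which is exactly the `astroid.Tuple` of length 2 being tested for. A minimal standalone illustration of the pitfall itself:

```python
x = 2

# Intended as "assert x == 1 with a message", but the parentheses make the test a
# 2-element tuple; a non-empty tuple is always truthy, so this never fails.
# This is the shape reported as assert-on-tuple.
assert (x == 1, "x should be 1")

# The intended form -- uncommenting this would raise AssertionError here:
# assert x == 1, "x should be 1"
```
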
def is_present(cls):
"""
Check if the currently tested element is into the database.
"""
if PyFunceble.CONFIGURATION["inactive_database"]:
# The database subsystem is activated.
if PyFunceble.INTERN["to_test"] in PyFunceble.INTERN[
"flatten_inactive_db"
] or (
PyFunceble.INTERN["file_to_test"] in PyFunceble.INTERN["inactive_db"]
and PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]]
and "to_test"
in PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]]
and PyFunceble.INTERN["to_test"]
in PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]][
"to_test"
]
):
return True
return False | def function[is_present, parameter[cls]]:
constant[
Check if the currently tested element is into the database.
]
if call[name[PyFunceble].CONFIGURATION][constant[inactive_database]] begin[:]
if <ast.BoolOp object at 0x7da1b02958d0> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_present] ( identifier[cls] ):
literal[string]
keyword[if] identifier[PyFunceble] . identifier[CONFIGURATION] [ literal[string] ]:
keyword[if] identifier[PyFunceble] . identifier[INTERN] [ literal[string] ] keyword[in] identifier[PyFunceble] . identifier[INTERN] [
literal[string]
] keyword[or] (
identifier[PyFunceble] . identifier[INTERN] [ literal[string] ] keyword[in] identifier[PyFunceble] . identifier[INTERN] [ literal[string] ]
keyword[and] identifier[PyFunceble] . identifier[INTERN] [ literal[string] ][ identifier[PyFunceble] . identifier[INTERN] [ literal[string] ]]
keyword[and] literal[string]
keyword[in] identifier[PyFunceble] . identifier[INTERN] [ literal[string] ][ identifier[PyFunceble] . identifier[INTERN] [ literal[string] ]]
keyword[and] identifier[PyFunceble] . identifier[INTERN] [ literal[string] ]
keyword[in] identifier[PyFunceble] . identifier[INTERN] [ literal[string] ][ identifier[PyFunceble] . identifier[INTERN] [ literal[string] ]][
literal[string]
]
):
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_present(cls):
"""
Check if the currently tested element is into the database.
"""
if PyFunceble.CONFIGURATION['inactive_database']:
# The database subsystem is activated.
if PyFunceble.INTERN['to_test'] in PyFunceble.INTERN['flatten_inactive_db'] or (PyFunceble.INTERN['file_to_test'] in PyFunceble.INTERN['inactive_db'] and PyFunceble.INTERN['inactive_db'][PyFunceble.INTERN['file_to_test']] and ('to_test' in PyFunceble.INTERN['inactive_db'][PyFunceble.INTERN['file_to_test']]) and (PyFunceble.INTERN['to_test'] in PyFunceble.INTERN['inactive_db'][PyFunceble.INTERN['file_to_test']]['to_test'])):
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return False |
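
`is_present` only inspects module-level state, so its condition can be mirrored with a local dict to see what layout makes it return True. The keys below follow the lookups in the code above; the file name and tested subject are placeholders, not anything prescribed by PyFunceble's documentation.

```python
INTERN = {
    "to_test": "example.org",
    "file_to_test": "my_hosts_file",
    "flatten_inactive_db": [],              # first branch of the OR: subject not here
    "inactive_db": {
        "my_hosts_file": {
            "to_test": ["example.org"],     # second branch: subject listed under "to_test"
        },
    },
}

present = (
    INTERN["to_test"] in INTERN["flatten_inactive_db"]
    or (
        INTERN["file_to_test"] in INTERN["inactive_db"]
        and INTERN["inactive_db"][INTERN["file_to_test"]]
        and "to_test" in INTERN["inactive_db"][INTERN["file_to_test"]]
        and INTERN["to_test"] in INTERN["inactive_db"][INTERN["file_to_test"]]["to_test"]
    )
)
print(present)  # True
```
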
def _wrap_measure(individual_state_measure_process, state_measure, loaded_processes):
"""
Creates a function on a state_collection, which creates analysis_collections for each state in the collection.
Optionally sorts the collection if the state_measure has a sort_by parameter (see funtool.lib.general.sort_states for details)
"""
def wrapped_measure(state_collection,overriding_parameters=None,loggers=None):
if loggers == None:
loggers = funtool.logger.set_default_loggers()
if loaded_processes != None :
if state_measure.grouping_selectors != None:
for grouping_selector_name in state_measure.grouping_selectors:
state_collection= funtool.state_collection.add_grouping(state_collection, grouping_selector_name, loaded_processes)
states= state_collection.states
measure_parameters= get_measure_parameters(state_measure, overriding_parameters)
if 'sort_by' in measure_parameters.keys():
states= funtool.lib.general.sort_states(states, measure_parameters['sort_by'])
for state_index,state in enumerate(states):
step_size= len(states)//20
if state_index % step_size == 0:
loggers.status_logger.warn("{}: {} %".format( datetime.datetime.now(), round((state_index/len(states) * 100 ), 1) ) )
analysis_collection = funtool.analysis.AnalysisCollection(state,None,{},{})
if state_measure.analysis_selectors != None:
for analysis_selector in state_measure.analysis_selectors:
analysis_collection = loaded_processes["analysis_selector"][analysis_selector].process_function(analysis_collection,state_collection)
if analysis_collection != None:
individual_state_measure_process(analysis_collection,state_collection,overriding_parameters)
return state_collection
return wrapped_measure | def function[_wrap_measure, parameter[individual_state_measure_process, state_measure, loaded_processes]]:
constant[
Creates a function on a state_collection, which creates analysis_collections for each state in the collection.
Optionally sorts the collection if the state_measure has a sort_by parameter (see funtool.lib.general.sort_states for details)
]
def function[wrapped_measure, parameter[state_collection, overriding_parameters, loggers]]:
if compare[name[loggers] equal[==] constant[None]] begin[:]
variable[loggers] assign[=] call[name[funtool].logger.set_default_loggers, parameter[]]
if compare[name[loaded_processes] not_equal[!=] constant[None]] begin[:]
if compare[name[state_measure].grouping_selectors not_equal[!=] constant[None]] begin[:]
for taget[name[grouping_selector_name]] in starred[name[state_measure].grouping_selectors] begin[:]
variable[state_collection] assign[=] call[name[funtool].state_collection.add_grouping, parameter[name[state_collection], name[grouping_selector_name], name[loaded_processes]]]
variable[states] assign[=] name[state_collection].states
variable[measure_parameters] assign[=] call[name[get_measure_parameters], parameter[name[state_measure], name[overriding_parameters]]]
if compare[constant[sort_by] in call[name[measure_parameters].keys, parameter[]]] begin[:]
variable[states] assign[=] call[name[funtool].lib.general.sort_states, parameter[name[states], call[name[measure_parameters]][constant[sort_by]]]]
for taget[tuple[[<ast.Name object at 0x7da18dc04850>, <ast.Name object at 0x7da18dc066e0>]]] in starred[call[name[enumerate], parameter[name[states]]]] begin[:]
variable[step_size] assign[=] binary_operation[call[name[len], parameter[name[states]]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[20]]
if compare[binary_operation[name[state_index] <ast.Mod object at 0x7da2590d6920> name[step_size]] equal[==] constant[0]] begin[:]
call[name[loggers].status_logger.warn, parameter[call[constant[{}: {} %].format, parameter[call[name[datetime].datetime.now, parameter[]], call[name[round], parameter[binary_operation[binary_operation[name[state_index] / call[name[len], parameter[name[states]]]] * constant[100]], constant[1]]]]]]]
variable[analysis_collection] assign[=] call[name[funtool].analysis.AnalysisCollection, parameter[name[state], constant[None], dictionary[[], []], dictionary[[], []]]]
if compare[name[state_measure].analysis_selectors not_equal[!=] constant[None]] begin[:]
for taget[name[analysis_selector]] in starred[name[state_measure].analysis_selectors] begin[:]
variable[analysis_collection] assign[=] call[call[call[name[loaded_processes]][constant[analysis_selector]]][name[analysis_selector]].process_function, parameter[name[analysis_collection], name[state_collection]]]
if compare[name[analysis_collection] not_equal[!=] constant[None]] begin[:]
call[name[individual_state_measure_process], parameter[name[analysis_collection], name[state_collection], name[overriding_parameters]]]
return[name[state_collection]]
return[name[wrapped_measure]] | keyword[def] identifier[_wrap_measure] ( identifier[individual_state_measure_process] , identifier[state_measure] , identifier[loaded_processes] ):
literal[string]
keyword[def] identifier[wrapped_measure] ( identifier[state_collection] , identifier[overriding_parameters] = keyword[None] , identifier[loggers] = keyword[None] ):
keyword[if] identifier[loggers] == keyword[None] :
identifier[loggers] = identifier[funtool] . identifier[logger] . identifier[set_default_loggers] ()
keyword[if] identifier[loaded_processes] != keyword[None] :
keyword[if] identifier[state_measure] . identifier[grouping_selectors] != keyword[None] :
keyword[for] identifier[grouping_selector_name] keyword[in] identifier[state_measure] . identifier[grouping_selectors] :
identifier[state_collection] = identifier[funtool] . identifier[state_collection] . identifier[add_grouping] ( identifier[state_collection] , identifier[grouping_selector_name] , identifier[loaded_processes] )
identifier[states] = identifier[state_collection] . identifier[states]
identifier[measure_parameters] = identifier[get_measure_parameters] ( identifier[state_measure] , identifier[overriding_parameters] )
keyword[if] literal[string] keyword[in] identifier[measure_parameters] . identifier[keys] ():
identifier[states] = identifier[funtool] . identifier[lib] . identifier[general] . identifier[sort_states] ( identifier[states] , identifier[measure_parameters] [ literal[string] ])
keyword[for] identifier[state_index] , identifier[state] keyword[in] identifier[enumerate] ( identifier[states] ):
identifier[step_size] = identifier[len] ( identifier[states] )// literal[int]
keyword[if] identifier[state_index] % identifier[step_size] == literal[int] :
identifier[loggers] . identifier[status_logger] . identifier[warn] ( literal[string] . identifier[format] ( identifier[datetime] . identifier[datetime] . identifier[now] (), identifier[round] (( identifier[state_index] / identifier[len] ( identifier[states] )* literal[int] ), literal[int] )))
identifier[analysis_collection] = identifier[funtool] . identifier[analysis] . identifier[AnalysisCollection] ( identifier[state] , keyword[None] ,{},{})
keyword[if] identifier[state_measure] . identifier[analysis_selectors] != keyword[None] :
keyword[for] identifier[analysis_selector] keyword[in] identifier[state_measure] . identifier[analysis_selectors] :
identifier[analysis_collection] = identifier[loaded_processes] [ literal[string] ][ identifier[analysis_selector] ]. identifier[process_function] ( identifier[analysis_collection] , identifier[state_collection] )
keyword[if] identifier[analysis_collection] != keyword[None] :
identifier[individual_state_measure_process] ( identifier[analysis_collection] , identifier[state_collection] , identifier[overriding_parameters] )
keyword[return] identifier[state_collection]
keyword[return] identifier[wrapped_measure] | def _wrap_measure(individual_state_measure_process, state_measure, loaded_processes):
"""
Creates a function on a state_collection, which creates analysis_collections for each state in the collection.
Optionally sorts the collection if the state_measure has a sort_by parameter (see funtool.lib.general.sort_states for details)
"""
def wrapped_measure(state_collection, overriding_parameters=None, loggers=None):
if loggers == None:
loggers = funtool.logger.set_default_loggers() # depends on [control=['if'], data=['loggers']]
if loaded_processes != None:
if state_measure.grouping_selectors != None:
for grouping_selector_name in state_measure.grouping_selectors:
state_collection = funtool.state_collection.add_grouping(state_collection, grouping_selector_name, loaded_processes) # depends on [control=['for'], data=['grouping_selector_name']] # depends on [control=['if'], data=[]]
states = state_collection.states
measure_parameters = get_measure_parameters(state_measure, overriding_parameters)
if 'sort_by' in measure_parameters.keys():
states = funtool.lib.general.sort_states(states, measure_parameters['sort_by']) # depends on [control=['if'], data=[]]
for (state_index, state) in enumerate(states):
step_size = len(states) // 20
if state_index % step_size == 0:
loggers.status_logger.warn('{}: {} %'.format(datetime.datetime.now(), round(state_index / len(states) * 100, 1))) # depends on [control=['if'], data=[]]
analysis_collection = funtool.analysis.AnalysisCollection(state, None, {}, {})
if state_measure.analysis_selectors != None:
for analysis_selector in state_measure.analysis_selectors:
analysis_collection = loaded_processes['analysis_selector'][analysis_selector].process_function(analysis_collection, state_collection) # depends on [control=['for'], data=['analysis_selector']] # depends on [control=['if'], data=[]]
if analysis_collection != None:
individual_state_measure_process(analysis_collection, state_collection, overriding_parameters) # depends on [control=['if'], data=['analysis_collection']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['loaded_processes']]
return state_collection
return wrapped_measure |
def dispatch(self, *args, **kwargs):
'''Find and evaluate/return the first method this input dispatches to.
'''
for result in self.gen_dispatch(*args, **kwargs):
return result | def function[dispatch, parameter[self]]:
constant[Find and evaluate/return the first method this input dispatches to.
]
for taget[name[result]] in starred[call[name[self].gen_dispatch, parameter[<ast.Starred object at 0x7da1b008d810>]]] begin[:]
return[name[result]] | keyword[def] identifier[dispatch] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[result] keyword[in] identifier[self] . identifier[gen_dispatch] (* identifier[args] ,** identifier[kwargs] ):
keyword[return] identifier[result] | def dispatch(self, *args, **kwargs):
"""Find and evaluate/return the first method this input dispatches to.
"""
for result in self.gen_dispatch(*args, **kwargs):
return result # depends on [control=['for'], data=['result']] |
def program_pixel_reg(self, enable_receiver=True):
"""
Send the pixel register to the chip and store the output.
Loads the values of self['PIXEL_REG'] onto the chip.
Includes enabling the clock, and loading the Control (CTR)
and DAC shadow registers.
        If enable_receiver is True, stores the output (by byte) in
self['DATA'], retrievable via `chip['DATA'].get_data()`.
"""
self._clear_strobes()
        # enable receiver; it works only if the pixel register is enabled/clocked
self['PIXEL_RX'].set_en(enable_receiver)
px_size = len(self['PIXEL_REG'][:]) # get the size
self['SEQ']['SHIFT_IN'][0:px_size] = self['PIXEL_REG'][:] # this will be shifted out
self['SEQ']['PIXEL_SHIFT_EN'][0:px_size] = bitarray(px_size * '1') # this is to enable clock
self._run_seq(px_size + 1) | def function[program_pixel_reg, parameter[self, enable_receiver]]:
constant[
Send the pixel register to the chip and store the output.
Loads the values of self['PIXEL_REG'] onto the chip.
Includes enabling the clock, and loading the Control (CTR)
and DAC shadow registers.
        If enable_receiver is True, stores the output (by byte) in
self['DATA'], retrievable via `chip['DATA'].get_data()`.
]
call[name[self]._clear_strobes, parameter[]]
call[call[name[self]][constant[PIXEL_RX]].set_en, parameter[name[enable_receiver]]]
variable[px_size] assign[=] call[name[len], parameter[call[call[name[self]][constant[PIXEL_REG]]][<ast.Slice object at 0x7da1b0506530>]]]
call[call[call[name[self]][constant[SEQ]]][constant[SHIFT_IN]]][<ast.Slice object at 0x7da1b0505000>] assign[=] call[call[name[self]][constant[PIXEL_REG]]][<ast.Slice object at 0x7da1b05056f0>]
call[call[call[name[self]][constant[SEQ]]][constant[PIXEL_SHIFT_EN]]][<ast.Slice object at 0x7da1b0505de0>] assign[=] call[name[bitarray], parameter[binary_operation[name[px_size] * constant[1]]]]
call[name[self]._run_seq, parameter[binary_operation[name[px_size] + constant[1]]]] | keyword[def] identifier[program_pixel_reg] ( identifier[self] , identifier[enable_receiver] = keyword[True] ):
literal[string]
identifier[self] . identifier[_clear_strobes] ()
identifier[self] [ literal[string] ]. identifier[set_en] ( identifier[enable_receiver] )
identifier[px_size] = identifier[len] ( identifier[self] [ literal[string] ][:])
identifier[self] [ literal[string] ][ literal[string] ][ literal[int] : identifier[px_size] ]= identifier[self] [ literal[string] ][:]
identifier[self] [ literal[string] ][ literal[string] ][ literal[int] : identifier[px_size] ]= identifier[bitarray] ( identifier[px_size] * literal[string] )
identifier[self] . identifier[_run_seq] ( identifier[px_size] + literal[int] ) | def program_pixel_reg(self, enable_receiver=True):
"""
Send the pixel register to the chip and store the output.
Loads the values of self['PIXEL_REG'] onto the chip.
Includes enabling the clock, and loading the Control (CTR)
and DAC shadow registers.
        If enable_receiver is True, stores the output (by byte) in
self['DATA'], retrievable via `chip['DATA'].get_data()`.
"""
self._clear_strobes()
        # enable receiver; it works only if the pixel register is enabled/clocked
self['PIXEL_RX'].set_en(enable_receiver)
px_size = len(self['PIXEL_REG'][:]) # get the size
self['SEQ']['SHIFT_IN'][0:px_size] = self['PIXEL_REG'][:] # this will be shifted out
self['SEQ']['PIXEL_SHIFT_EN'][0:px_size] = bitarray(px_size * '1') # this is to enable clock
self._run_seq(px_size + 1) |
def externals(trgt, **kwargs):
"""Return a list of direct external dependencies of ``pkgname``.
Called for the ``pydeps --externals`` command.
"""
kw = dict(
T='svg', config=None, debug=False, display=None, exclude=[], externals=True,
format='svg', max_bacon=2**65, no_config=True, nodot=False,
noise_level=2**65, noshow=True, output=None, pylib=True, pylib_all=True,
show=False, show_cycles=False, show_deps=False, show_dot=False,
show_raw_deps=False, verbose=0, include_missing=True,
)
kw.update(kwargs)
depgraph = py2depgraph.py2dep(trgt, **kw)
pkgname = trgt.fname
log.info("DEPGRAPH: %s", depgraph)
pkgname = os.path.splitext(pkgname)[0]
res = {}
ext = set()
for k, src in list(depgraph.sources.items()):
if k.startswith('_'):
continue
if not k.startswith(pkgname):
continue
if src.imports:
imps = [imp for imp in src.imports if not imp.startswith(pkgname)]
if imps:
for imp in imps:
ext.add(imp.split('.')[0])
res[k] = imps
# return res # debug
return list(sorted(ext)) | def function[externals, parameter[trgt]]:
constant[Return a list of direct external dependencies of ``pkgname``.
Called for the ``pydeps --externals`` command.
]
variable[kw] assign[=] call[name[dict], parameter[]]
call[name[kw].update, parameter[name[kwargs]]]
variable[depgraph] assign[=] call[name[py2depgraph].py2dep, parameter[name[trgt]]]
variable[pkgname] assign[=] name[trgt].fname
call[name[log].info, parameter[constant[DEPGRAPH: %s], name[depgraph]]]
variable[pkgname] assign[=] call[call[name[os].path.splitext, parameter[name[pkgname]]]][constant[0]]
variable[res] assign[=] dictionary[[], []]
variable[ext] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b20ab310>, <ast.Name object at 0x7da1b20a8670>]]] in starred[call[name[list], parameter[call[name[depgraph].sources.items, parameter[]]]]] begin[:]
if call[name[k].startswith, parameter[constant[_]]] begin[:]
continue
if <ast.UnaryOp object at 0x7da1b20ab1c0> begin[:]
continue
if name[src].imports begin[:]
variable[imps] assign[=] <ast.ListComp object at 0x7da20c794520>
if name[imps] begin[:]
for taget[name[imp]] in starred[name[imps]] begin[:]
call[name[ext].add, parameter[call[call[name[imp].split, parameter[constant[.]]]][constant[0]]]]
call[name[res]][name[k]] assign[=] name[imps]
return[call[name[list], parameter[call[name[sorted], parameter[name[ext]]]]]] | keyword[def] identifier[externals] ( identifier[trgt] ,** identifier[kwargs] ):
literal[string]
identifier[kw] = identifier[dict] (
identifier[T] = literal[string] , identifier[config] = keyword[None] , identifier[debug] = keyword[False] , identifier[display] = keyword[None] , identifier[exclude] =[], identifier[externals] = keyword[True] ,
identifier[format] = literal[string] , identifier[max_bacon] = literal[int] ** literal[int] , identifier[no_config] = keyword[True] , identifier[nodot] = keyword[False] ,
identifier[noise_level] = literal[int] ** literal[int] , identifier[noshow] = keyword[True] , identifier[output] = keyword[None] , identifier[pylib] = keyword[True] , identifier[pylib_all] = keyword[True] ,
identifier[show] = keyword[False] , identifier[show_cycles] = keyword[False] , identifier[show_deps] = keyword[False] , identifier[show_dot] = keyword[False] ,
identifier[show_raw_deps] = keyword[False] , identifier[verbose] = literal[int] , identifier[include_missing] = keyword[True] ,
)
identifier[kw] . identifier[update] ( identifier[kwargs] )
identifier[depgraph] = identifier[py2depgraph] . identifier[py2dep] ( identifier[trgt] ,** identifier[kw] )
identifier[pkgname] = identifier[trgt] . identifier[fname]
identifier[log] . identifier[info] ( literal[string] , identifier[depgraph] )
identifier[pkgname] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[pkgname] )[ literal[int] ]
identifier[res] ={}
identifier[ext] = identifier[set] ()
keyword[for] identifier[k] , identifier[src] keyword[in] identifier[list] ( identifier[depgraph] . identifier[sources] . identifier[items] ()):
keyword[if] identifier[k] . identifier[startswith] ( literal[string] ):
keyword[continue]
keyword[if] keyword[not] identifier[k] . identifier[startswith] ( identifier[pkgname] ):
keyword[continue]
keyword[if] identifier[src] . identifier[imports] :
identifier[imps] =[ identifier[imp] keyword[for] identifier[imp] keyword[in] identifier[src] . identifier[imports] keyword[if] keyword[not] identifier[imp] . identifier[startswith] ( identifier[pkgname] )]
keyword[if] identifier[imps] :
keyword[for] identifier[imp] keyword[in] identifier[imps] :
identifier[ext] . identifier[add] ( identifier[imp] . identifier[split] ( literal[string] )[ literal[int] ])
identifier[res] [ identifier[k] ]= identifier[imps]
keyword[return] identifier[list] ( identifier[sorted] ( identifier[ext] )) | def externals(trgt, **kwargs):
"""Return a list of direct external dependencies of ``pkgname``.
Called for the ``pydeps --externals`` command.
"""
kw = dict(T='svg', config=None, debug=False, display=None, exclude=[], externals=True, format='svg', max_bacon=2 ** 65, no_config=True, nodot=False, noise_level=2 ** 65, noshow=True, output=None, pylib=True, pylib_all=True, show=False, show_cycles=False, show_deps=False, show_dot=False, show_raw_deps=False, verbose=0, include_missing=True)
kw.update(kwargs)
depgraph = py2depgraph.py2dep(trgt, **kw)
pkgname = trgt.fname
log.info('DEPGRAPH: %s', depgraph)
pkgname = os.path.splitext(pkgname)[0]
res = {}
ext = set()
for (k, src) in list(depgraph.sources.items()):
if k.startswith('_'):
continue # depends on [control=['if'], data=[]]
if not k.startswith(pkgname):
continue # depends on [control=['if'], data=[]]
if src.imports:
imps = [imp for imp in src.imports if not imp.startswith(pkgname)]
if imps:
for imp in imps:
ext.add(imp.split('.')[0]) # depends on [control=['for'], data=['imp']]
res[k] = imps # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# return res # debug
return list(sorted(ext)) |
def parse_option(self, option, block_name, *values):
""" Parse duration option for timer.
"""
try:
if len(values) != 1:
raise TypeError
self.total_duration = int(values[0])
if self.total_duration <= 0:
raise ValueError
except ValueError:
pattern = u'"{0}" must be an integer > 0'
raise ValueError(pattern.format(option)) | def function[parse_option, parameter[self, option, block_name]]:
constant[ Parse duration option for timer.
]
<ast.Try object at 0x7da1b13b8d00> | keyword[def] identifier[parse_option] ( identifier[self] , identifier[option] , identifier[block_name] ,* identifier[values] ):
literal[string]
keyword[try] :
keyword[if] identifier[len] ( identifier[values] )!= literal[int] :
keyword[raise] identifier[TypeError]
identifier[self] . identifier[total_duration] = identifier[int] ( identifier[values] [ literal[int] ])
keyword[if] identifier[self] . identifier[total_duration] <= literal[int] :
keyword[raise] identifier[ValueError]
keyword[except] identifier[ValueError] :
identifier[pattern] = literal[string]
keyword[raise] identifier[ValueError] ( identifier[pattern] . identifier[format] ( identifier[option] )) | def parse_option(self, option, block_name, *values):
""" Parse duration option for timer.
"""
try:
if len(values) != 1:
raise TypeError # depends on [control=['if'], data=[]]
self.total_duration = int(values[0])
if self.total_duration <= 0:
raise ValueError # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ValueError:
pattern = u'"{0}" must be an integer > 0'
raise ValueError(pattern.format(option)) # depends on [control=['except'], data=[]] |
def parse_psql_array(inp):
"""
:param inp: a string encoding an array
:return: the array of elements as represented by the input
"""
inp = unescape_sql(inp)
# Strip '{' and '}'
if inp.startswith("{") and inp.endswith("}"):
inp = inp[1:-1]
lst = []
elem = ""
in_quotes, escaped = False, False
for ch in inp:
if escaped:
elem += ch
escaped = False
elif ch == '"':
in_quotes = not in_quotes
escaped = False
elif ch == '\\':
escaped = True
else:
if in_quotes:
elem += ch
elif ch == ',':
lst.append(elem)
elem = ""
else:
elem += ch
escaped = False
if len(elem) > 0:
lst.append(elem)
return lst | def function[parse_psql_array, parameter[inp]]:
constant[
:param inp: a string encoding an array
:return: the array of elements as represented by the input
]
variable[inp] assign[=] call[name[unescape_sql], parameter[name[inp]]]
if <ast.BoolOp object at 0x7da1b10802b0> begin[:]
variable[inp] assign[=] call[name[inp]][<ast.Slice object at 0x7da1b1081870>]
variable[lst] assign[=] list[[]]
variable[elem] assign[=] constant[]
<ast.Tuple object at 0x7da1b10803a0> assign[=] tuple[[<ast.Constant object at 0x7da1b10839a0>, <ast.Constant object at 0x7da1b10800d0>]]
for taget[name[ch]] in starred[name[inp]] begin[:]
if name[escaped] begin[:]
<ast.AugAssign object at 0x7da1b1080f10>
variable[escaped] assign[=] constant[False]
if compare[call[name[len], parameter[name[elem]]] greater[>] constant[0]] begin[:]
call[name[lst].append, parameter[name[elem]]]
return[name[lst]] | keyword[def] identifier[parse_psql_array] ( identifier[inp] ):
literal[string]
identifier[inp] = identifier[unescape_sql] ( identifier[inp] )
keyword[if] identifier[inp] . identifier[startswith] ( literal[string] ) keyword[and] identifier[inp] . identifier[endswith] ( literal[string] ):
identifier[inp] = identifier[inp] [ literal[int] :- literal[int] ]
identifier[lst] =[]
identifier[elem] = literal[string]
identifier[in_quotes] , identifier[escaped] = keyword[False] , keyword[False]
keyword[for] identifier[ch] keyword[in] identifier[inp] :
keyword[if] identifier[escaped] :
identifier[elem] += identifier[ch]
identifier[escaped] = keyword[False]
keyword[elif] identifier[ch] == literal[string] :
identifier[in_quotes] = keyword[not] identifier[in_quotes]
identifier[escaped] = keyword[False]
keyword[elif] identifier[ch] == literal[string] :
identifier[escaped] = keyword[True]
keyword[else] :
keyword[if] identifier[in_quotes] :
identifier[elem] += identifier[ch]
keyword[elif] identifier[ch] == literal[string] :
identifier[lst] . identifier[append] ( identifier[elem] )
identifier[elem] = literal[string]
keyword[else] :
identifier[elem] += identifier[ch]
identifier[escaped] = keyword[False]
keyword[if] identifier[len] ( identifier[elem] )> literal[int] :
identifier[lst] . identifier[append] ( identifier[elem] )
keyword[return] identifier[lst] | def parse_psql_array(inp):
"""
:param inp: a string encoding an array
:return: the array of elements as represented by the input
"""
inp = unescape_sql(inp)
# Strip '{' and '}'
if inp.startswith('{') and inp.endswith('}'):
inp = inp[1:-1] # depends on [control=['if'], data=[]]
lst = []
elem = ''
(in_quotes, escaped) = (False, False)
for ch in inp:
if escaped:
elem += ch
escaped = False # depends on [control=['if'], data=[]]
elif ch == '"':
in_quotes = not in_quotes
escaped = False # depends on [control=['if'], data=[]]
elif ch == '\\':
escaped = True # depends on [control=['if'], data=[]]
else:
if in_quotes:
elem += ch # depends on [control=['if'], data=[]]
elif ch == ',':
lst.append(elem)
elem = '' # depends on [control=['if'], data=[]]
else:
elem += ch
escaped = False # depends on [control=['for'], data=['ch']]
if len(elem) > 0:
lst.append(elem) # depends on [control=['if'], data=[]]
return lst |
def update_from_shiftfile(shiftfile,wcsname=None,force=False):
"""
Update headers of all images specified in shiftfile with shifts
from shiftfile.
Parameters
----------
shiftfile : str
Filename of shiftfile.
wcsname : str
Label to give to new WCS solution being created by this fit. If
a value of None is given, it will automatically use 'TWEAK' as the
label. [Default =None]
force : bool
Update header even though WCS already exists with this solution or
wcsname? [Default=False]
"""
f = open(fileutil.osfn(shiftfile))
shift_lines = [x.strip() for x in f.readlines()]
f.close()
# interpret header of shift file
for line in shift_lines:
if 'refimage' in line or 'reference' in line:
refimage = line.split(':')[-1]
refimage = refimage[:refimage.find('[wcs]')].lstrip()
break
# Determine the max length in the first column (filenames)
fnames = []
for row in shift_lines:
if row[0] == '#': continue
fnames.append(len(row.split(' ')[0]))
fname_fmt = 'S{0}'.format(max(fnames))
# Now read in numerical values from shiftfile
type_list = {'names':('fnames','xsh','ysh','rot','scale','xrms','yrms'),
'formats':(fname_fmt,'f4','f4','f4','f4','f4','f4')}
try:
sdict = np.loadtxt(shiftfile,dtype=type_list,unpack=False)
except IndexError:
tlist = {'names':('fnames','xsh','ysh','rot','scale'),
'formats':(fname_fmt,'f4','f4','f4','f4')}
s = np.loadtxt(shiftfile,dtype=tlist,unpack=False)
sdict = np.zeros([s['fnames'].shape[0],],dtype=type_list)
for sname in s.dtype.names:
sdict[sname] = s[sname]
for img in sdict:
updatewcs_with_shift(img['fnames'], refimage, wcsname=wcsname,
rot=img['rot'], scale=img['scale'],
xsh=img['xsh'], ysh=img['ysh'],
xrms=img['xrms'], yrms=img['yrms'],
force=force) | def function[update_from_shiftfile, parameter[shiftfile, wcsname, force]]:
constant[
Update headers of all images specified in shiftfile with shifts
from shiftfile.
Parameters
----------
shiftfile : str
Filename of shiftfile.
wcsname : str
Label to give to new WCS solution being created by this fit. If
a value of None is given, it will automatically use 'TWEAK' as the
label. [Default =None]
force : bool
Update header even though WCS already exists with this solution or
wcsname? [Default=False]
]
variable[f] assign[=] call[name[open], parameter[call[name[fileutil].osfn, parameter[name[shiftfile]]]]]
variable[shift_lines] assign[=] <ast.ListComp object at 0x7da1b1bb8160>
call[name[f].close, parameter[]]
for taget[name[line]] in starred[name[shift_lines]] begin[:]
if <ast.BoolOp object at 0x7da1b1bb8d00> begin[:]
variable[refimage] assign[=] call[call[name[line].split, parameter[constant[:]]]][<ast.UnaryOp object at 0x7da1b1bbb1c0>]
variable[refimage] assign[=] call[call[name[refimage]][<ast.Slice object at 0x7da1b1bbad10>].lstrip, parameter[]]
break
variable[fnames] assign[=] list[[]]
for taget[name[row]] in starred[name[shift_lines]] begin[:]
if compare[call[name[row]][constant[0]] equal[==] constant[#]] begin[:]
continue
call[name[fnames].append, parameter[call[name[len], parameter[call[call[name[row].split, parameter[constant[ ]]]][constant[0]]]]]]
variable[fname_fmt] assign[=] call[constant[S{0}].format, parameter[call[name[max], parameter[name[fnames]]]]]
variable[type_list] assign[=] dictionary[[<ast.Constant object at 0x7da1b1bb9330>, <ast.Constant object at 0x7da1b1bbb880>], [<ast.Tuple object at 0x7da1b1bb9990>, <ast.Tuple object at 0x7da1b1bbb9d0>]]
<ast.Try object at 0x7da1b1bbbf10>
for taget[name[img]] in starred[name[sdict]] begin[:]
call[name[updatewcs_with_shift], parameter[call[name[img]][constant[fnames]], name[refimage]]] | keyword[def] identifier[update_from_shiftfile] ( identifier[shiftfile] , identifier[wcsname] = keyword[None] , identifier[force] = keyword[False] ):
literal[string]
identifier[f] = identifier[open] ( identifier[fileutil] . identifier[osfn] ( identifier[shiftfile] ))
identifier[shift_lines] =[ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[f] . identifier[readlines] ()]
identifier[f] . identifier[close] ()
keyword[for] identifier[line] keyword[in] identifier[shift_lines] :
keyword[if] literal[string] keyword[in] identifier[line] keyword[or] literal[string] keyword[in] identifier[line] :
identifier[refimage] = identifier[line] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[refimage] = identifier[refimage] [: identifier[refimage] . identifier[find] ( literal[string] )]. identifier[lstrip] ()
keyword[break]
identifier[fnames] =[]
keyword[for] identifier[row] keyword[in] identifier[shift_lines] :
keyword[if] identifier[row] [ literal[int] ]== literal[string] : keyword[continue]
identifier[fnames] . identifier[append] ( identifier[len] ( identifier[row] . identifier[split] ( literal[string] )[ literal[int] ]))
identifier[fname_fmt] = literal[string] . identifier[format] ( identifier[max] ( identifier[fnames] ))
identifier[type_list] ={ literal[string] :( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ),
literal[string] :( identifier[fname_fmt] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )}
keyword[try] :
identifier[sdict] = identifier[np] . identifier[loadtxt] ( identifier[shiftfile] , identifier[dtype] = identifier[type_list] , identifier[unpack] = keyword[False] )
keyword[except] identifier[IndexError] :
identifier[tlist] ={ literal[string] :( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ),
literal[string] :( identifier[fname_fmt] , literal[string] , literal[string] , literal[string] , literal[string] )}
identifier[s] = identifier[np] . identifier[loadtxt] ( identifier[shiftfile] , identifier[dtype] = identifier[tlist] , identifier[unpack] = keyword[False] )
identifier[sdict] = identifier[np] . identifier[zeros] ([ identifier[s] [ literal[string] ]. identifier[shape] [ literal[int] ],], identifier[dtype] = identifier[type_list] )
keyword[for] identifier[sname] keyword[in] identifier[s] . identifier[dtype] . identifier[names] :
identifier[sdict] [ identifier[sname] ]= identifier[s] [ identifier[sname] ]
keyword[for] identifier[img] keyword[in] identifier[sdict] :
identifier[updatewcs_with_shift] ( identifier[img] [ literal[string] ], identifier[refimage] , identifier[wcsname] = identifier[wcsname] ,
identifier[rot] = identifier[img] [ literal[string] ], identifier[scale] = identifier[img] [ literal[string] ],
identifier[xsh] = identifier[img] [ literal[string] ], identifier[ysh] = identifier[img] [ literal[string] ],
identifier[xrms] = identifier[img] [ literal[string] ], identifier[yrms] = identifier[img] [ literal[string] ],
identifier[force] = identifier[force] ) | def update_from_shiftfile(shiftfile, wcsname=None, force=False):
"""
Update headers of all images specified in shiftfile with shifts
from shiftfile.
Parameters
----------
shiftfile : str
Filename of shiftfile.
wcsname : str
Label to give to new WCS solution being created by this fit. If
a value of None is given, it will automatically use 'TWEAK' as the
label. [Default =None]
force : bool
Update header even though WCS already exists with this solution or
wcsname? [Default=False]
"""
f = open(fileutil.osfn(shiftfile))
shift_lines = [x.strip() for x in f.readlines()]
f.close()
# interpret header of shift file
for line in shift_lines:
if 'refimage' in line or 'reference' in line:
refimage = line.split(':')[-1]
refimage = refimage[:refimage.find('[wcs]')].lstrip()
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
# Determine the max length in the first column (filenames)
fnames = []
for row in shift_lines:
if row[0] == '#':
continue # depends on [control=['if'], data=[]]
fnames.append(len(row.split(' ')[0])) # depends on [control=['for'], data=['row']]
fname_fmt = 'S{0}'.format(max(fnames))
# Now read in numerical values from shiftfile
type_list = {'names': ('fnames', 'xsh', 'ysh', 'rot', 'scale', 'xrms', 'yrms'), 'formats': (fname_fmt, 'f4', 'f4', 'f4', 'f4', 'f4', 'f4')}
try:
sdict = np.loadtxt(shiftfile, dtype=type_list, unpack=False) # depends on [control=['try'], data=[]]
except IndexError:
tlist = {'names': ('fnames', 'xsh', 'ysh', 'rot', 'scale'), 'formats': (fname_fmt, 'f4', 'f4', 'f4', 'f4')}
s = np.loadtxt(shiftfile, dtype=tlist, unpack=False)
sdict = np.zeros([s['fnames'].shape[0]], dtype=type_list)
for sname in s.dtype.names:
sdict[sname] = s[sname] # depends on [control=['for'], data=['sname']] # depends on [control=['except'], data=[]]
for img in sdict:
updatewcs_with_shift(img['fnames'], refimage, wcsname=wcsname, rot=img['rot'], scale=img['scale'], xsh=img['xsh'], ysh=img['ysh'], xrms=img['xrms'], yrms=img['yrms'], force=force) # depends on [control=['for'], data=['img']] |
def fuzzy_index_match(possiblities, label, **kwargs):
"""Find the closest matching column label, key, or integer indexed value
Returns:
type(label): sequence of immutable objects corresponding to best matches to each object in label
if label is an int returns the object (value) in the list of possibilities at that index
if label is a str returns the closest str match in possibilities
>>> from collections import OrderedDict as odict
>>> fuzzy_index_match(pd.DataFrame(pd.np.random.randn(9,4), columns=list('ABCD'), index=range(9)), 'b')
'B'
>>> fuzzy_index_match(odict(zip('12345','ABCDE')), 'r2d2')
'2'
>>> fuzzy_index_match(odict(zip('12345','ABCDE')), 1)
'2'
>>> fuzzy_index_match(odict(zip('12345','ABCDE')), -1)
'5'
>>> fuzzy_index_match(odict(zip(range(4),'FOUR')), -4)
0
"""
possibilities = list(possiblities)
if isinstance(label, basestring):
return fuzzy_get(possibilities, label, **kwargs)
if isinstance(label, int):
return possibilities[label]
if isinstance(label, list):
return [fuzzy_get(possibilities, lbl) for lbl in label] | def function[fuzzy_index_match, parameter[possiblities, label]]:
constant[Find the closest matching column label, key, or integer indexed value
Returns:
type(label): sequence of immutable objects corresponding to best matches to each object in label
if label is an int returns the object (value) in the list of possibilities at that index
if label is a str returns the closest str match in possibilities
>>> from collections import OrderedDict as odict
>>> fuzzy_index_match(pd.DataFrame(pd.np.random.randn(9,4), columns=list('ABCD'), index=range(9)), 'b')
'B'
>>> fuzzy_index_match(odict(zip('12345','ABCDE')), 'r2d2')
'2'
>>> fuzzy_index_match(odict(zip('12345','ABCDE')), 1)
'2'
>>> fuzzy_index_match(odict(zip('12345','ABCDE')), -1)
'5'
>>> fuzzy_index_match(odict(zip(range(4),'FOUR')), -4)
0
]
variable[possibilities] assign[=] call[name[list], parameter[name[possiblities]]]
if call[name[isinstance], parameter[name[label], name[basestring]]] begin[:]
return[call[name[fuzzy_get], parameter[name[possibilities], name[label]]]]
if call[name[isinstance], parameter[name[label], name[int]]] begin[:]
return[call[name[possibilities]][name[label]]]
if call[name[isinstance], parameter[name[label], name[list]]] begin[:]
return[<ast.ListComp object at 0x7da1b1437be0>] | keyword[def] identifier[fuzzy_index_match] ( identifier[possiblities] , identifier[label] ,** identifier[kwargs] ):
literal[string]
identifier[possibilities] = identifier[list] ( identifier[possiblities] )
keyword[if] identifier[isinstance] ( identifier[label] , identifier[basestring] ):
keyword[return] identifier[fuzzy_get] ( identifier[possibilities] , identifier[label] ,** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[label] , identifier[int] ):
keyword[return] identifier[possibilities] [ identifier[label] ]
keyword[if] identifier[isinstance] ( identifier[label] , identifier[list] ):
keyword[return] [ identifier[fuzzy_get] ( identifier[possibilities] , identifier[lbl] ) keyword[for] identifier[lbl] keyword[in] identifier[label] ] | def fuzzy_index_match(possiblities, label, **kwargs):
"""Find the closest matching column label, key, or integer indexed value
Returns:
type(label): sequence of immutable objects corresponding to best matches to each object in label
if label is an int returns the object (value) in the list of possibilities at that index
if label is a str returns the closest str match in possibilities
>>> from collections import OrderedDict as odict
>>> fuzzy_index_match(pd.DataFrame(pd.np.random.randn(9,4), columns=list('ABCD'), index=range(9)), 'b')
'B'
>>> fuzzy_index_match(odict(zip('12345','ABCDE')), 'r2d2')
'2'
>>> fuzzy_index_match(odict(zip('12345','ABCDE')), 1)
'2'
>>> fuzzy_index_match(odict(zip('12345','ABCDE')), -1)
'5'
>>> fuzzy_index_match(odict(zip(range(4),'FOUR')), -4)
0
"""
possibilities = list(possiblities)
if isinstance(label, basestring):
return fuzzy_get(possibilities, label, **kwargs) # depends on [control=['if'], data=[]]
if isinstance(label, int):
return possibilities[label] # depends on [control=['if'], data=[]]
if isinstance(label, list):
return [fuzzy_get(possibilities, lbl) for lbl in label] # depends on [control=['if'], data=[]] |
def is_readable(fp, size=1):
"""
Check if the file-like object is readable.
:param fp: file-like object
:param size: byte size
:return: bool
"""
read_size = len(fp.read(size))
fp.seek(-read_size, 1)
return read_size == size | def function[is_readable, parameter[fp, size]]:
constant[
Check if the file-like object is readable.
:param fp: file-like object
:param size: byte size
:return: bool
]
variable[read_size] assign[=] call[name[len], parameter[call[name[fp].read, parameter[name[size]]]]]
call[name[fp].seek, parameter[<ast.UnaryOp object at 0x7da1b1dfa2c0>, constant[1]]]
return[compare[name[read_size] equal[==] name[size]]] | keyword[def] identifier[is_readable] ( identifier[fp] , identifier[size] = literal[int] ):
literal[string]
identifier[read_size] = identifier[len] ( identifier[fp] . identifier[read] ( identifier[size] ))
identifier[fp] . identifier[seek] (- identifier[read_size] , literal[int] )
keyword[return] identifier[read_size] == identifier[size] | def is_readable(fp, size=1):
"""
Check if the file-like object is readable.
:param fp: file-like object
:param size: byte size
:return: bool
"""
read_size = len(fp.read(size))
fp.seek(-read_size, 1)
return read_size == size |
def build_statusbar(self):
"""construct and return statusbar widget"""
info = {}
cb = self.current_buffer
btype = None
if cb is not None:
info = cb.get_info()
btype = cb.modename
info['buffer_no'] = self.buffers.index(cb)
info['buffer_type'] = btype
info['total_messages'] = self.dbman.count_messages('*')
info['pending_writes'] = len(self.dbman.writequeue)
info['input_queue'] = ' '.join(self.input_queue)
lefttxt = righttxt = u''
if cb is not None:
lefttxt, righttxt = settings.get(btype + '_statusbar', (u'', u''))
lefttxt = string_decode(lefttxt, 'UTF-8')
lefttxt = lefttxt.format(**info)
righttxt = string_decode(righttxt, 'UTF-8')
righttxt = righttxt.format(**info)
footerleft = urwid.Text(lefttxt, align='left')
pending_writes = len(self.dbman.writequeue)
if pending_writes > 0:
righttxt = ('|' * pending_writes) + ' ' + righttxt
footerright = urwid.Text(righttxt, align='right')
columns = urwid.Columns([
footerleft,
('pack', footerright)])
footer_att = settings.get_theming_attribute('global', 'footer')
return urwid.AttrMap(columns, footer_att) | def function[build_statusbar, parameter[self]]:
constant[construct and return statusbar widget]
variable[info] assign[=] dictionary[[], []]
variable[cb] assign[=] name[self].current_buffer
variable[btype] assign[=] constant[None]
if compare[name[cb] is_not constant[None]] begin[:]
variable[info] assign[=] call[name[cb].get_info, parameter[]]
variable[btype] assign[=] name[cb].modename
call[name[info]][constant[buffer_no]] assign[=] call[name[self].buffers.index, parameter[name[cb]]]
call[name[info]][constant[buffer_type]] assign[=] name[btype]
call[name[info]][constant[total_messages]] assign[=] call[name[self].dbman.count_messages, parameter[constant[*]]]
call[name[info]][constant[pending_writes]] assign[=] call[name[len], parameter[name[self].dbman.writequeue]]
call[name[info]][constant[input_queue]] assign[=] call[constant[ ].join, parameter[name[self].input_queue]]
variable[lefttxt] assign[=] constant[]
if compare[name[cb] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b0721bd0> assign[=] call[name[settings].get, parameter[binary_operation[name[btype] + constant[_statusbar]], tuple[[<ast.Constant object at 0x7da1b0722530>, <ast.Constant object at 0x7da1b0721c60>]]]]
variable[lefttxt] assign[=] call[name[string_decode], parameter[name[lefttxt], constant[UTF-8]]]
variable[lefttxt] assign[=] call[name[lefttxt].format, parameter[]]
variable[righttxt] assign[=] call[name[string_decode], parameter[name[righttxt], constant[UTF-8]]]
variable[righttxt] assign[=] call[name[righttxt].format, parameter[]]
variable[footerleft] assign[=] call[name[urwid].Text, parameter[name[lefttxt]]]
variable[pending_writes] assign[=] call[name[len], parameter[name[self].dbman.writequeue]]
if compare[name[pending_writes] greater[>] constant[0]] begin[:]
variable[righttxt] assign[=] binary_operation[binary_operation[binary_operation[constant[|] * name[pending_writes]] + constant[ ]] + name[righttxt]]
variable[footerright] assign[=] call[name[urwid].Text, parameter[name[righttxt]]]
variable[columns] assign[=] call[name[urwid].Columns, parameter[list[[<ast.Name object at 0x7da1b0723d60>, <ast.Tuple object at 0x7da1b0723ca0>]]]]
variable[footer_att] assign[=] call[name[settings].get_theming_attribute, parameter[constant[global], constant[footer]]]
return[call[name[urwid].AttrMap, parameter[name[columns], name[footer_att]]]] | keyword[def] identifier[build_statusbar] ( identifier[self] ):
literal[string]
identifier[info] ={}
identifier[cb] = identifier[self] . identifier[current_buffer]
identifier[btype] = keyword[None]
keyword[if] identifier[cb] keyword[is] keyword[not] keyword[None] :
identifier[info] = identifier[cb] . identifier[get_info] ()
identifier[btype] = identifier[cb] . identifier[modename]
identifier[info] [ literal[string] ]= identifier[self] . identifier[buffers] . identifier[index] ( identifier[cb] )
identifier[info] [ literal[string] ]= identifier[btype]
identifier[info] [ literal[string] ]= identifier[self] . identifier[dbman] . identifier[count_messages] ( literal[string] )
identifier[info] [ literal[string] ]= identifier[len] ( identifier[self] . identifier[dbman] . identifier[writequeue] )
identifier[info] [ literal[string] ]= literal[string] . identifier[join] ( identifier[self] . identifier[input_queue] )
identifier[lefttxt] = identifier[righttxt] = literal[string]
keyword[if] identifier[cb] keyword[is] keyword[not] keyword[None] :
identifier[lefttxt] , identifier[righttxt] = identifier[settings] . identifier[get] ( identifier[btype] + literal[string] ,( literal[string] , literal[string] ))
identifier[lefttxt] = identifier[string_decode] ( identifier[lefttxt] , literal[string] )
identifier[lefttxt] = identifier[lefttxt] . identifier[format] (** identifier[info] )
identifier[righttxt] = identifier[string_decode] ( identifier[righttxt] , literal[string] )
identifier[righttxt] = identifier[righttxt] . identifier[format] (** identifier[info] )
identifier[footerleft] = identifier[urwid] . identifier[Text] ( identifier[lefttxt] , identifier[align] = literal[string] )
identifier[pending_writes] = identifier[len] ( identifier[self] . identifier[dbman] . identifier[writequeue] )
keyword[if] identifier[pending_writes] > literal[int] :
identifier[righttxt] =( literal[string] * identifier[pending_writes] )+ literal[string] + identifier[righttxt]
identifier[footerright] = identifier[urwid] . identifier[Text] ( identifier[righttxt] , identifier[align] = literal[string] )
identifier[columns] = identifier[urwid] . identifier[Columns] ([
identifier[footerleft] ,
( literal[string] , identifier[footerright] )])
identifier[footer_att] = identifier[settings] . identifier[get_theming_attribute] ( literal[string] , literal[string] )
keyword[return] identifier[urwid] . identifier[AttrMap] ( identifier[columns] , identifier[footer_att] ) | def build_statusbar(self):
"""construct and return statusbar widget"""
info = {}
cb = self.current_buffer
btype = None
if cb is not None:
info = cb.get_info()
btype = cb.modename
info['buffer_no'] = self.buffers.index(cb)
info['buffer_type'] = btype # depends on [control=['if'], data=['cb']]
info['total_messages'] = self.dbman.count_messages('*')
info['pending_writes'] = len(self.dbman.writequeue)
info['input_queue'] = ' '.join(self.input_queue)
lefttxt = righttxt = u''
if cb is not None:
(lefttxt, righttxt) = settings.get(btype + '_statusbar', (u'', u''))
lefttxt = string_decode(lefttxt, 'UTF-8')
lefttxt = lefttxt.format(**info)
righttxt = string_decode(righttxt, 'UTF-8')
righttxt = righttxt.format(**info) # depends on [control=['if'], data=[]]
footerleft = urwid.Text(lefttxt, align='left')
pending_writes = len(self.dbman.writequeue)
if pending_writes > 0:
righttxt = '|' * pending_writes + ' ' + righttxt # depends on [control=['if'], data=['pending_writes']]
footerright = urwid.Text(righttxt, align='right')
columns = urwid.Columns([footerleft, ('pack', footerright)])
footer_att = settings.get_theming_attribute('global', 'footer')
return urwid.AttrMap(columns, footer_att) |
def is_current(self, paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
return False
return self.version_ok(version) | def function[is_current, parameter[self, paths]]:
constant[Return true if dependency is present and up-to-date on 'paths']
variable[version] assign[=] call[name[self].get_version, parameter[name[paths]]]
if compare[name[version] is constant[None]] begin[:]
return[constant[False]]
return[call[name[self].version_ok, parameter[name[version]]]] | keyword[def] identifier[is_current] ( identifier[self] , identifier[paths] = keyword[None] ):
literal[string]
identifier[version] = identifier[self] . identifier[get_version] ( identifier[paths] )
keyword[if] identifier[version] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[return] identifier[self] . identifier[version_ok] ( identifier[version] ) | def is_current(self, paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
return False # depends on [control=['if'], data=[]]
return self.version_ok(version) |
def parallel(view, dist='b', block=None, ordered=True, **flags):
"""Turn a function into a parallel remote function.
This method can be used for map:
In [1]: @parallel(view, block=True)
...: def func(a):
...: pass
"""
def parallel_function(f):
return ParallelFunction(view, f, dist=dist, block=block, ordered=ordered, **flags)
return parallel_function | def function[parallel, parameter[view, dist, block, ordered]]:
constant[Turn a function into a parallel remote function.
This method can be used for map:
In [1]: @parallel(view, block=True)
...: def func(a):
...: pass
]
def function[parallel_function, parameter[f]]:
return[call[name[ParallelFunction], parameter[name[view], name[f]]]]
return[name[parallel_function]] | keyword[def] identifier[parallel] ( identifier[view] , identifier[dist] = literal[string] , identifier[block] = keyword[None] , identifier[ordered] = keyword[True] ,** identifier[flags] ):
literal[string]
keyword[def] identifier[parallel_function] ( identifier[f] ):
keyword[return] identifier[ParallelFunction] ( identifier[view] , identifier[f] , identifier[dist] = identifier[dist] , identifier[block] = identifier[block] , identifier[ordered] = identifier[ordered] ,** identifier[flags] )
keyword[return] identifier[parallel_function] | def parallel(view, dist='b', block=None, ordered=True, **flags):
"""Turn a function into a parallel remote function.
This method can be used for map:
In [1]: @parallel(view, block=True)
...: def func(a):
...: pass
"""
def parallel_function(f):
return ParallelFunction(view, f, dist=dist, block=block, ordered=ordered, **flags)
return parallel_function |
def getShocks(self):
'''
Finds permanent and transitory income "shocks" for each agent this period. As this is a
perfect foresight model, there are no stochastic shocks: PermShkNow = PermGroFac for each
agent (according to their t_cycle) and TranShkNow = 1.0 for all agents.
Parameters
----------
None
Returns
-------
None
'''
PermGroFac = np.array(self.PermGroFac)
self.PermShkNow = PermGroFac[self.t_cycle-1] # cycle time has already been advanced
self.TranShkNow = np.ones(self.AgentCount) | def function[getShocks, parameter[self]]:
constant[
Finds permanent and transitory income "shocks" for each agent this period. As this is a
perfect foresight model, there are no stochastic shocks: PermShkNow = PermGroFac for each
agent (according to their t_cycle) and TranShkNow = 1.0 for all agents.
Parameters
----------
None
Returns
-------
None
]
variable[PermGroFac] assign[=] call[name[np].array, parameter[name[self].PermGroFac]]
name[self].PermShkNow assign[=] call[name[PermGroFac]][binary_operation[name[self].t_cycle - constant[1]]]
name[self].TranShkNow assign[=] call[name[np].ones, parameter[name[self].AgentCount]] | keyword[def] identifier[getShocks] ( identifier[self] ):
literal[string]
identifier[PermGroFac] = identifier[np] . identifier[array] ( identifier[self] . identifier[PermGroFac] )
identifier[self] . identifier[PermShkNow] = identifier[PermGroFac] [ identifier[self] . identifier[t_cycle] - literal[int] ]
identifier[self] . identifier[TranShkNow] = identifier[np] . identifier[ones] ( identifier[self] . identifier[AgentCount] ) | def getShocks(self):
"""
Finds permanent and transitory income "shocks" for each agent this period. As this is a
perfect foresight model, there are no stochastic shocks: PermShkNow = PermGroFac for each
agent (according to their t_cycle) and TranShkNow = 1.0 for all agents.
Parameters
----------
None
Returns
-------
None
"""
PermGroFac = np.array(self.PermGroFac)
self.PermShkNow = PermGroFac[self.t_cycle - 1] # cycle time has already been advanced
self.TranShkNow = np.ones(self.AgentCount) |
def stack(self, slug, chart_obj=None, title=None):
"""
Get the html for a chart and store it
"""
if chart_obj is None:
if self.chart_obj is None:
self.err(
self.stack,
"No chart object set: please provide one in parameters")
return
chart_obj = self.chart_obj
try:
seaborn_chart = None
if self.engine == "chartjs":
html = chart_obj
elif self.engine == "seaborn":
html = ""
seaborn_chart = chart_obj
else:
html = self.get_html(chart_obj, slug)
if html is None and seaborn_chart is None:
self.err(
self.stack, "Can not stack: empty html reveived for " +
str(chart_obj), "-", slug)
return
report = dict(slug=slug, html=html)
if seaborn_chart is not None:
report["seaborn_chart"] = seaborn_chart
if self.engine not in self.report_engines:
self.report_engines.append(self.engine)
self.reports.append(report)
except Exception as e:
self.err(e, self.stack, "Can not stack report")
return
self.ok("Stacked report", slug) | def function[stack, parameter[self, slug, chart_obj, title]]:
constant[
Get the html for a chart and store it
]
if compare[name[chart_obj] is constant[None]] begin[:]
if compare[name[self].chart_obj is constant[None]] begin[:]
call[name[self].err, parameter[name[self].stack, constant[No chart object set: please provide one in parameters]]]
return[None]
variable[chart_obj] assign[=] name[self].chart_obj
<ast.Try object at 0x7da1b253e830>
call[name[self].ok, parameter[constant[Stacked report], name[slug]]] | keyword[def] identifier[stack] ( identifier[self] , identifier[slug] , identifier[chart_obj] = keyword[None] , identifier[title] = keyword[None] ):
literal[string]
keyword[if] identifier[chart_obj] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[chart_obj] keyword[is] keyword[None] :
identifier[self] . identifier[err] (
identifier[self] . identifier[stack] ,
literal[string] )
keyword[return]
identifier[chart_obj] = identifier[self] . identifier[chart_obj]
keyword[try] :
identifier[seaborn_chart] = keyword[None]
keyword[if] identifier[self] . identifier[engine] == literal[string] :
identifier[html] = identifier[chart_obj]
keyword[elif] identifier[self] . identifier[engine] == literal[string] :
identifier[html] = literal[string]
identifier[seaborn_chart] = identifier[chart_obj]
keyword[else] :
identifier[html] = identifier[self] . identifier[get_html] ( identifier[chart_obj] , identifier[slug] )
keyword[if] identifier[html] keyword[is] keyword[None] keyword[and] identifier[seaborn_chart] keyword[is] keyword[None] :
identifier[self] . identifier[err] (
identifier[self] . identifier[stack] , literal[string] +
identifier[str] ( identifier[chart_obj] ), literal[string] , identifier[slug] )
keyword[return]
identifier[report] = identifier[dict] ( identifier[slug] = identifier[slug] , identifier[html] = identifier[html] )
keyword[if] identifier[seaborn_chart] keyword[is] keyword[not] keyword[None] :
identifier[report] [ literal[string] ]= identifier[seaborn_chart]
keyword[if] identifier[self] . identifier[engine] keyword[not] keyword[in] identifier[self] . identifier[report_engines] :
identifier[self] . identifier[report_engines] . identifier[append] ( identifier[self] . identifier[engine] )
identifier[self] . identifier[reports] . identifier[append] ( identifier[report] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[err] ( identifier[e] , identifier[self] . identifier[stack] , literal[string] )
keyword[return]
identifier[self] . identifier[ok] ( literal[string] , identifier[slug] ) | def stack(self, slug, chart_obj=None, title=None):
"""
Get the html for a chart and store it
"""
if chart_obj is None:
if self.chart_obj is None:
self.err(self.stack, 'No chart object set: please provide one in parameters')
return # depends on [control=['if'], data=[]]
chart_obj = self.chart_obj # depends on [control=['if'], data=['chart_obj']]
try:
seaborn_chart = None
if self.engine == 'chartjs':
html = chart_obj # depends on [control=['if'], data=[]]
elif self.engine == 'seaborn':
html = ''
seaborn_chart = chart_obj # depends on [control=['if'], data=[]]
else:
html = self.get_html(chart_obj, slug)
if html is None and seaborn_chart is None:
                self.err(self.stack, 'Can not stack: empty html received for ' + str(chart_obj), '-', slug)
return # depends on [control=['if'], data=[]]
report = dict(slug=slug, html=html)
if seaborn_chart is not None:
report['seaborn_chart'] = seaborn_chart # depends on [control=['if'], data=['seaborn_chart']]
if self.engine not in self.report_engines:
self.report_engines.append(self.engine) # depends on [control=['if'], data=[]]
self.reports.append(report) # depends on [control=['try'], data=[]]
except Exception as e:
self.err(e, self.stack, 'Can not stack report')
return # depends on [control=['except'], data=['e']]
self.ok('Stacked report', slug) |
def _button_autosave_clicked(self, checked):
"""
Called whenever the button is clicked.
"""
if checked:
# get the path from the user
path = _spinmob.dialogs.save(filters=self.file_type)
# abort if necessary
if not path:
self.button_autosave.set_checked(False)
return
# otherwise, save the info!
self._autosave_directory, filename = _os.path.split(path)
self._label_path.set_text(filename)
self.save_gui_settings() | def function[_button_autosave_clicked, parameter[self, checked]]:
constant[
Called whenever the button is clicked.
]
if name[checked] begin[:]
variable[path] assign[=] call[name[_spinmob].dialogs.save, parameter[]]
if <ast.UnaryOp object at 0x7da18ede5030> begin[:]
call[name[self].button_autosave.set_checked, parameter[constant[False]]]
return[None]
<ast.Tuple object at 0x7da18ede42e0> assign[=] call[name[_os].path.split, parameter[name[path]]]
call[name[self]._label_path.set_text, parameter[name[filename]]]
call[name[self].save_gui_settings, parameter[]] | keyword[def] identifier[_button_autosave_clicked] ( identifier[self] , identifier[checked] ):
literal[string]
keyword[if] identifier[checked] :
identifier[path] = identifier[_spinmob] . identifier[dialogs] . identifier[save] ( identifier[filters] = identifier[self] . identifier[file_type] )
keyword[if] keyword[not] identifier[path] :
identifier[self] . identifier[button_autosave] . identifier[set_checked] ( keyword[False] )
keyword[return]
identifier[self] . identifier[_autosave_directory] , identifier[filename] = identifier[_os] . identifier[path] . identifier[split] ( identifier[path] )
identifier[self] . identifier[_label_path] . identifier[set_text] ( identifier[filename] )
identifier[self] . identifier[save_gui_settings] () | def _button_autosave_clicked(self, checked):
"""
Called whenever the button is clicked.
"""
if checked:
# get the path from the user
path = _spinmob.dialogs.save(filters=self.file_type)
# abort if necessary
if not path:
self.button_autosave.set_checked(False)
return # depends on [control=['if'], data=[]]
# otherwise, save the info!
(self._autosave_directory, filename) = _os.path.split(path)
self._label_path.set_text(filename) # depends on [control=['if'], data=[]]
self.save_gui_settings() |
def consume_socket_output(frames, demux=False):
"""
Iterate through frames read from the socket and return the result.
Args:
demux (bool):
If False, stdout and stderr are multiplexed, and the result is the
concatenation of all the frames. If True, the streams are
demultiplexed, and the result is a 2-tuple where each item is the
concatenation of frames belonging to the same stream.
"""
if demux is False:
# If the streams are multiplexed, the generator returns strings, that
# we just need to concatenate.
return six.binary_type().join(frames)
# If the streams are demultiplexed, the generator yields tuples
# (stdout, stderr)
out = [None, None]
for frame in frames:
# It is guaranteed that for each frame, one and only one stream
# is not None.
assert frame != (None, None)
if frame[0] is not None:
if out[0] is None:
out[0] = frame[0]
else:
out[0] += frame[0]
else:
if out[1] is None:
out[1] = frame[1]
else:
out[1] += frame[1]
return tuple(out) | def function[consume_socket_output, parameter[frames, demux]]:
constant[
Iterate through frames read from the socket and return the result.
Args:
demux (bool):
If False, stdout and stderr are multiplexed, and the result is the
concatenation of all the frames. If True, the streams are
demultiplexed, and the result is a 2-tuple where each item is the
concatenation of frames belonging to the same stream.
]
if compare[name[demux] is constant[False]] begin[:]
return[call[call[name[six].binary_type, parameter[]].join, parameter[name[frames]]]]
variable[out] assign[=] list[[<ast.Constant object at 0x7da18eb56680>, <ast.Constant object at 0x7da18eb569b0>]]
for taget[name[frame]] in starred[name[frames]] begin[:]
assert[compare[name[frame] not_equal[!=] tuple[[<ast.Constant object at 0x7da18eb54730>, <ast.Constant object at 0x7da18eb55f90>]]]]
if compare[call[name[frame]][constant[0]] is_not constant[None]] begin[:]
if compare[call[name[out]][constant[0]] is constant[None]] begin[:]
call[name[out]][constant[0]] assign[=] call[name[frame]][constant[0]]
return[call[name[tuple], parameter[name[out]]]] | keyword[def] identifier[consume_socket_output] ( identifier[frames] , identifier[demux] = keyword[False] ):
literal[string]
keyword[if] identifier[demux] keyword[is] keyword[False] :
keyword[return] identifier[six] . identifier[binary_type] (). identifier[join] ( identifier[frames] )
identifier[out] =[ keyword[None] , keyword[None] ]
keyword[for] identifier[frame] keyword[in] identifier[frames] :
keyword[assert] identifier[frame] !=( keyword[None] , keyword[None] )
keyword[if] identifier[frame] [ literal[int] ] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[out] [ literal[int] ] keyword[is] keyword[None] :
identifier[out] [ literal[int] ]= identifier[frame] [ literal[int] ]
keyword[else] :
identifier[out] [ literal[int] ]+= identifier[frame] [ literal[int] ]
keyword[else] :
keyword[if] identifier[out] [ literal[int] ] keyword[is] keyword[None] :
identifier[out] [ literal[int] ]= identifier[frame] [ literal[int] ]
keyword[else] :
identifier[out] [ literal[int] ]+= identifier[frame] [ literal[int] ]
keyword[return] identifier[tuple] ( identifier[out] ) | def consume_socket_output(frames, demux=False):
"""
Iterate through frames read from the socket and return the result.
Args:
demux (bool):
If False, stdout and stderr are multiplexed, and the result is the
concatenation of all the frames. If True, the streams are
demultiplexed, and the result is a 2-tuple where each item is the
concatenation of frames belonging to the same stream.
"""
if demux is False:
# If the streams are multiplexed, the generator returns strings, that
# we just need to concatenate.
return six.binary_type().join(frames) # depends on [control=['if'], data=[]]
# If the streams are demultiplexed, the generator yields tuples
# (stdout, stderr)
out = [None, None]
for frame in frames:
# It is guaranteed that for each frame, one and only one stream
# is not None.
assert frame != (None, None)
if frame[0] is not None:
if out[0] is None:
out[0] = frame[0] # depends on [control=['if'], data=[]]
else:
out[0] += frame[0] # depends on [control=['if'], data=[]]
elif out[1] is None:
out[1] = frame[1] # depends on [control=['if'], data=[]]
else:
out[1] += frame[1] # depends on [control=['for'], data=['frame']]
return tuple(out) |
def is_oct(ip):
"""Return true if the IP address is in octal notation."""
try:
dec = int(str(ip), 8)
except (TypeError, ValueError):
return False
if dec > 0o37777777777 or dec < 0:
return False
return True | def function[is_oct, parameter[ip]]:
constant[Return true if the IP address is in octal notation.]
<ast.Try object at 0x7da1afe73490>
if <ast.BoolOp object at 0x7da20c6a8f70> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[is_oct] ( identifier[ip] ):
literal[string]
keyword[try] :
identifier[dec] = identifier[int] ( identifier[str] ( identifier[ip] ), literal[int] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[return] keyword[False]
keyword[if] identifier[dec] > literal[int] keyword[or] identifier[dec] < literal[int] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_oct(ip):
"""Return true if the IP address is in octal notation."""
try:
dec = int(str(ip), 8) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
return False # depends on [control=['except'], data=[]]
if dec > 4294967295 or dec < 0:
return False # depends on [control=['if'], data=[]]
return True |
def generate_type_docs(types):
"""Parse an object of types and generate RAML documentation for them.
Expects each type to be either a regular type or a list/array. If a type is a list,
it must specify what type to use for each item.
"""
output = StringIO()
indent = " " # 2
# loop through the basic types and add them to the RAML formatted output
for type_name in types:
if types[type_name].get("type") != None:
output.write(indent + type_name + ":\n")
indent += " " # 4
type_safe = types[type_name]["type"].replace(" ", "_")
# if we are dealing with a list, set type to array and specify type of items
if types[type_name].get("items") != None:
items_safe = types[type_name]["items"].replace(" ", "_")
# if items_safe == "base":
# items_safe = "object"
output.write(indent + "type: " + type_safe + "\n")
output.write(indent + "items: " + items_safe + "\n")
# otherwise, use the type per normal
else:
output.write(indent + "type: " + type_safe + "\n")
# add the description
if types[type_name].get("description") != None:
output.write(indent + "description: " + types[type_name]["description"] + "\n")
indent = indent[:-2] # 2
type_docs = output.getvalue()
output.close()
return type_docs | def function[generate_type_docs, parameter[types]]:
constant[Parse an object of types and generate RAML documentation for them.
Expects each type to be either a regular type or a list/array. If a type is a list,
it must specify what type to use for each item.
]
variable[output] assign[=] call[name[StringIO], parameter[]]
variable[indent] assign[=] constant[ ]
for taget[name[type_name]] in starred[name[types]] begin[:]
if compare[call[call[name[types]][name[type_name]].get, parameter[constant[type]]] not_equal[!=] constant[None]] begin[:]
call[name[output].write, parameter[binary_operation[binary_operation[name[indent] + name[type_name]] + constant[:
]]]]
<ast.AugAssign object at 0x7da20c6e6dd0>
variable[type_safe] assign[=] call[call[call[name[types]][name[type_name]]][constant[type]].replace, parameter[constant[ ], constant[_]]]
if compare[call[call[name[types]][name[type_name]].get, parameter[constant[items]]] not_equal[!=] constant[None]] begin[:]
variable[items_safe] assign[=] call[call[call[name[types]][name[type_name]]][constant[items]].replace, parameter[constant[ ], constant[_]]]
call[name[output].write, parameter[binary_operation[binary_operation[binary_operation[name[indent] + constant[type: ]] + name[type_safe]] + constant[
]]]]
call[name[output].write, parameter[binary_operation[binary_operation[binary_operation[name[indent] + constant[items: ]] + name[items_safe]] + constant[
]]]]
if compare[call[call[name[types]][name[type_name]].get, parameter[constant[description]]] not_equal[!=] constant[None]] begin[:]
call[name[output].write, parameter[binary_operation[binary_operation[binary_operation[name[indent] + constant[description: ]] + call[call[name[types]][name[type_name]]][constant[description]]] + constant[
]]]]
variable[indent] assign[=] call[name[indent]][<ast.Slice object at 0x7da18ede7880>]
variable[type_docs] assign[=] call[name[output].getvalue, parameter[]]
call[name[output].close, parameter[]]
return[name[type_docs]] | keyword[def] identifier[generate_type_docs] ( identifier[types] ):
literal[string]
identifier[output] = identifier[StringIO] ()
identifier[indent] = literal[string]
keyword[for] identifier[type_name] keyword[in] identifier[types] :
keyword[if] identifier[types] [ identifier[type_name] ]. identifier[get] ( literal[string] )!= keyword[None] :
identifier[output] . identifier[write] ( identifier[indent] + identifier[type_name] + literal[string] )
identifier[indent] += literal[string]
identifier[type_safe] = identifier[types] [ identifier[type_name] ][ literal[string] ]. identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[types] [ identifier[type_name] ]. identifier[get] ( literal[string] )!= keyword[None] :
identifier[items_safe] = identifier[types] [ identifier[type_name] ][ literal[string] ]. identifier[replace] ( literal[string] , literal[string] )
identifier[output] . identifier[write] ( identifier[indent] + literal[string] + identifier[type_safe] + literal[string] )
identifier[output] . identifier[write] ( identifier[indent] + literal[string] + identifier[items_safe] + literal[string] )
keyword[else] :
identifier[output] . identifier[write] ( identifier[indent] + literal[string] + identifier[type_safe] + literal[string] )
keyword[if] identifier[types] [ identifier[type_name] ]. identifier[get] ( literal[string] )!= keyword[None] :
identifier[output] . identifier[write] ( identifier[indent] + literal[string] + identifier[types] [ identifier[type_name] ][ literal[string] ]+ literal[string] )
identifier[indent] = identifier[indent] [:- literal[int] ]
identifier[type_docs] = identifier[output] . identifier[getvalue] ()
identifier[output] . identifier[close] ()
keyword[return] identifier[type_docs] | def generate_type_docs(types):
"""Parse an object of types and generate RAML documentation for them.
Expects each type to be either a regular type or a list/array. If a type is a list,
it must specify what type to use for each item.
"""
output = StringIO()
indent = ' ' # 2
# loop through the basic types and add them to the RAML formatted output
for type_name in types:
if types[type_name].get('type') != None:
output.write(indent + type_name + ':\n')
indent += ' ' # 4
type_safe = types[type_name]['type'].replace(' ', '_')
# if we are dealing with a list, set type to array and specify type of items
if types[type_name].get('items') != None:
items_safe = types[type_name]['items'].replace(' ', '_')
# if items_safe == "base":
# items_safe = "object"
output.write(indent + 'type: ' + type_safe + '\n')
output.write(indent + 'items: ' + items_safe + '\n') # depends on [control=['if'], data=[]]
else:
# otherwise, use the type per normal
output.write(indent + 'type: ' + type_safe + '\n')
# add the description
if types[type_name].get('description') != None:
output.write(indent + 'description: ' + types[type_name]['description'] + '\n') # depends on [control=['if'], data=[]]
indent = indent[:-2] # 2 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['type_name']]
type_docs = output.getvalue()
output.close()
return type_docs |
def get_edu_text(text_subtree):
"""return the text of the given EDU subtree, with '_!'-delimiters removed."""
assert text_subtree.label() == 'text', "text_subtree: {}".format(text_subtree)
edu_str = u' '.join(word for word in text_subtree.leaves())
return re.sub('_!(.*?)_!', '\g<1>', edu_str) | def function[get_edu_text, parameter[text_subtree]]:
constant[return the text of the given EDU subtree, with '_!'-delimiters removed.]
assert[compare[call[name[text_subtree].label, parameter[]] equal[==] constant[text]]]
variable[edu_str] assign[=] call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da2044c2ce0>]]
return[call[name[re].sub, parameter[constant[_!(.*?)_!], constant[\g<1>], name[edu_str]]]] | keyword[def] identifier[get_edu_text] ( identifier[text_subtree] ):
literal[string]
keyword[assert] identifier[text_subtree] . identifier[label] ()== literal[string] , literal[string] . identifier[format] ( identifier[text_subtree] )
identifier[edu_str] = literal[string] . identifier[join] ( identifier[word] keyword[for] identifier[word] keyword[in] identifier[text_subtree] . identifier[leaves] ())
keyword[return] identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[edu_str] ) | def get_edu_text(text_subtree):
"""return the text of the given EDU subtree, with '_!'-delimiters removed."""
assert text_subtree.label() == 'text', 'text_subtree: {}'.format(text_subtree)
edu_str = u' '.join((word for word in text_subtree.leaves()))
return re.sub('_!(.*?)_!', '\\g<1>', edu_str) |
def is_adjacent(self, other: ops.Qid) -> bool:
"""Determines if two qubits are adjacent qubits."""
return (isinstance(other, GridQubit) and
abs(self.row - other.row) + abs(self.col - other.col) == 1) | def function[is_adjacent, parameter[self, other]]:
constant[Determines if two qubits are adjacent qubits.]
return[<ast.BoolOp object at 0x7da204621600>] | keyword[def] identifier[is_adjacent] ( identifier[self] , identifier[other] : identifier[ops] . identifier[Qid] )-> identifier[bool] :
literal[string]
keyword[return] ( identifier[isinstance] ( identifier[other] , identifier[GridQubit] ) keyword[and]
identifier[abs] ( identifier[self] . identifier[row] - identifier[other] . identifier[row] )+ identifier[abs] ( identifier[self] . identifier[col] - identifier[other] . identifier[col] )== literal[int] ) | def is_adjacent(self, other: ops.Qid) -> bool:
"""Determines if two qubits are adjacent qubits."""
return isinstance(other, GridQubit) and abs(self.row - other.row) + abs(self.col - other.col) == 1 |
def add_partition(self, spec, location=None):
"""
Add a new table partition, creating any new directories in HDFS if
necessary.
Partition parameters can be set in a single DDL statement, or you can
use alter_partition to set them after the fact.
Returns
-------
None (for now)
"""
part_schema = self.partition_schema()
stmt = ddl.AddPartition(
self._qualified_name, spec, part_schema, location=location
)
return self._execute(stmt) | def function[add_partition, parameter[self, spec, location]]:
constant[
Add a new table partition, creating any new directories in HDFS if
necessary.
Partition parameters can be set in a single DDL statement, or you can
use alter_partition to set them after the fact.
Returns
-------
None (for now)
]
variable[part_schema] assign[=] call[name[self].partition_schema, parameter[]]
variable[stmt] assign[=] call[name[ddl].AddPartition, parameter[name[self]._qualified_name, name[spec], name[part_schema]]]
return[call[name[self]._execute, parameter[name[stmt]]]] | keyword[def] identifier[add_partition] ( identifier[self] , identifier[spec] , identifier[location] = keyword[None] ):
literal[string]
identifier[part_schema] = identifier[self] . identifier[partition_schema] ()
identifier[stmt] = identifier[ddl] . identifier[AddPartition] (
identifier[self] . identifier[_qualified_name] , identifier[spec] , identifier[part_schema] , identifier[location] = identifier[location]
)
keyword[return] identifier[self] . identifier[_execute] ( identifier[stmt] ) | def add_partition(self, spec, location=None):
"""
Add a new table partition, creating any new directories in HDFS if
necessary.
Partition parameters can be set in a single DDL statement, or you can
use alter_partition to set them after the fact.
Returns
-------
None (for now)
"""
part_schema = self.partition_schema()
stmt = ddl.AddPartition(self._qualified_name, spec, part_schema, location=location)
return self._execute(stmt) |
def libvlc_media_new_as_node(p_instance, psz_name):
'''Create a media as an empty node with a given name.
See L{libvlc_media_release}.
@param p_instance: the instance.
@param psz_name: the name of the node.
@return: the new empty media or NULL on error.
'''
f = _Cfunctions.get('libvlc_media_new_as_node', None) or \
_Cfunction('libvlc_media_new_as_node', ((1,), (1,),), class_result(Media),
ctypes.c_void_p, Instance, ctypes.c_char_p)
return f(p_instance, psz_name) | def function[libvlc_media_new_as_node, parameter[p_instance, psz_name]]:
constant[Create a media as an empty node with a given name.
See L{libvlc_media_release}.
@param p_instance: the instance.
@param psz_name: the name of the node.
@return: the new empty media or NULL on error.
]
variable[f] assign[=] <ast.BoolOp object at 0x7da1b1602980>
return[call[name[f], parameter[name[p_instance], name[psz_name]]]] | keyword[def] identifier[libvlc_media_new_as_node] ( identifier[p_instance] , identifier[psz_name] ):
literal[string]
identifier[f] = identifier[_Cfunctions] . identifier[get] ( literal[string] , keyword[None] ) keyword[or] identifier[_Cfunction] ( literal[string] ,(( literal[int] ,),( literal[int] ,),), identifier[class_result] ( identifier[Media] ),
identifier[ctypes] . identifier[c_void_p] , identifier[Instance] , identifier[ctypes] . identifier[c_char_p] )
keyword[return] identifier[f] ( identifier[p_instance] , identifier[psz_name] ) | def libvlc_media_new_as_node(p_instance, psz_name):
"""Create a media as an empty node with a given name.
See L{libvlc_media_release}.
@param p_instance: the instance.
@param psz_name: the name of the node.
@return: the new empty media or NULL on error.
"""
f = _Cfunctions.get('libvlc_media_new_as_node', None) or _Cfunction('libvlc_media_new_as_node', ((1,), (1,)), class_result(Media), ctypes.c_void_p, Instance, ctypes.c_char_p)
return f(p_instance, psz_name) |
def calculate_taper_function(obs_threshold_moment, sel_threshold_moment,
corner_moment, beta):
'''
Calculates the tapering function of the tapered Gutenberg & Richter model:
as described in Bird & Liu (2007)::
taper_function = (M_0(M_T) / M_0(M_T^{CMT}))^-beta x exp((M_0(m_T^CMT) -
M_0(m_T)) / M_0(m_c))
:param numpy.ndarray obs_threshold_moment:
Moment of the threshold magnitude of the observed earthquake catalogue
:param numpy.ndarray sel_threshold_moment:
Moment of the target magnitude
:param float corner_momnet:
Corner moment of the Tapered Gutenberg-Richter Function
:param float beta:
Beta value (b * ln(10.)) of the Tapered Gutenberg-Richter Function
:returns:
Relative moment rate
'''
argument = (obs_threshold_moment - sel_threshold_moment) /\
corner_moment
if argument < -100.0:
g_function = 0.0
else:
g_function = ((sel_threshold_moment / obs_threshold_moment) **
-beta) * exp(argument)
return g_function | def function[calculate_taper_function, parameter[obs_threshold_moment, sel_threshold_moment, corner_moment, beta]]:
constant[
Calculates the tapering function of the tapered Gutenberg & Richter model:
as described in Bird & Liu (2007)::
taper_function = (M_0(M_T) / M_0(M_T^{CMT}))^-beta x exp((M_0(m_T^CMT) -
M_0(m_T)) / M_0(m_c))
:param numpy.ndarray obs_threshold_moment:
Moment of the threshold magnitude of the observed earthquake catalogue
:param numpy.ndarray sel_threshold_moment:
Moment of the target magnitude
:param float corner_momnet:
Corner moment of the Tapered Gutenberg-Richter Function
:param float beta:
Beta value (b * ln(10.)) of the Tapered Gutenberg-Richter Function
:returns:
Relative moment rate
]
variable[argument] assign[=] binary_operation[binary_operation[name[obs_threshold_moment] - name[sel_threshold_moment]] / name[corner_moment]]
if compare[name[argument] less[<] <ast.UnaryOp object at 0x7da18ede7310>] begin[:]
variable[g_function] assign[=] constant[0.0]
return[name[g_function]] | keyword[def] identifier[calculate_taper_function] ( identifier[obs_threshold_moment] , identifier[sel_threshold_moment] ,
identifier[corner_moment] , identifier[beta] ):
literal[string]
identifier[argument] =( identifier[obs_threshold_moment] - identifier[sel_threshold_moment] )/ identifier[corner_moment]
keyword[if] identifier[argument] <- literal[int] :
identifier[g_function] = literal[int]
keyword[else] :
identifier[g_function] =(( identifier[sel_threshold_moment] / identifier[obs_threshold_moment] )**
- identifier[beta] )* identifier[exp] ( identifier[argument] )
keyword[return] identifier[g_function] | def calculate_taper_function(obs_threshold_moment, sel_threshold_moment, corner_moment, beta):
"""
Calculates the tapering function of the tapered Gutenberg & Richter model:
as described in Bird & Liu (2007)::
taper_function = (M_0(M_T) / M_0(M_T^{CMT}))^-beta x exp((M_0(m_T^CMT) -
M_0(m_T)) / M_0(m_c))
:param numpy.ndarray obs_threshold_moment:
Moment of the threshold magnitude of the observed earthquake catalogue
:param numpy.ndarray sel_threshold_moment:
Moment of the target magnitude
:param float corner_momnet:
Corner moment of the Tapered Gutenberg-Richter Function
:param float beta:
Beta value (b * ln(10.)) of the Tapered Gutenberg-Richter Function
:returns:
Relative moment rate
"""
argument = (obs_threshold_moment - sel_threshold_moment) / corner_moment
if argument < -100.0:
g_function = 0.0 # depends on [control=['if'], data=[]]
else:
g_function = (sel_threshold_moment / obs_threshold_moment) ** (-beta) * exp(argument)
return g_function |
def call(operation_name, *args, **kwargs):
"""Call a libvips operation.
Use this method to call any libvips operation. For example::
black_image = pyvips.Operation.call('black', 10, 10)
See the Introduction for notes on how this works.
"""
logger.debug('VipsOperation.call: operation_name = %s', operation_name)
# logger.debug('VipsOperation.call: args = %s, kwargs =%s',
# args, kwargs)
# pull out the special string_options kwarg
string_options = kwargs.pop('string_options', '')
logger.debug('VipsOperation.call: string_options = %s', string_options)
op = Operation.new_from_name(operation_name)
arguments = op.get_args()
# logger.debug('arguments = %s', arguments)
# make a thing to quickly get flags from an arg name
flags_from_name = {}
# count required input args
n_required = 0
for name, flags in arguments:
flags_from_name[name] = flags
if ((flags & _INPUT) != 0 and
(flags & _REQUIRED) != 0 and
(flags & _DEPRECATED) == 0):
n_required += 1
if n_required != len(args):
raise Error('unable to call {0}: {1} arguments given, '
'but {2} required'.format(operation_name, len(args),
n_required))
# the first image argument is the thing we expand constants to
# match ... look inside tables for images, since we may be passing
# an array of image as a single param
match_image = _find_inside(lambda x:
isinstance(x, pyvips.Image),
args)
logger.debug('VipsOperation.call: match_image = %s', match_image)
# set any string options before any args so they can't be
# overridden
if not op.set_string(string_options):
raise Error('unable to call {0}'.format(operation_name))
# set required and optional args
n = 0
for name, flags in arguments:
if ((flags & _INPUT) != 0 and
(flags & _REQUIRED) != 0 and
(flags & _DEPRECATED) == 0):
op.set(name, flags, match_image, args[n])
n += 1
for name, value in kwargs.items():
if name not in flags_from_name:
raise Error('{0} does not support argument '
'{1}'.format(operation_name, name))
op.set(name, flags_from_name[name], match_image, value)
# build operation
vop = vips_lib.vips_cache_operation_build(op.pointer)
if vop == ffi.NULL:
raise Error('unable to call {0}'.format(operation_name))
op = Operation(vop)
# find all input images and gather up all the references they hold
references = []
def add_reference(x):
if isinstance(x, pyvips.Image):
# += won't work on non-local references
for i in x._references:
references.append(i)
return False
_find_inside(add_reference, args)
for key, value in kwargs.items():
_find_inside(add_reference, value)
# fetch required output args, plus modified input images
result = []
for name, flags in arguments:
if ((flags & _OUTPUT) != 0 and
(flags & _REQUIRED) != 0 and
(flags & _DEPRECATED) == 0):
result.append(op.get(name))
if (flags & _INPUT) != 0 and (flags & _MODIFY) != 0:
result.append(op.get(name))
# fetch optional output args
opts = {}
for name, value in kwargs.items():
flags = flags_from_name[name]
if ((flags & _OUTPUT) != 0 and
(flags & _REQUIRED) == 0 and
(flags & _DEPRECATED) == 0):
opts[name] = op.get(name)
vips_lib.vips_object_unref_outputs(op.object)
if len(opts) > 0:
result.append(opts)
# all output images need all input references
def set_reference(x):
if isinstance(x, pyvips.Image):
x._references += references
return False
_find_inside(set_reference, result)
if len(result) == 0:
result = None
elif len(result) == 1:
result = result[0]
logger.debug('VipsOperation.call: result = %s', result)
return result | def function[call, parameter[operation_name]]:
constant[Call a libvips operation.
Use this method to call any libvips operation. For example::
black_image = pyvips.Operation.call('black', 10, 10)
See the Introduction for notes on how this works.
]
call[name[logger].debug, parameter[constant[VipsOperation.call: operation_name = %s], name[operation_name]]]
variable[string_options] assign[=] call[name[kwargs].pop, parameter[constant[string_options], constant[]]]
call[name[logger].debug, parameter[constant[VipsOperation.call: string_options = %s], name[string_options]]]
variable[op] assign[=] call[name[Operation].new_from_name, parameter[name[operation_name]]]
variable[arguments] assign[=] call[name[op].get_args, parameter[]]
variable[flags_from_name] assign[=] dictionary[[], []]
variable[n_required] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da20c6e5240>, <ast.Name object at 0x7da20c6e66e0>]]] in starred[name[arguments]] begin[:]
call[name[flags_from_name]][name[name]] assign[=] name[flags]
if <ast.BoolOp object at 0x7da20c6e4d90> begin[:]
<ast.AugAssign object at 0x7da20c6e7af0>
if compare[name[n_required] not_equal[!=] call[name[len], parameter[name[args]]]] begin[:]
<ast.Raise object at 0x7da20c6e57b0>
variable[match_image] assign[=] call[name[_find_inside], parameter[<ast.Lambda object at 0x7da20c6e5c60>, name[args]]]
call[name[logger].debug, parameter[constant[VipsOperation.call: match_image = %s], name[match_image]]]
if <ast.UnaryOp object at 0x7da20c6e63e0> begin[:]
<ast.Raise object at 0x7da20c6e51e0>
variable[n] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da20c6e72e0>, <ast.Name object at 0x7da20c6e7970>]]] in starred[name[arguments]] begin[:]
if <ast.BoolOp object at 0x7da20c6e5420> begin[:]
call[name[op].set, parameter[name[name], name[flags], name[match_image], call[name[args]][name[n]]]]
<ast.AugAssign object at 0x7da20c6e76a0>
for taget[tuple[[<ast.Name object at 0x7da20c6e4dc0>, <ast.Name object at 0x7da20c6e6a10>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[flags_from_name]] begin[:]
<ast.Raise object at 0x7da20c6e7e50>
call[name[op].set, parameter[name[name], call[name[flags_from_name]][name[name]], name[match_image], name[value]]]
variable[vop] assign[=] call[name[vips_lib].vips_cache_operation_build, parameter[name[op].pointer]]
if compare[name[vop] equal[==] name[ffi].NULL] begin[:]
<ast.Raise object at 0x7da20c6e49a0>
variable[op] assign[=] call[name[Operation], parameter[name[vop]]]
variable[references] assign[=] list[[]]
def function[add_reference, parameter[x]]:
if call[name[isinstance], parameter[name[x], name[pyvips].Image]] begin[:]
for taget[name[i]] in starred[name[x]._references] begin[:]
call[name[references].append, parameter[name[i]]]
return[constant[False]]
call[name[_find_inside], parameter[name[add_reference], name[args]]]
for taget[tuple[[<ast.Name object at 0x7da1b2347b80>, <ast.Name object at 0x7da1b2344250>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
call[name[_find_inside], parameter[name[add_reference], name[value]]]
variable[result] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b2346f50>, <ast.Name object at 0x7da1b23468c0>]]] in starred[name[arguments]] begin[:]
if <ast.BoolOp object at 0x7da1b2347130> begin[:]
call[name[result].append, parameter[call[name[op].get, parameter[name[name]]]]]
if <ast.BoolOp object at 0x7da1b2345ff0> begin[:]
call[name[result].append, parameter[call[name[op].get, parameter[name[name]]]]]
variable[opts] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b2347340>, <ast.Name object at 0x7da1b23465c0>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
variable[flags] assign[=] call[name[flags_from_name]][name[name]]
if <ast.BoolOp object at 0x7da1b2346ce0> begin[:]
call[name[opts]][name[name]] assign[=] call[name[op].get, parameter[name[name]]]
call[name[vips_lib].vips_object_unref_outputs, parameter[name[op].object]]
if compare[call[name[len], parameter[name[opts]]] greater[>] constant[0]] begin[:]
call[name[result].append, parameter[name[opts]]]
def function[set_reference, parameter[x]]:
if call[name[isinstance], parameter[name[x], name[pyvips].Image]] begin[:]
<ast.AugAssign object at 0x7da1b2347700>
return[constant[False]]
call[name[_find_inside], parameter[name[set_reference], name[result]]]
if compare[call[name[len], parameter[name[result]]] equal[==] constant[0]] begin[:]
variable[result] assign[=] constant[None]
call[name[logger].debug, parameter[constant[VipsOperation.call: result = %s], name[result]]]
return[name[result]] | keyword[def] identifier[call] ( identifier[operation_name] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[operation_name] )
identifier[string_options] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[string_options] )
identifier[op] = identifier[Operation] . identifier[new_from_name] ( identifier[operation_name] )
identifier[arguments] = identifier[op] . identifier[get_args] ()
identifier[flags_from_name] ={}
identifier[n_required] = literal[int]
keyword[for] identifier[name] , identifier[flags] keyword[in] identifier[arguments] :
identifier[flags_from_name] [ identifier[name] ]= identifier[flags]
keyword[if] (( identifier[flags] & identifier[_INPUT] )!= literal[int] keyword[and]
( identifier[flags] & identifier[_REQUIRED] )!= literal[int] keyword[and]
( identifier[flags] & identifier[_DEPRECATED] )== literal[int] ):
identifier[n_required] += literal[int]
keyword[if] identifier[n_required] != identifier[len] ( identifier[args] ):
keyword[raise] identifier[Error] ( literal[string]
literal[string] . identifier[format] ( identifier[operation_name] , identifier[len] ( identifier[args] ),
identifier[n_required] ))
identifier[match_image] = identifier[_find_inside] ( keyword[lambda] identifier[x] :
identifier[isinstance] ( identifier[x] , identifier[pyvips] . identifier[Image] ),
identifier[args] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[match_image] )
keyword[if] keyword[not] identifier[op] . identifier[set_string] ( identifier[string_options] ):
keyword[raise] identifier[Error] ( literal[string] . identifier[format] ( identifier[operation_name] ))
identifier[n] = literal[int]
keyword[for] identifier[name] , identifier[flags] keyword[in] identifier[arguments] :
keyword[if] (( identifier[flags] & identifier[_INPUT] )!= literal[int] keyword[and]
( identifier[flags] & identifier[_REQUIRED] )!= literal[int] keyword[and]
( identifier[flags] & identifier[_DEPRECATED] )== literal[int] ):
identifier[op] . identifier[set] ( identifier[name] , identifier[flags] , identifier[match_image] , identifier[args] [ identifier[n] ])
identifier[n] += literal[int]
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] ():
keyword[if] identifier[name] keyword[not] keyword[in] identifier[flags_from_name] :
keyword[raise] identifier[Error] ( literal[string]
literal[string] . identifier[format] ( identifier[operation_name] , identifier[name] ))
identifier[op] . identifier[set] ( identifier[name] , identifier[flags_from_name] [ identifier[name] ], identifier[match_image] , identifier[value] )
identifier[vop] = identifier[vips_lib] . identifier[vips_cache_operation_build] ( identifier[op] . identifier[pointer] )
keyword[if] identifier[vop] == identifier[ffi] . identifier[NULL] :
keyword[raise] identifier[Error] ( literal[string] . identifier[format] ( identifier[operation_name] ))
identifier[op] = identifier[Operation] ( identifier[vop] )
identifier[references] =[]
keyword[def] identifier[add_reference] ( identifier[x] ):
keyword[if] identifier[isinstance] ( identifier[x] , identifier[pyvips] . identifier[Image] ):
keyword[for] identifier[i] keyword[in] identifier[x] . identifier[_references] :
identifier[references] . identifier[append] ( identifier[i] )
keyword[return] keyword[False]
identifier[_find_inside] ( identifier[add_reference] , identifier[args] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] ():
identifier[_find_inside] ( identifier[add_reference] , identifier[value] )
identifier[result] =[]
keyword[for] identifier[name] , identifier[flags] keyword[in] identifier[arguments] :
keyword[if] (( identifier[flags] & identifier[_OUTPUT] )!= literal[int] keyword[and]
( identifier[flags] & identifier[_REQUIRED] )!= literal[int] keyword[and]
( identifier[flags] & identifier[_DEPRECATED] )== literal[int] ):
identifier[result] . identifier[append] ( identifier[op] . identifier[get] ( identifier[name] ))
keyword[if] ( identifier[flags] & identifier[_INPUT] )!= literal[int] keyword[and] ( identifier[flags] & identifier[_MODIFY] )!= literal[int] :
identifier[result] . identifier[append] ( identifier[op] . identifier[get] ( identifier[name] ))
identifier[opts] ={}
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] ():
identifier[flags] = identifier[flags_from_name] [ identifier[name] ]
keyword[if] (( identifier[flags] & identifier[_OUTPUT] )!= literal[int] keyword[and]
( identifier[flags] & identifier[_REQUIRED] )== literal[int] keyword[and]
( identifier[flags] & identifier[_DEPRECATED] )== literal[int] ):
identifier[opts] [ identifier[name] ]= identifier[op] . identifier[get] ( identifier[name] )
identifier[vips_lib] . identifier[vips_object_unref_outputs] ( identifier[op] . identifier[object] )
keyword[if] identifier[len] ( identifier[opts] )> literal[int] :
identifier[result] . identifier[append] ( identifier[opts] )
keyword[def] identifier[set_reference] ( identifier[x] ):
keyword[if] identifier[isinstance] ( identifier[x] , identifier[pyvips] . identifier[Image] ):
identifier[x] . identifier[_references] += identifier[references]
keyword[return] keyword[False]
identifier[_find_inside] ( identifier[set_reference] , identifier[result] )
keyword[if] identifier[len] ( identifier[result] )== literal[int] :
identifier[result] = keyword[None]
keyword[elif] identifier[len] ( identifier[result] )== literal[int] :
identifier[result] = identifier[result] [ literal[int] ]
identifier[logger] . identifier[debug] ( literal[string] , identifier[result] )
keyword[return] identifier[result] | def call(operation_name, *args, **kwargs):
"""Call a libvips operation.
Use this method to call any libvips operation. For example::
black_image = pyvips.Operation.call('black', 10, 10)
See the Introduction for notes on how this works.
"""
logger.debug('VipsOperation.call: operation_name = %s', operation_name)
# logger.debug('VipsOperation.call: args = %s, kwargs =%s',
# args, kwargs)
# pull out the special string_options kwarg
string_options = kwargs.pop('string_options', '')
logger.debug('VipsOperation.call: string_options = %s', string_options)
op = Operation.new_from_name(operation_name)
arguments = op.get_args()
# logger.debug('arguments = %s', arguments)
# make a thing to quickly get flags from an arg name
flags_from_name = {}
# count required input args
n_required = 0
for (name, flags) in arguments:
flags_from_name[name] = flags
if flags & _INPUT != 0 and flags & _REQUIRED != 0 and (flags & _DEPRECATED == 0):
n_required += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if n_required != len(args):
raise Error('unable to call {0}: {1} arguments given, but {2} required'.format(operation_name, len(args), n_required)) # depends on [control=['if'], data=['n_required']]
# the first image argument is the thing we expand constants to
# match ... look inside tables for images, since we may be passing
# an array of image as a single param
match_image = _find_inside(lambda x: isinstance(x, pyvips.Image), args)
logger.debug('VipsOperation.call: match_image = %s', match_image)
# set any string options before any args so they can't be
# overridden
if not op.set_string(string_options):
raise Error('unable to call {0}'.format(operation_name)) # depends on [control=['if'], data=[]]
# set required and optional args
n = 0
for (name, flags) in arguments:
if flags & _INPUT != 0 and flags & _REQUIRED != 0 and (flags & _DEPRECATED == 0):
op.set(name, flags, match_image, args[n])
n += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for (name, value) in kwargs.items():
if name not in flags_from_name:
raise Error('{0} does not support argument {1}'.format(operation_name, name)) # depends on [control=['if'], data=['name']]
op.set(name, flags_from_name[name], match_image, value) # depends on [control=['for'], data=[]]
# build operation
vop = vips_lib.vips_cache_operation_build(op.pointer)
if vop == ffi.NULL:
raise Error('unable to call {0}'.format(operation_name)) # depends on [control=['if'], data=[]]
op = Operation(vop)
# find all input images and gather up all the references they hold
references = []
def add_reference(x):
if isinstance(x, pyvips.Image):
# += won't work on non-local references
for i in x._references:
references.append(i) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
return False
_find_inside(add_reference, args)
for (key, value) in kwargs.items():
_find_inside(add_reference, value) # depends on [control=['for'], data=[]]
# fetch required output args, plus modified input images
result = []
for (name, flags) in arguments:
if flags & _OUTPUT != 0 and flags & _REQUIRED != 0 and (flags & _DEPRECATED == 0):
result.append(op.get(name)) # depends on [control=['if'], data=[]]
if flags & _INPUT != 0 and flags & _MODIFY != 0:
result.append(op.get(name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# fetch optional output args
opts = {}
for (name, value) in kwargs.items():
flags = flags_from_name[name]
if flags & _OUTPUT != 0 and flags & _REQUIRED == 0 and (flags & _DEPRECATED == 0):
opts[name] = op.get(name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
vips_lib.vips_object_unref_outputs(op.object)
if len(opts) > 0:
result.append(opts) # depends on [control=['if'], data=[]]
# all output images need all input references
def set_reference(x):
if isinstance(x, pyvips.Image):
x._references += references # depends on [control=['if'], data=[]]
return False
_find_inside(set_reference, result)
if len(result) == 0:
result = None # depends on [control=['if'], data=[]]
elif len(result) == 1:
result = result[0] # depends on [control=['if'], data=[]]
logger.debug('VipsOperation.call: result = %s', result)
return result |
def inspect_work_unit(self, work_spec_name, work_unit_key):
'''Get the data for some work unit.
Returns the data for that work unit, or `None` if it really
can't be found.
:param str work_spec_name: name of the work spec
:param str work_unit_key: name of the work unit
:return: definition of the work unit, or `None`
'''
with self.registry.lock(identifier=self.worker_id) as session:
work_unit_data = session.get(
WORK_UNITS_ + work_spec_name, work_unit_key)
if not work_unit_data:
work_unit_data = session.get(
WORK_UNITS_ + work_spec_name + _BLOCKED, work_unit_key)
if not work_unit_data:
work_unit_data = session.get(
WORK_UNITS_ + work_spec_name + _FINISHED, work_unit_key)
if not work_unit_data:
work_unit_data = session.get(
WORK_UNITS_ + work_spec_name + _FAILED, work_unit_key)
return work_unit_data | def function[inspect_work_unit, parameter[self, work_spec_name, work_unit_key]]:
constant[Get the data for some work unit.
Returns the data for that work unit, or `None` if it really
can't be found.
:param str work_spec_name: name of the work spec
:param str work_unit_key: name of the work unit
:return: definition of the work unit, or `None`
]
with call[name[self].registry.lock, parameter[]] begin[:]
variable[work_unit_data] assign[=] call[name[session].get, parameter[binary_operation[name[WORK_UNITS_] + name[work_spec_name]], name[work_unit_key]]]
if <ast.UnaryOp object at 0x7da1b146ded0> begin[:]
variable[work_unit_data] assign[=] call[name[session].get, parameter[binary_operation[binary_operation[name[WORK_UNITS_] + name[work_spec_name]] + name[_BLOCKED]], name[work_unit_key]]]
if <ast.UnaryOp object at 0x7da1b146d030> begin[:]
variable[work_unit_data] assign[=] call[name[session].get, parameter[binary_operation[binary_operation[name[WORK_UNITS_] + name[work_spec_name]] + name[_FINISHED]], name[work_unit_key]]]
if <ast.UnaryOp object at 0x7da1b146d9c0> begin[:]
variable[work_unit_data] assign[=] call[name[session].get, parameter[binary_operation[binary_operation[name[WORK_UNITS_] + name[work_spec_name]] + name[_FAILED]], name[work_unit_key]]]
return[name[work_unit_data]] | keyword[def] identifier[inspect_work_unit] ( identifier[self] , identifier[work_spec_name] , identifier[work_unit_key] ):
literal[string]
keyword[with] identifier[self] . identifier[registry] . identifier[lock] ( identifier[identifier] = identifier[self] . identifier[worker_id] ) keyword[as] identifier[session] :
identifier[work_unit_data] = identifier[session] . identifier[get] (
identifier[WORK_UNITS_] + identifier[work_spec_name] , identifier[work_unit_key] )
keyword[if] keyword[not] identifier[work_unit_data] :
identifier[work_unit_data] = identifier[session] . identifier[get] (
identifier[WORK_UNITS_] + identifier[work_spec_name] + identifier[_BLOCKED] , identifier[work_unit_key] )
keyword[if] keyword[not] identifier[work_unit_data] :
identifier[work_unit_data] = identifier[session] . identifier[get] (
identifier[WORK_UNITS_] + identifier[work_spec_name] + identifier[_FINISHED] , identifier[work_unit_key] )
keyword[if] keyword[not] identifier[work_unit_data] :
identifier[work_unit_data] = identifier[session] . identifier[get] (
identifier[WORK_UNITS_] + identifier[work_spec_name] + identifier[_FAILED] , identifier[work_unit_key] )
keyword[return] identifier[work_unit_data] | def inspect_work_unit(self, work_spec_name, work_unit_key):
"""Get the data for some work unit.
Returns the data for that work unit, or `None` if it really
can't be found.
:param str work_spec_name: name of the work spec
:param str work_unit_key: name of the work unit
:return: definition of the work unit, or `None`
"""
with self.registry.lock(identifier=self.worker_id) as session:
work_unit_data = session.get(WORK_UNITS_ + work_spec_name, work_unit_key)
if not work_unit_data:
work_unit_data = session.get(WORK_UNITS_ + work_spec_name + _BLOCKED, work_unit_key) # depends on [control=['if'], data=[]]
if not work_unit_data:
work_unit_data = session.get(WORK_UNITS_ + work_spec_name + _FINISHED, work_unit_key) # depends on [control=['if'], data=[]]
if not work_unit_data:
work_unit_data = session.get(WORK_UNITS_ + work_spec_name + _FAILED, work_unit_key) # depends on [control=['if'], data=[]]
return work_unit_data # depends on [control=['with'], data=['session']] |
def feature_extraction(self, algorithms):
"""Get a list of features.
Every algorithm has to return the features as a list."""
assert type(algorithms) is list
features = []
for algorithm in algorithms:
new_features = algorithm(self)
assert len(new_features) == algorithm.get_dimension(), \
"Expected %i features from algorithm %s, got %i features" % \
(algorithm.get_dimension(), str(algorithm), len(new_features))
features += new_features
return features | def function[feature_extraction, parameter[self, algorithms]]:
constant[Get a list of features.
Every algorithm has to return the features as a list.]
assert[compare[call[name[type], parameter[name[algorithms]]] is name[list]]]
variable[features] assign[=] list[[]]
for taget[name[algorithm]] in starred[name[algorithms]] begin[:]
variable[new_features] assign[=] call[name[algorithm], parameter[name[self]]]
assert[compare[call[name[len], parameter[name[new_features]]] equal[==] call[name[algorithm].get_dimension, parameter[]]]]
<ast.AugAssign object at 0x7da1b2608b50>
return[name[features]] | keyword[def] identifier[feature_extraction] ( identifier[self] , identifier[algorithms] ):
literal[string]
keyword[assert] identifier[type] ( identifier[algorithms] ) keyword[is] identifier[list]
identifier[features] =[]
keyword[for] identifier[algorithm] keyword[in] identifier[algorithms] :
identifier[new_features] = identifier[algorithm] ( identifier[self] )
keyword[assert] identifier[len] ( identifier[new_features] )== identifier[algorithm] . identifier[get_dimension] (), literal[string] %( identifier[algorithm] . identifier[get_dimension] (), identifier[str] ( identifier[algorithm] ), identifier[len] ( identifier[new_features] ))
identifier[features] += identifier[new_features]
keyword[return] identifier[features] | def feature_extraction(self, algorithms):
"""Get a list of features.
Every algorithm has to return the features as a list."""
assert type(algorithms) is list
features = []
for algorithm in algorithms:
new_features = algorithm(self)
assert len(new_features) == algorithm.get_dimension(), 'Expected %i features from algorithm %s, got %i features' % (algorithm.get_dimension(), str(algorithm), len(new_features))
features += new_features # depends on [control=['for'], data=['algorithm']]
return features |
def extend_left_to(self, window, max_size):
"""Adjust the offset to start where the given window on our left ends if possible,
but don't make yourself larger than max_size.
The resize will assure that the new window still contains the old window area"""
rofs = self.ofs - window.ofs_end()
nsize = rofs + self.size
rofs -= nsize - min(nsize, max_size)
self.ofs = self.ofs - rofs
self.size += rofs | def function[extend_left_to, parameter[self, window, max_size]]:
constant[Adjust the offset to start where the given window on our left ends if possible,
but don't make yourself larger than max_size.
The resize will assure that the new window still contains the old window area]
variable[rofs] assign[=] binary_operation[name[self].ofs - call[name[window].ofs_end, parameter[]]]
variable[nsize] assign[=] binary_operation[name[rofs] + name[self].size]
<ast.AugAssign object at 0x7da18bc702b0>
name[self].ofs assign[=] binary_operation[name[self].ofs - name[rofs]]
<ast.AugAssign object at 0x7da18bc73760> | keyword[def] identifier[extend_left_to] ( identifier[self] , identifier[window] , identifier[max_size] ):
literal[string]
identifier[rofs] = identifier[self] . identifier[ofs] - identifier[window] . identifier[ofs_end] ()
identifier[nsize] = identifier[rofs] + identifier[self] . identifier[size]
identifier[rofs] -= identifier[nsize] - identifier[min] ( identifier[nsize] , identifier[max_size] )
identifier[self] . identifier[ofs] = identifier[self] . identifier[ofs] - identifier[rofs]
identifier[self] . identifier[size] += identifier[rofs] | def extend_left_to(self, window, max_size):
"""Adjust the offset to start where the given window on our left ends if possible,
but don't make yourself larger than max_size.
The resize will assure that the new window still contains the old window area"""
rofs = self.ofs - window.ofs_end()
nsize = rofs + self.size
rofs -= nsize - min(nsize, max_size)
self.ofs = self.ofs - rofs
self.size += rofs |
def render(self, request):
"""
Render a request by forwarding it to the proxied server.
"""
# set up and evaluate a connection to the target server
if self.port == 80:
host = self.host
else:
host = "%s:%d" % (self.host, self.port)
request.requestHeaders.addRawHeader('host', host)
request.content.seek(0, 0)
qs = urlparse.urlparse(request.uri)[4]
if qs:
rest = self.path + '?' + qs
else:
rest = self.path
global_self = self.getGlobalSelf()
clientFactory = self.proxyClientFactoryClass(
request.method, rest, request.clientproto,
request.getAllHeaders(), request.content.read(), request,
global_self # this is new
)
self.reactor.connectTCP(self.host, self.port, clientFactory)
return NOT_DONE_YET | def function[render, parameter[self, request]]:
constant[
Render a request by forwarding it to the proxied server.
]
if compare[name[self].port equal[==] constant[80]] begin[:]
variable[host] assign[=] name[self].host
call[name[request].requestHeaders.addRawHeader, parameter[constant[host], name[host]]]
call[name[request].content.seek, parameter[constant[0], constant[0]]]
variable[qs] assign[=] call[call[name[urlparse].urlparse, parameter[name[request].uri]]][constant[4]]
if name[qs] begin[:]
variable[rest] assign[=] binary_operation[binary_operation[name[self].path + constant[?]] + name[qs]]
variable[global_self] assign[=] call[name[self].getGlobalSelf, parameter[]]
variable[clientFactory] assign[=] call[name[self].proxyClientFactoryClass, parameter[name[request].method, name[rest], name[request].clientproto, call[name[request].getAllHeaders, parameter[]], call[name[request].content.read, parameter[]], name[request], name[global_self]]]
call[name[self].reactor.connectTCP, parameter[name[self].host, name[self].port, name[clientFactory]]]
return[name[NOT_DONE_YET]] | keyword[def] identifier[render] ( identifier[self] , identifier[request] ):
literal[string]
keyword[if] identifier[self] . identifier[port] == literal[int] :
identifier[host] = identifier[self] . identifier[host]
keyword[else] :
identifier[host] = literal[string] %( identifier[self] . identifier[host] , identifier[self] . identifier[port] )
identifier[request] . identifier[requestHeaders] . identifier[addRawHeader] ( literal[string] , identifier[host] )
identifier[request] . identifier[content] . identifier[seek] ( literal[int] , literal[int] )
identifier[qs] = identifier[urlparse] . identifier[urlparse] ( identifier[request] . identifier[uri] )[ literal[int] ]
keyword[if] identifier[qs] :
identifier[rest] = identifier[self] . identifier[path] + literal[string] + identifier[qs]
keyword[else] :
identifier[rest] = identifier[self] . identifier[path]
identifier[global_self] = identifier[self] . identifier[getGlobalSelf] ()
identifier[clientFactory] = identifier[self] . identifier[proxyClientFactoryClass] (
identifier[request] . identifier[method] , identifier[rest] , identifier[request] . identifier[clientproto] ,
identifier[request] . identifier[getAllHeaders] (), identifier[request] . identifier[content] . identifier[read] (), identifier[request] ,
identifier[global_self]
)
identifier[self] . identifier[reactor] . identifier[connectTCP] ( identifier[self] . identifier[host] , identifier[self] . identifier[port] , identifier[clientFactory] )
keyword[return] identifier[NOT_DONE_YET] | def render(self, request):
"""
Render a request by forwarding it to the proxied server.
"""
# set up and evaluate a connection to the target server
if self.port == 80:
host = self.host # depends on [control=['if'], data=[]]
else:
host = '%s:%d' % (self.host, self.port)
request.requestHeaders.addRawHeader('host', host)
request.content.seek(0, 0)
qs = urlparse.urlparse(request.uri)[4]
if qs:
rest = self.path + '?' + qs # depends on [control=['if'], data=[]]
else:
rest = self.path
global_self = self.getGlobalSelf() # this is new
clientFactory = self.proxyClientFactoryClass(request.method, rest, request.clientproto, request.getAllHeaders(), request.content.read(), request, global_self)
self.reactor.connectTCP(self.host, self.port, clientFactory)
return NOT_DONE_YET |
def create(self):
"""Create this database within its instance
Inclues any configured schema assigned to :attr:`ddl_statements`.
See
https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase
:rtype: :class:`~google.api_core.operation.Operation`
:returns: a future used to poll the status of the create request
:raises Conflict: if the database already exists
:raises NotFound: if the instance owning the database does not exist
"""
api = self._instance._client.database_admin_api
metadata = _metadata_with_prefix(self.name)
db_name = self.database_id
if "-" in db_name:
db_name = "`%s`" % (db_name,)
future = api.create_database(
parent=self._instance.name,
create_statement="CREATE DATABASE %s" % (db_name,),
extra_statements=list(self._ddl_statements),
metadata=metadata,
)
return future | def function[create, parameter[self]]:
constant[Create this database within its instance
Inclues any configured schema assigned to :attr:`ddl_statements`.
See
https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase
:rtype: :class:`~google.api_core.operation.Operation`
:returns: a future used to poll the status of the create request
:raises Conflict: if the database already exists
:raises NotFound: if the instance owning the database does not exist
]
variable[api] assign[=] name[self]._instance._client.database_admin_api
variable[metadata] assign[=] call[name[_metadata_with_prefix], parameter[name[self].name]]
variable[db_name] assign[=] name[self].database_id
if compare[constant[-] in name[db_name]] begin[:]
variable[db_name] assign[=] binary_operation[constant[`%s`] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c76e680>]]]
variable[future] assign[=] call[name[api].create_database, parameter[]]
return[name[future]] | keyword[def] identifier[create] ( identifier[self] ):
literal[string]
identifier[api] = identifier[self] . identifier[_instance] . identifier[_client] . identifier[database_admin_api]
identifier[metadata] = identifier[_metadata_with_prefix] ( identifier[self] . identifier[name] )
identifier[db_name] = identifier[self] . identifier[database_id]
keyword[if] literal[string] keyword[in] identifier[db_name] :
identifier[db_name] = literal[string] %( identifier[db_name] ,)
identifier[future] = identifier[api] . identifier[create_database] (
identifier[parent] = identifier[self] . identifier[_instance] . identifier[name] ,
identifier[create_statement] = literal[string] %( identifier[db_name] ,),
identifier[extra_statements] = identifier[list] ( identifier[self] . identifier[_ddl_statements] ),
identifier[metadata] = identifier[metadata] ,
)
keyword[return] identifier[future] | def create(self):
"""Create this database within its instance
Inclues any configured schema assigned to :attr:`ddl_statements`.
See
https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase
:rtype: :class:`~google.api_core.operation.Operation`
:returns: a future used to poll the status of the create request
:raises Conflict: if the database already exists
:raises NotFound: if the instance owning the database does not exist
"""
api = self._instance._client.database_admin_api
metadata = _metadata_with_prefix(self.name)
db_name = self.database_id
if '-' in db_name:
db_name = '`%s`' % (db_name,) # depends on [control=['if'], data=['db_name']]
future = api.create_database(parent=self._instance.name, create_statement='CREATE DATABASE %s' % (db_name,), extra_statements=list(self._ddl_statements), metadata=metadata)
return future |
def _get_total_read_size(self):
"""How much event data to process at once."""
if self.read_size:
read_size = EVENT_SIZE * self.read_size
else:
read_size = EVENT_SIZE
return read_size | def function[_get_total_read_size, parameter[self]]:
constant[How much event data to process at once.]
if name[self].read_size begin[:]
variable[read_size] assign[=] binary_operation[name[EVENT_SIZE] * name[self].read_size]
return[name[read_size]] | keyword[def] identifier[_get_total_read_size] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[read_size] :
identifier[read_size] = identifier[EVENT_SIZE] * identifier[self] . identifier[read_size]
keyword[else] :
identifier[read_size] = identifier[EVENT_SIZE]
keyword[return] identifier[read_size] | def _get_total_read_size(self):
"""How much event data to process at once."""
if self.read_size:
read_size = EVENT_SIZE * self.read_size # depends on [control=['if'], data=[]]
else:
read_size = EVENT_SIZE
return read_size |
def should_checkpoint(self):
"""Whether this trial is due for checkpointing."""
result = self.last_result or {}
if result.get(DONE) and self.checkpoint_at_end:
return True
if self.checkpoint_freq:
return result.get(TRAINING_ITERATION,
0) % self.checkpoint_freq == 0
else:
return False | def function[should_checkpoint, parameter[self]]:
constant[Whether this trial is due for checkpointing.]
variable[result] assign[=] <ast.BoolOp object at 0x7da1b2345b70>
if <ast.BoolOp object at 0x7da1b23473d0> begin[:]
return[constant[True]]
if name[self].checkpoint_freq begin[:]
return[compare[binary_operation[call[name[result].get, parameter[name[TRAINING_ITERATION], constant[0]]] <ast.Mod object at 0x7da2590d6920> name[self].checkpoint_freq] equal[==] constant[0]]] | keyword[def] identifier[should_checkpoint] ( identifier[self] ):
literal[string]
identifier[result] = identifier[self] . identifier[last_result] keyword[or] {}
keyword[if] identifier[result] . identifier[get] ( identifier[DONE] ) keyword[and] identifier[self] . identifier[checkpoint_at_end] :
keyword[return] keyword[True]
keyword[if] identifier[self] . identifier[checkpoint_freq] :
keyword[return] identifier[result] . identifier[get] ( identifier[TRAINING_ITERATION] ,
literal[int] )% identifier[self] . identifier[checkpoint_freq] == literal[int]
keyword[else] :
keyword[return] keyword[False] | def should_checkpoint(self):
"""Whether this trial is due for checkpointing."""
result = self.last_result or {}
if result.get(DONE) and self.checkpoint_at_end:
return True # depends on [control=['if'], data=[]]
if self.checkpoint_freq:
return result.get(TRAINING_ITERATION, 0) % self.checkpoint_freq == 0 # depends on [control=['if'], data=[]]
else:
return False |
def get_data(path):
"""
Returns data from a package directory.
'path' should be an absolute path.
"""
# Run the imported setup to get the metadata.
with FakeContext(path):
with SetupMonkey() as sm:
try:
distro = run_setup('setup.py', stop_after='config')
metadata = {'_setuptools': sm.used_setuptools}
for k, v in distro.metadata.__dict__.items():
if k[0] == '_' or not v:
continue
if all(not x for x in v):
continue
metadata[k] = v
if sm.used_setuptools:
for extras in ['cmdclass', 'zip_safe', 'test_suite']:
v = getattr(distro, extras, None)
if v is not None and v not in ([], {}):
metadata[extras] = v
except ImportError as e:
# Either there is no setup.py, or it's broken.
logging.exception(e)
metadata = {}
return metadata | def function[get_data, parameter[path]]:
constant[
Returns data from a package directory.
'path' should be an absolute path.
]
with call[name[FakeContext], parameter[name[path]]] begin[:]
with call[name[SetupMonkey], parameter[]] begin[:]
<ast.Try object at 0x7da1b025e230>
return[name[metadata]] | keyword[def] identifier[get_data] ( identifier[path] ):
literal[string]
keyword[with] identifier[FakeContext] ( identifier[path] ):
keyword[with] identifier[SetupMonkey] () keyword[as] identifier[sm] :
keyword[try] :
identifier[distro] = identifier[run_setup] ( literal[string] , identifier[stop_after] = literal[string] )
identifier[metadata] ={ literal[string] : identifier[sm] . identifier[used_setuptools] }
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[distro] . identifier[metadata] . identifier[__dict__] . identifier[items] ():
keyword[if] identifier[k] [ literal[int] ]== literal[string] keyword[or] keyword[not] identifier[v] :
keyword[continue]
keyword[if] identifier[all] ( keyword[not] identifier[x] keyword[for] identifier[x] keyword[in] identifier[v] ):
keyword[continue]
identifier[metadata] [ identifier[k] ]= identifier[v]
keyword[if] identifier[sm] . identifier[used_setuptools] :
keyword[for] identifier[extras] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[v] = identifier[getattr] ( identifier[distro] , identifier[extras] , keyword[None] )
keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] keyword[and] identifier[v] keyword[not] keyword[in] ([],{}):
identifier[metadata] [ identifier[extras] ]= identifier[v]
keyword[except] identifier[ImportError] keyword[as] identifier[e] :
identifier[logging] . identifier[exception] ( identifier[e] )
identifier[metadata] ={}
keyword[return] identifier[metadata] | def get_data(path):
"""
Returns data from a package directory.
'path' should be an absolute path.
"""
# Run the imported setup to get the metadata.
with FakeContext(path):
with SetupMonkey() as sm:
try:
distro = run_setup('setup.py', stop_after='config')
metadata = {'_setuptools': sm.used_setuptools}
for (k, v) in distro.metadata.__dict__.items():
if k[0] == '_' or not v:
continue # depends on [control=['if'], data=[]]
if all((not x for x in v)):
continue # depends on [control=['if'], data=[]]
metadata[k] = v # depends on [control=['for'], data=[]]
if sm.used_setuptools:
for extras in ['cmdclass', 'zip_safe', 'test_suite']:
v = getattr(distro, extras, None)
if v is not None and v not in ([], {}):
metadata[extras] = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['extras']] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ImportError as e:
# Either there is no setup.py, or it's broken.
logging.exception(e)
metadata = {} # depends on [control=['except'], data=['e']] # depends on [control=['with'], data=['sm']] # depends on [control=['with'], data=[]]
return metadata |
def primary_suffix(name,
suffix=None,
updates=False):
'''
.. versionadded:: 2014.7.0
Configure the global primary DNS suffix of a DHCP client.
suffix : None
The suffix which is advertised for this client when acquiring a DHCP lease
When none is set, the explicitly configured DNS suffix will be removed.
updates : False
Allow syncing the DNS suffix with the AD domain when the client's AD domain membership changes
.. code-block:: yaml
primary_dns_suffix:
win_dns_client.primary_suffix:
- suffix: sub.domain.tld
- updates: True
'''
ret = {
'name': name,
'changes': {},
'result': True,
'comment': 'No changes needed'
}
suffix = str(suffix)
if not isinstance(updates, bool):
ret['result'] = False
ret['comment'] = '\'updates\' must be a boolean value'
return ret
# TODO: waiting for an implementation of
# https://github.com/saltstack/salt/issues/6792 to be able to handle the
# requirement for a reboot to actually apply this state.
# Until then, this method will only be able to verify that the required
# value has been written to the registry and rebooting needs to be handled
# manually
reg_data = {
'suffix': {
'hive': 'HKEY_LOCAL_MACHINE',
'key': r'SYSTEM\CurrentControlSet\services\Tcpip\Parameters',
'vname': 'NV Domain',
'vtype': 'REG_SZ',
'old': None,
'new': suffix
},
'updates': {
'hive': 'HKEY_LOCAL_MACHINE',
'key': r'SYSTEM\CurrentControlSet\services\Tcpip\Parameters',
'vname': 'SyncDomainWithMembership',
'vtype': 'REG_DWORD',
'old': None,
'new': updates
}
}
reg_data['suffix']['old'] = __utils__['reg.read_value'](
reg_data['suffix']['hive'],
reg_data['suffix']['key'],
reg_data['suffix']['vname'],)['vdata']
reg_data['updates']['old'] = bool(__utils__['reg.read_value'](
reg_data['updates']['hive'],
reg_data['updates']['key'],
reg_data['updates']['vname'],)['vdata'])
updates_operation = 'enabled' if reg_data['updates']['new'] else 'disabled'
# No changes to suffix needed
if reg_data['suffix']['new'] == reg_data['suffix']['old']:
# No changes to updates policy needed
if reg_data['updates']['new'] == reg_data['updates']['old']:
return ret
# Changes to update policy needed
else:
ret['comment'] = '{0} suffix updates'.format(updates_operation)
ret['changes'] = {
'old': {
'updates': reg_data['updates']['old']},
'new': {
'updates': reg_data['updates']['new']}}
# Changes to suffix needed
else:
# Changes to updates policy needed
if reg_data['updates']['new'] != reg_data['updates']['old']:
ret['comment'] = 'Updated primary DNS suffix ({0}) and {1} suffix updates'.format(suffix, updates_operation)
ret['changes'] = {
'old': {
'suffix': reg_data['suffix']['old'],
'updates': reg_data['updates']['old']},
'new': {
'suffix': reg_data['suffix']['new'],
'updates': reg_data['updates']['new']}}
# No changes to updates policy needed
else:
ret['comment'] = 'Updated primary DNS suffix ({0})'.format(suffix)
ret['changes'] = {
'old': {
'suffix': reg_data['suffix']['old']},
'new': {
'suffix': reg_data['suffix']['new']}}
suffix_result = __utils__['reg.set_value'](
reg_data['suffix']['hive'],
reg_data['suffix']['key'],
reg_data['suffix']['vname'],
reg_data['suffix']['new'],
reg_data['suffix']['vtype'])
updates_result = __utils__['reg.set_value'](
reg_data['updates']['hive'],
reg_data['updates']['key'],
reg_data['updates']['vname'],
reg_data['updates']['new'],
reg_data['updates']['vtype'])
ret['result'] = suffix_result & updates_result
return ret | def function[primary_suffix, parameter[name, suffix, updates]]:
constant[
.. versionadded:: 2014.7.0
Configure the global primary DNS suffix of a DHCP client.
suffix : None
The suffix which is advertised for this client when acquiring a DHCP lease
When none is set, the explicitly configured DNS suffix will be removed.
updates : False
Allow syncing the DNS suffix with the AD domain when the client's AD domain membership changes
.. code-block:: yaml
primary_dns_suffix:
win_dns_client.primary_suffix:
- suffix: sub.domain.tld
- updates: True
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1ebea10>, <ast.Constant object at 0x7da1b1ebeb60>, <ast.Constant object at 0x7da1b1ebfb50>, <ast.Constant object at 0x7da1b1ebfcd0>], [<ast.Name object at 0x7da1b1ebfdf0>, <ast.Dict object at 0x7da1b1ebeb90>, <ast.Constant object at 0x7da1b1ebfa60>, <ast.Constant object at 0x7da1b1ebfe20>]]
variable[suffix] assign[=] call[name[str], parameter[name[suffix]]]
if <ast.UnaryOp object at 0x7da1b1ebf3d0> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] constant['updates' must be a boolean value]
return[name[ret]]
variable[reg_data] assign[=] dictionary[[<ast.Constant object at 0x7da20c76e080>, <ast.Constant object at 0x7da20c76c5b0>], [<ast.Dict object at 0x7da20c76d9c0>, <ast.Dict object at 0x7da20c76d2d0>]]
call[call[name[reg_data]][constant[suffix]]][constant[old]] assign[=] call[call[call[name[__utils__]][constant[reg.read_value]], parameter[call[call[name[reg_data]][constant[suffix]]][constant[hive]], call[call[name[reg_data]][constant[suffix]]][constant[key]], call[call[name[reg_data]][constant[suffix]]][constant[vname]]]]][constant[vdata]]
call[call[name[reg_data]][constant[updates]]][constant[old]] assign[=] call[name[bool], parameter[call[call[call[name[__utils__]][constant[reg.read_value]], parameter[call[call[name[reg_data]][constant[updates]]][constant[hive]], call[call[name[reg_data]][constant[updates]]][constant[key]], call[call[name[reg_data]][constant[updates]]][constant[vname]]]]][constant[vdata]]]]
variable[updates_operation] assign[=] <ast.IfExp object at 0x7da20c76efe0>
if compare[call[call[name[reg_data]][constant[suffix]]][constant[new]] equal[==] call[call[name[reg_data]][constant[suffix]]][constant[old]]] begin[:]
if compare[call[call[name[reg_data]][constant[updates]]][constant[new]] equal[==] call[call[name[reg_data]][constant[updates]]][constant[old]]] begin[:]
return[name[ret]]
variable[suffix_result] assign[=] call[call[name[__utils__]][constant[reg.set_value]], parameter[call[call[name[reg_data]][constant[suffix]]][constant[hive]], call[call[name[reg_data]][constant[suffix]]][constant[key]], call[call[name[reg_data]][constant[suffix]]][constant[vname]], call[call[name[reg_data]][constant[suffix]]][constant[new]], call[call[name[reg_data]][constant[suffix]]][constant[vtype]]]]
variable[updates_result] assign[=] call[call[name[__utils__]][constant[reg.set_value]], parameter[call[call[name[reg_data]][constant[updates]]][constant[hive]], call[call[name[reg_data]][constant[updates]]][constant[key]], call[call[name[reg_data]][constant[updates]]][constant[vname]], call[call[name[reg_data]][constant[updates]]][constant[new]], call[call[name[reg_data]][constant[updates]]][constant[vtype]]]]
call[name[ret]][constant[result]] assign[=] binary_operation[name[suffix_result] <ast.BitAnd object at 0x7da2590d6b60> name[updates_result]]
return[name[ret]] | keyword[def] identifier[primary_suffix] ( identifier[name] ,
identifier[suffix] = keyword[None] ,
identifier[updates] = keyword[False] ):
literal[string]
identifier[ret] ={
literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : keyword[True] ,
literal[string] : literal[string]
}
identifier[suffix] = identifier[str] ( identifier[suffix] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[updates] , identifier[bool] ):
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
identifier[reg_data] ={
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[None] ,
literal[string] : identifier[suffix]
},
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[None] ,
literal[string] : identifier[updates]
}
}
identifier[reg_data] [ literal[string] ][ literal[string] ]= identifier[__utils__] [ literal[string] ](
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],)[ literal[string] ]
identifier[reg_data] [ literal[string] ][ literal[string] ]= identifier[bool] ( identifier[__utils__] [ literal[string] ](
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],)[ literal[string] ])
identifier[updates_operation] = literal[string] keyword[if] identifier[reg_data] [ literal[string] ][ literal[string] ] keyword[else] literal[string]
keyword[if] identifier[reg_data] [ literal[string] ][ literal[string] ]== identifier[reg_data] [ literal[string] ][ literal[string] ]:
keyword[if] identifier[reg_data] [ literal[string] ][ literal[string] ]== identifier[reg_data] [ literal[string] ][ literal[string] ]:
keyword[return] identifier[ret]
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[updates_operation] )
identifier[ret] [ literal[string] ]={
literal[string] :{
literal[string] : identifier[reg_data] [ literal[string] ][ literal[string] ]},
literal[string] :{
literal[string] : identifier[reg_data] [ literal[string] ][ literal[string] ]}}
keyword[else] :
keyword[if] identifier[reg_data] [ literal[string] ][ literal[string] ]!= identifier[reg_data] [ literal[string] ][ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[suffix] , identifier[updates_operation] )
identifier[ret] [ literal[string] ]={
literal[string] :{
literal[string] : identifier[reg_data] [ literal[string] ][ literal[string] ],
literal[string] : identifier[reg_data] [ literal[string] ][ literal[string] ]},
literal[string] :{
literal[string] : identifier[reg_data] [ literal[string] ][ literal[string] ],
literal[string] : identifier[reg_data] [ literal[string] ][ literal[string] ]}}
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[suffix] )
identifier[ret] [ literal[string] ]={
literal[string] :{
literal[string] : identifier[reg_data] [ literal[string] ][ literal[string] ]},
literal[string] :{
literal[string] : identifier[reg_data] [ literal[string] ][ literal[string] ]}}
identifier[suffix_result] = identifier[__utils__] [ literal[string] ](
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ])
identifier[updates_result] = identifier[__utils__] [ literal[string] ](
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ],
identifier[reg_data] [ literal[string] ][ literal[string] ])
identifier[ret] [ literal[string] ]= identifier[suffix_result] & identifier[updates_result]
keyword[return] identifier[ret] | def primary_suffix(name, suffix=None, updates=False):
"""
.. versionadded:: 2014.7.0
Configure the global primary DNS suffix of a DHCP client.
suffix : None
The suffix which is advertised for this client when acquiring a DHCP lease
When none is set, the explicitly configured DNS suffix will be removed.
updates : False
Allow syncing the DNS suffix with the AD domain when the client's AD domain membership changes
.. code-block:: yaml
primary_dns_suffix:
win_dns_client.primary_suffix:
- suffix: sub.domain.tld
- updates: True
"""
ret = {'name': name, 'changes': {}, 'result': True, 'comment': 'No changes needed'}
suffix = str(suffix)
if not isinstance(updates, bool):
ret['result'] = False
ret['comment'] = "'updates' must be a boolean value"
return ret # depends on [control=['if'], data=[]]
# TODO: waiting for an implementation of
# https://github.com/saltstack/salt/issues/6792 to be able to handle the
# requirement for a reboot to actually apply this state.
# Until then, this method will only be able to verify that the required
# value has been written to the registry and rebooting needs to be handled
# manually
reg_data = {'suffix': {'hive': 'HKEY_LOCAL_MACHINE', 'key': 'SYSTEM\\CurrentControlSet\\services\\Tcpip\\Parameters', 'vname': 'NV Domain', 'vtype': 'REG_SZ', 'old': None, 'new': suffix}, 'updates': {'hive': 'HKEY_LOCAL_MACHINE', 'key': 'SYSTEM\\CurrentControlSet\\services\\Tcpip\\Parameters', 'vname': 'SyncDomainWithMembership', 'vtype': 'REG_DWORD', 'old': None, 'new': updates}}
reg_data['suffix']['old'] = __utils__['reg.read_value'](reg_data['suffix']['hive'], reg_data['suffix']['key'], reg_data['suffix']['vname'])['vdata']
reg_data['updates']['old'] = bool(__utils__['reg.read_value'](reg_data['updates']['hive'], reg_data['updates']['key'], reg_data['updates']['vname'])['vdata'])
updates_operation = 'enabled' if reg_data['updates']['new'] else 'disabled'
# No changes to suffix needed
if reg_data['suffix']['new'] == reg_data['suffix']['old']:
# No changes to updates policy needed
if reg_data['updates']['new'] == reg_data['updates']['old']:
return ret # depends on [control=['if'], data=[]]
else:
# Changes to update policy needed
ret['comment'] = '{0} suffix updates'.format(updates_operation)
ret['changes'] = {'old': {'updates': reg_data['updates']['old']}, 'new': {'updates': reg_data['updates']['new']}} # depends on [control=['if'], data=[]]
# Changes to suffix needed
# Changes to updates policy needed
elif reg_data['updates']['new'] != reg_data['updates']['old']:
ret['comment'] = 'Updated primary DNS suffix ({0}) and {1} suffix updates'.format(suffix, updates_operation)
ret['changes'] = {'old': {'suffix': reg_data['suffix']['old'], 'updates': reg_data['updates']['old']}, 'new': {'suffix': reg_data['suffix']['new'], 'updates': reg_data['updates']['new']}} # depends on [control=['if'], data=[]]
else:
# No changes to updates policy needed
ret['comment'] = 'Updated primary DNS suffix ({0})'.format(suffix)
ret['changes'] = {'old': {'suffix': reg_data['suffix']['old']}, 'new': {'suffix': reg_data['suffix']['new']}}
suffix_result = __utils__['reg.set_value'](reg_data['suffix']['hive'], reg_data['suffix']['key'], reg_data['suffix']['vname'], reg_data['suffix']['new'], reg_data['suffix']['vtype'])
updates_result = __utils__['reg.set_value'](reg_data['updates']['hive'], reg_data['updates']['key'], reg_data['updates']['vname'], reg_data['updates']['new'], reg_data['updates']['vtype'])
ret['result'] = suffix_result & updates_result
return ret |
def users(self, institute=None):
"""Return all users from the database
Args:
institute(str): An institute_id
Returns:
res(pymongo.Cursor): A cursor with users
"""
query = {}
if institute:
LOG.info("Fetching all users from institute %s", institute)
query = {'institutes': {'$in': [institute]}}
else:
LOG.info("Fetching all users")
res = self.user_collection.find(query)
return res | def function[users, parameter[self, institute]]:
constant[Return all users from the database
Args:
institute(str): An institute_id
Returns:
res(pymongo.Cursor): A cursor with users
]
variable[query] assign[=] dictionary[[], []]
if name[institute] begin[:]
call[name[LOG].info, parameter[constant[Fetching all users from institute %s], name[institute]]]
variable[query] assign[=] dictionary[[<ast.Constant object at 0x7da20e954790>], [<ast.Dict object at 0x7da20e956020>]]
variable[res] assign[=] call[name[self].user_collection.find, parameter[name[query]]]
return[name[res]] | keyword[def] identifier[users] ( identifier[self] , identifier[institute] = keyword[None] ):
literal[string]
identifier[query] ={}
keyword[if] identifier[institute] :
identifier[LOG] . identifier[info] ( literal[string] , identifier[institute] )
identifier[query] ={ literal[string] :{ literal[string] :[ identifier[institute] ]}}
keyword[else] :
identifier[LOG] . identifier[info] ( literal[string] )
identifier[res] = identifier[self] . identifier[user_collection] . identifier[find] ( identifier[query] )
keyword[return] identifier[res] | def users(self, institute=None):
"""Return all users from the database
Args:
institute(str): An institute_id
Returns:
res(pymongo.Cursor): A cursor with users
"""
query = {}
if institute:
LOG.info('Fetching all users from institute %s', institute)
query = {'institutes': {'$in': [institute]}} # depends on [control=['if'], data=[]]
else:
LOG.info('Fetching all users')
res = self.user_collection.find(query)
return res |
def set_maxrad(self,newrad):
"""
Sets max allowed radius in populations.
Doesn't operate via the :class:`stars.Constraint`
protocol; rather just rescales the sky positions
for the background objects and recalculates
sky area, etc.
"""
if not isinstance(newrad, Quantity):
newrad = newrad * u.arcsec
#if 'Rsky' not in self.constraints:
# self.constraints.append('Rsky')
for pop in self.poplist:
if not pop.is_specific:
try:
pop.maxrad = newrad
except AttributeError:
pass | def function[set_maxrad, parameter[self, newrad]]:
constant[
Sets max allowed radius in populations.
Doesn't operate via the :class:`stars.Constraint`
protocol; rather just rescales the sky positions
for the background objects and recalculates
sky area, etc.
]
if <ast.UnaryOp object at 0x7da1b26b77f0> begin[:]
variable[newrad] assign[=] binary_operation[name[newrad] * name[u].arcsec]
for taget[name[pop]] in starred[name[self].poplist] begin[:]
if <ast.UnaryOp object at 0x7da1b26b4c10> begin[:]
<ast.Try object at 0x7da1b26b6e00> | keyword[def] identifier[set_maxrad] ( identifier[self] , identifier[newrad] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[newrad] , identifier[Quantity] ):
identifier[newrad] = identifier[newrad] * identifier[u] . identifier[arcsec]
keyword[for] identifier[pop] keyword[in] identifier[self] . identifier[poplist] :
keyword[if] keyword[not] identifier[pop] . identifier[is_specific] :
keyword[try] :
identifier[pop] . identifier[maxrad] = identifier[newrad]
keyword[except] identifier[AttributeError] :
keyword[pass] | def set_maxrad(self, newrad):
"""
Sets max allowed radius in populations.
Doesn't operate via the :class:`stars.Constraint`
protocol; rather just rescales the sky positions
for the background objects and recalculates
sky area, etc.
"""
if not isinstance(newrad, Quantity):
newrad = newrad * u.arcsec # depends on [control=['if'], data=[]]
#if 'Rsky' not in self.constraints:
# self.constraints.append('Rsky')
for pop in self.poplist:
if not pop.is_specific:
try:
pop.maxrad = newrad # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pop']] |
def connect_default_driver_wrapper(cls, config_files=None):
"""Get default driver wrapper, configure it and connect driver
:param config_files: driver wrapper specific config files
:returns: default driver wrapper
:rtype: toolium.driver_wrapper.DriverWrapper
"""
driver_wrapper = cls.get_default_wrapper()
if not driver_wrapper.driver:
config_files = DriverWrappersPool.initialize_config_files(config_files)
driver_wrapper.configure(config_files)
driver_wrapper.connect()
return driver_wrapper | def function[connect_default_driver_wrapper, parameter[cls, config_files]]:
constant[Get default driver wrapper, configure it and connect driver
:param config_files: driver wrapper specific config files
:returns: default driver wrapper
:rtype: toolium.driver_wrapper.DriverWrapper
]
variable[driver_wrapper] assign[=] call[name[cls].get_default_wrapper, parameter[]]
if <ast.UnaryOp object at 0x7da18dc050f0> begin[:]
variable[config_files] assign[=] call[name[DriverWrappersPool].initialize_config_files, parameter[name[config_files]]]
call[name[driver_wrapper].configure, parameter[name[config_files]]]
call[name[driver_wrapper].connect, parameter[]]
return[name[driver_wrapper]] | keyword[def] identifier[connect_default_driver_wrapper] ( identifier[cls] , identifier[config_files] = keyword[None] ):
literal[string]
identifier[driver_wrapper] = identifier[cls] . identifier[get_default_wrapper] ()
keyword[if] keyword[not] identifier[driver_wrapper] . identifier[driver] :
identifier[config_files] = identifier[DriverWrappersPool] . identifier[initialize_config_files] ( identifier[config_files] )
identifier[driver_wrapper] . identifier[configure] ( identifier[config_files] )
identifier[driver_wrapper] . identifier[connect] ()
keyword[return] identifier[driver_wrapper] | def connect_default_driver_wrapper(cls, config_files=None):
"""Get default driver wrapper, configure it and connect driver
:param config_files: driver wrapper specific config files
:returns: default driver wrapper
:rtype: toolium.driver_wrapper.DriverWrapper
"""
driver_wrapper = cls.get_default_wrapper()
if not driver_wrapper.driver:
config_files = DriverWrappersPool.initialize_config_files(config_files)
driver_wrapper.configure(config_files)
driver_wrapper.connect() # depends on [control=['if'], data=[]]
return driver_wrapper |
def _format_native_types(self, na_rep='NaT', date_format=None, **kwargs):
"""
actually format my specific types
"""
values = self.astype(object)
if date_format:
formatter = lambda dt: dt.strftime(date_format)
else:
formatter = lambda dt: '%s' % dt
if self._hasnans:
mask = self._isnan
values[mask] = na_rep
imask = ~mask
values[imask] = np.array([formatter(dt) for dt
in values[imask]])
else:
values = np.array([formatter(dt) for dt in values])
return values | def function[_format_native_types, parameter[self, na_rep, date_format]]:
constant[
actually format my specific types
]
variable[values] assign[=] call[name[self].astype, parameter[name[object]]]
if name[date_format] begin[:]
variable[formatter] assign[=] <ast.Lambda object at 0x7da20c6c7eb0>
if name[self]._hasnans begin[:]
variable[mask] assign[=] name[self]._isnan
call[name[values]][name[mask]] assign[=] name[na_rep]
variable[imask] assign[=] <ast.UnaryOp object at 0x7da20c6c6ef0>
call[name[values]][name[imask]] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20c6c6260>]]
return[name[values]] | keyword[def] identifier[_format_native_types] ( identifier[self] , identifier[na_rep] = literal[string] , identifier[date_format] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[values] = identifier[self] . identifier[astype] ( identifier[object] )
keyword[if] identifier[date_format] :
identifier[formatter] = keyword[lambda] identifier[dt] : identifier[dt] . identifier[strftime] ( identifier[date_format] )
keyword[else] :
identifier[formatter] = keyword[lambda] identifier[dt] : literal[string] % identifier[dt]
keyword[if] identifier[self] . identifier[_hasnans] :
identifier[mask] = identifier[self] . identifier[_isnan]
identifier[values] [ identifier[mask] ]= identifier[na_rep]
identifier[imask] =~ identifier[mask]
identifier[values] [ identifier[imask] ]= identifier[np] . identifier[array] ([ identifier[formatter] ( identifier[dt] ) keyword[for] identifier[dt]
keyword[in] identifier[values] [ identifier[imask] ]])
keyword[else] :
identifier[values] = identifier[np] . identifier[array] ([ identifier[formatter] ( identifier[dt] ) keyword[for] identifier[dt] keyword[in] identifier[values] ])
keyword[return] identifier[values] | def _format_native_types(self, na_rep='NaT', date_format=None, **kwargs):
"""
actually format my specific types
"""
values = self.astype(object)
if date_format:
formatter = lambda dt: dt.strftime(date_format) # depends on [control=['if'], data=[]]
else:
formatter = lambda dt: '%s' % dt
if self._hasnans:
mask = self._isnan
values[mask] = na_rep
imask = ~mask
values[imask] = np.array([formatter(dt) for dt in values[imask]]) # depends on [control=['if'], data=[]]
else:
values = np.array([formatter(dt) for dt in values])
return values |
def LegacyKextload(self, cf_bundle_url, dependency_kext):
"""Load a kext by forking into kextload."""
_ = dependency_kext
error_code = OS_SUCCESS
cf_path = self.dll.CFURLCopyFileSystemPath(cf_bundle_url, POSIX_PATH_STYLE)
path = self.CFStringToPystring(cf_path)
self.dll.CFRelease(cf_path)
try:
subprocess.check_call(['/sbin/kextload', path])
except subprocess.CalledProcessError as cpe:
logging.debug('failed to load %s:%s', path, str(cpe))
error_code = -1
return error_code | def function[LegacyKextload, parameter[self, cf_bundle_url, dependency_kext]]:
constant[Load a kext by forking into kextload.]
variable[_] assign[=] name[dependency_kext]
variable[error_code] assign[=] name[OS_SUCCESS]
variable[cf_path] assign[=] call[name[self].dll.CFURLCopyFileSystemPath, parameter[name[cf_bundle_url], name[POSIX_PATH_STYLE]]]
variable[path] assign[=] call[name[self].CFStringToPystring, parameter[name[cf_path]]]
call[name[self].dll.CFRelease, parameter[name[cf_path]]]
<ast.Try object at 0x7da1b1c0d120>
return[name[error_code]] | keyword[def] identifier[LegacyKextload] ( identifier[self] , identifier[cf_bundle_url] , identifier[dependency_kext] ):
literal[string]
identifier[_] = identifier[dependency_kext]
identifier[error_code] = identifier[OS_SUCCESS]
identifier[cf_path] = identifier[self] . identifier[dll] . identifier[CFURLCopyFileSystemPath] ( identifier[cf_bundle_url] , identifier[POSIX_PATH_STYLE] )
identifier[path] = identifier[self] . identifier[CFStringToPystring] ( identifier[cf_path] )
identifier[self] . identifier[dll] . identifier[CFRelease] ( identifier[cf_path] )
keyword[try] :
identifier[subprocess] . identifier[check_call] ([ literal[string] , identifier[path] ])
keyword[except] identifier[subprocess] . identifier[CalledProcessError] keyword[as] identifier[cpe] :
identifier[logging] . identifier[debug] ( literal[string] , identifier[path] , identifier[str] ( identifier[cpe] ))
identifier[error_code] =- literal[int]
keyword[return] identifier[error_code] | def LegacyKextload(self, cf_bundle_url, dependency_kext):
"""Load a kext by forking into kextload."""
_ = dependency_kext
error_code = OS_SUCCESS
cf_path = self.dll.CFURLCopyFileSystemPath(cf_bundle_url, POSIX_PATH_STYLE)
path = self.CFStringToPystring(cf_path)
self.dll.CFRelease(cf_path)
try:
subprocess.check_call(['/sbin/kextload', path]) # depends on [control=['try'], data=[]]
except subprocess.CalledProcessError as cpe:
logging.debug('failed to load %s:%s', path, str(cpe))
error_code = -1 # depends on [control=['except'], data=['cpe']]
return error_code |
def term_echo(command, nindent=0, env=None, fpointer=None, cols=60):
"""
Print STDOUT resulting from a Bash shell command formatted in reStructuredText.
:param command: Bash shell command
:type command: string
:param nindent: Indentation level
:type nindent: integer
:param env: Environment variable replacement dictionary. The Bash
command is pre-processed and any environment variable
represented in the full notation (:bash:`${...}`) is replaced.
The dictionary key is the environment variable name and the
dictionary value is the replacement value. For example, if
**command** is :code:`'${PYTHON_CMD} -m "x=5"'` and **env**
is :code:`{'PYTHON_CMD':'python3'}` the actual command issued
is :code:`'python3 -m "x=5"'`
:type env: dictionary
:param fpointer: Output function pointer. Normally is :code:`cog.out` but
:code:`print` or other functions can be used for
debugging
:type fpointer: function object
:param cols: Number of columns of output
:type cols: integer
"""
# pylint: disable=R0204
# Set argparse width so that output does not need horizontal scroll
# bar in narrow windows or displays
os.environ["COLUMNS"] = str(cols)
command_int = command
if env:
for var, repl in env.items():
command_int = command_int.replace("${" + var + "}", repl)
tokens = command_int.split(" ")
# Add Python interpreter executable for Python scripts on Windows since
# the shebang does not work
if (platform.system().lower() == "windows") and (tokens[0].endswith(".py")):
tokens = [sys.executable] + tokens
proc = subprocess.Popen(tokens, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout = proc.communicate()[0]
if sys.hexversion >= 0x03000000:
stdout = stdout.decode("utf-8")
stdout = stdout.split("\n")
indent = nindent * " "
fpointer("\n", dedent=False)
fpointer("{0}.. code-block:: bash\n".format(indent), dedent=False)
fpointer("\n", dedent=False)
fpointer("{0} $ {1}\n".format(indent, command), dedent=False)
for line in stdout:
if line.strip():
fpointer(indent + " " + line.replace("\t", " ") + "\n", dedent=False)
else:
fpointer("\n", dedent=False)
fpointer("\n", dedent=False) | def function[term_echo, parameter[command, nindent, env, fpointer, cols]]:
constant[
Print STDOUT resulting from a Bash shell command formatted in reStructuredText.
:param command: Bash shell command
:type command: string
:param nindent: Indentation level
:type nindent: integer
:param env: Environment variable replacement dictionary. The Bash
command is pre-processed and any environment variable
represented in the full notation (:bash:`${...}`) is replaced.
The dictionary key is the environment variable name and the
dictionary value is the replacement value. For example, if
**command** is :code:`'${PYTHON_CMD} -m "x=5"'` and **env**
is :code:`{'PYTHON_CMD':'python3'}` the actual command issued
is :code:`'python3 -m "x=5"'`
:type env: dictionary
:param fpointer: Output function pointer. Normally is :code:`cog.out` but
:code:`print` or other functions can be used for
debugging
:type fpointer: function object
:param cols: Number of columns of output
:type cols: integer
]
call[name[os].environ][constant[COLUMNS]] assign[=] call[name[str], parameter[name[cols]]]
variable[command_int] assign[=] name[command]
if name[env] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0349e10>, <ast.Name object at 0x7da1b0348430>]]] in starred[call[name[env].items, parameter[]]] begin[:]
variable[command_int] assign[=] call[name[command_int].replace, parameter[binary_operation[binary_operation[constant[${] + name[var]] + constant[}]], name[repl]]]
variable[tokens] assign[=] call[name[command_int].split, parameter[constant[ ]]]
if <ast.BoolOp object at 0x7da1b034ac80> begin[:]
variable[tokens] assign[=] binary_operation[list[[<ast.Attribute object at 0x7da1b03489a0>]] + name[tokens]]
variable[proc] assign[=] call[name[subprocess].Popen, parameter[name[tokens]]]
variable[stdout] assign[=] call[call[name[proc].communicate, parameter[]]][constant[0]]
if compare[name[sys].hexversion greater_or_equal[>=] constant[50331648]] begin[:]
variable[stdout] assign[=] call[name[stdout].decode, parameter[constant[utf-8]]]
variable[stdout] assign[=] call[name[stdout].split, parameter[constant[
]]]
variable[indent] assign[=] binary_operation[name[nindent] * constant[ ]]
call[name[fpointer], parameter[constant[
]]]
call[name[fpointer], parameter[call[constant[{0}.. code-block:: bash
].format, parameter[name[indent]]]]]
call[name[fpointer], parameter[constant[
]]]
call[name[fpointer], parameter[call[constant[{0} $ {1}
].format, parameter[name[indent], name[command]]]]]
for taget[name[line]] in starred[name[stdout]] begin[:]
if call[name[line].strip, parameter[]] begin[:]
call[name[fpointer], parameter[binary_operation[binary_operation[binary_operation[name[indent] + constant[ ]] + call[name[line].replace, parameter[constant[ ], constant[ ]]]] + constant[
]]]]
call[name[fpointer], parameter[constant[
]]] | keyword[def] identifier[term_echo] ( identifier[command] , identifier[nindent] = literal[int] , identifier[env] = keyword[None] , identifier[fpointer] = keyword[None] , identifier[cols] = literal[int] ):
literal[string]
identifier[os] . identifier[environ] [ literal[string] ]= identifier[str] ( identifier[cols] )
identifier[command_int] = identifier[command]
keyword[if] identifier[env] :
keyword[for] identifier[var] , identifier[repl] keyword[in] identifier[env] . identifier[items] ():
identifier[command_int] = identifier[command_int] . identifier[replace] ( literal[string] + identifier[var] + literal[string] , identifier[repl] )
identifier[tokens] = identifier[command_int] . identifier[split] ( literal[string] )
keyword[if] ( identifier[platform] . identifier[system] (). identifier[lower] ()== literal[string] ) keyword[and] ( identifier[tokens] [ literal[int] ]. identifier[endswith] ( literal[string] )):
identifier[tokens] =[ identifier[sys] . identifier[executable] ]+ identifier[tokens]
identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[tokens] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[STDOUT] )
identifier[stdout] = identifier[proc] . identifier[communicate] ()[ literal[int] ]
keyword[if] identifier[sys] . identifier[hexversion] >= literal[int] :
identifier[stdout] = identifier[stdout] . identifier[decode] ( literal[string] )
identifier[stdout] = identifier[stdout] . identifier[split] ( literal[string] )
identifier[indent] = identifier[nindent] * literal[string]
identifier[fpointer] ( literal[string] , identifier[dedent] = keyword[False] )
identifier[fpointer] ( literal[string] . identifier[format] ( identifier[indent] ), identifier[dedent] = keyword[False] )
identifier[fpointer] ( literal[string] , identifier[dedent] = keyword[False] )
identifier[fpointer] ( literal[string] . identifier[format] ( identifier[indent] , identifier[command] ), identifier[dedent] = keyword[False] )
keyword[for] identifier[line] keyword[in] identifier[stdout] :
keyword[if] identifier[line] . identifier[strip] ():
identifier[fpointer] ( identifier[indent] + literal[string] + identifier[line] . identifier[replace] ( literal[string] , literal[string] )+ literal[string] , identifier[dedent] = keyword[False] )
keyword[else] :
identifier[fpointer] ( literal[string] , identifier[dedent] = keyword[False] )
identifier[fpointer] ( literal[string] , identifier[dedent] = keyword[False] ) | def term_echo(command, nindent=0, env=None, fpointer=None, cols=60):
"""
Print STDOUT resulting from a Bash shell command formatted in reStructuredText.
:param command: Bash shell command
:type command: string
:param nindent: Indentation level
:type nindent: integer
:param env: Environment variable replacement dictionary. The Bash
command is pre-processed and any environment variable
represented in the full notation (:bash:`${...}`) is replaced.
The dictionary key is the environment variable name and the
dictionary value is the replacement value. For example, if
**command** is :code:`'${PYTHON_CMD} -m "x=5"'` and **env**
is :code:`{'PYTHON_CMD':'python3'}` the actual command issued
is :code:`'python3 -m "x=5"'`
:type env: dictionary
:param fpointer: Output function pointer. Normally is :code:`cog.out` but
:code:`print` or other functions can be used for
debugging
:type fpointer: function object
:param cols: Number of columns of output
:type cols: integer
"""
# pylint: disable=R0204
# Set argparse width so that output does not need horizontal scroll
# bar in narrow windows or displays
os.environ['COLUMNS'] = str(cols)
command_int = command
if env:
for (var, repl) in env.items():
command_int = command_int.replace('${' + var + '}', repl) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
tokens = command_int.split(' ')
# Add Python interpreter executable for Python scripts on Windows since
# the shebang does not work
if platform.system().lower() == 'windows' and tokens[0].endswith('.py'):
tokens = [sys.executable] + tokens # depends on [control=['if'], data=[]]
proc = subprocess.Popen(tokens, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout = proc.communicate()[0]
if sys.hexversion >= 50331648:
stdout = stdout.decode('utf-8') # depends on [control=['if'], data=[]]
stdout = stdout.split('\n')
indent = nindent * ' '
fpointer('\n', dedent=False)
fpointer('{0}.. code-block:: bash\n'.format(indent), dedent=False)
fpointer('\n', dedent=False)
fpointer('{0} $ {1}\n'.format(indent, command), dedent=False)
for line in stdout:
if line.strip():
fpointer(indent + ' ' + line.replace('\t', ' ') + '\n', dedent=False) # depends on [control=['if'], data=[]]
else:
fpointer('\n', dedent=False) # depends on [control=['for'], data=['line']]
fpointer('\n', dedent=False) |
def loadRecord(self, domain, type, zone=None, callback=None,
errback=None, **kwargs):
"""
Load an existing record into a high level Record object.
:param str domain: domain name of the record in the zone, for example \
'myrecord'. You may leave off the zone, since it must be \
specified in the zone parameter
:param str type: record type, such as 'A', 'MX', 'AAAA', etc.
:param str zone: zone name, like 'example.com'
:rtype: :py:class:`ns1.records`
"""
import ns1.zones
if zone is None:
# extract from record string
parts = domain.split('.')
if len(parts) <= 2:
zone = '.'.join(parts)
else:
zone = '.'.join(parts[1:])
z = ns1.zones.Zone(self.config, zone)
return z.loadRecord(domain, type, callback=callback, errback=errback,
**kwargs) | def function[loadRecord, parameter[self, domain, type, zone, callback, errback]]:
constant[
Load an existing record into a high level Record object.
:param str domain: domain name of the record in the zone, for example 'myrecord'. You may leave off the zone, since it must be specified in the zone parameter
:param str type: record type, such as 'A', 'MX', 'AAAA', etc.
:param str zone: zone name, like 'example.com'
:rtype: :py:class:`ns1.records`
]
import module[ns1.zones]
if compare[name[zone] is constant[None]] begin[:]
variable[parts] assign[=] call[name[domain].split, parameter[constant[.]]]
if compare[call[name[len], parameter[name[parts]]] less_or_equal[<=] constant[2]] begin[:]
variable[zone] assign[=] call[constant[.].join, parameter[name[parts]]]
variable[z] assign[=] call[name[ns1].zones.Zone, parameter[name[self].config, name[zone]]]
return[call[name[z].loadRecord, parameter[name[domain], name[type]]]] | keyword[def] identifier[loadRecord] ( identifier[self] , identifier[domain] , identifier[type] , identifier[zone] = keyword[None] , identifier[callback] = keyword[None] ,
identifier[errback] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[ns1] . identifier[zones]
keyword[if] identifier[zone] keyword[is] keyword[None] :
identifier[parts] = identifier[domain] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[parts] )<= literal[int] :
identifier[zone] = literal[string] . identifier[join] ( identifier[parts] )
keyword[else] :
identifier[zone] = literal[string] . identifier[join] ( identifier[parts] [ literal[int] :])
identifier[z] = identifier[ns1] . identifier[zones] . identifier[Zone] ( identifier[self] . identifier[config] , identifier[zone] )
keyword[return] identifier[z] . identifier[loadRecord] ( identifier[domain] , identifier[type] , identifier[callback] = identifier[callback] , identifier[errback] = identifier[errback] ,
** identifier[kwargs] ) | def loadRecord(self, domain, type, zone=None, callback=None, errback=None, **kwargs):
"""
Load an existing record into a high level Record object.
:param str domain: domain name of the record in the zone, for example 'myrecord'. You may leave off the zone, since it must be specified in the zone parameter
:param str type: record type, such as 'A', 'MX', 'AAAA', etc.
:param str zone: zone name, like 'example.com'
:rtype: :py:class:`ns1.records`
"""
import ns1.zones
if zone is None:
# extract from record string
parts = domain.split('.')
if len(parts) <= 2:
zone = '.'.join(parts) # depends on [control=['if'], data=[]]
else:
zone = '.'.join(parts[1:]) # depends on [control=['if'], data=['zone']]
z = ns1.zones.Zone(self.config, zone)
return z.loadRecord(domain, type, callback=callback, errback=errback, **kwargs) |
def group_batches(xs):
"""Group samples into batches for simultaneous variant calling.
Identify all samples to call together: those in the same batch
and variant caller.
Pull together all BAM files from this batch and process together,
Provide details to pull these finalized files back into individual
expected files.
Only batches files if joint calling not specified.
"""
def _caller_batches(data):
caller = tz.get_in(("config", "algorithm", "variantcaller"), data)
jointcaller = tz.get_in(("config", "algorithm", "jointcaller"), data)
batch = tz.get_in(("metadata", "batch"), data) if not jointcaller else None
return caller, batch
def _prep_data(data, items):
data["region_bams"] = [x["region_bams"] for x in items]
return data
return _group_batches_shared(xs, _caller_batches, _prep_data) | def function[group_batches, parameter[xs]]:
constant[Group samples into batches for simultaneous variant calling.
Identify all samples to call together: those in the same batch
and variant caller.
Pull together all BAM files from this batch and process together,
Provide details to pull these finalized files back into individual
expected files.
Only batches files if joint calling not specified.
]
def function[_caller_batches, parameter[data]]:
variable[caller] assign[=] call[name[tz].get_in, parameter[tuple[[<ast.Constant object at 0x7da1b18d1ed0>, <ast.Constant object at 0x7da1b18d15d0>, <ast.Constant object at 0x7da1b18d14b0>]], name[data]]]
variable[jointcaller] assign[=] call[name[tz].get_in, parameter[tuple[[<ast.Constant object at 0x7da1b18d1270>, <ast.Constant object at 0x7da1b18d3160>, <ast.Constant object at 0x7da1b18d20e0>]], name[data]]]
variable[batch] assign[=] <ast.IfExp object at 0x7da1b18d1930>
return[tuple[[<ast.Name object at 0x7da1b1832710>, <ast.Name object at 0x7da1b1831900>]]]
def function[_prep_data, parameter[data, items]]:
call[name[data]][constant[region_bams]] assign[=] <ast.ListComp object at 0x7da1b18829e0>
return[name[data]]
return[call[name[_group_batches_shared], parameter[name[xs], name[_caller_batches], name[_prep_data]]]] | keyword[def] identifier[group_batches] ( identifier[xs] ):
literal[string]
keyword[def] identifier[_caller_batches] ( identifier[data] ):
identifier[caller] = identifier[tz] . identifier[get_in] (( literal[string] , literal[string] , literal[string] ), identifier[data] )
identifier[jointcaller] = identifier[tz] . identifier[get_in] (( literal[string] , literal[string] , literal[string] ), identifier[data] )
identifier[batch] = identifier[tz] . identifier[get_in] (( literal[string] , literal[string] ), identifier[data] ) keyword[if] keyword[not] identifier[jointcaller] keyword[else] keyword[None]
keyword[return] identifier[caller] , identifier[batch]
keyword[def] identifier[_prep_data] ( identifier[data] , identifier[items] ):
identifier[data] [ literal[string] ]=[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[items] ]
keyword[return] identifier[data]
keyword[return] identifier[_group_batches_shared] ( identifier[xs] , identifier[_caller_batches] , identifier[_prep_data] ) | def group_batches(xs):
"""Group samples into batches for simultaneous variant calling.
Identify all samples to call together: those in the same batch
and variant caller.
Pull together all BAM files from this batch and process together,
Provide details to pull these finalized files back into individual
expected files.
Only batches files if joint calling not specified.
"""
def _caller_batches(data):
caller = tz.get_in(('config', 'algorithm', 'variantcaller'), data)
jointcaller = tz.get_in(('config', 'algorithm', 'jointcaller'), data)
batch = tz.get_in(('metadata', 'batch'), data) if not jointcaller else None
return (caller, batch)
def _prep_data(data, items):
data['region_bams'] = [x['region_bams'] for x in items]
return data
return _group_batches_shared(xs, _caller_batches, _prep_data) |
def predict_fixation_duration(
durations, angles, length_diffs, dataset=None, params=None):
"""
Fits a non-linear piecewise regression to fixation durations for a fixmat.
Returns corrected fixation durations.
"""
if dataset is None:
dataset = np.ones(durations.shape)
corrected_durations = np.nan * np.ones(durations.shape)
for i, ds in enumerate(np.unique(dataset)):
e = lambda v, x, y, z: (leastsq_dual_model(x, z, *v) - y)
v0 = [120, 220.0, -.1, 0.5, .1, .1]
id_ds = dataset == ds
idnan = (
~np.isnan(angles)) & (
~np.isnan(durations)) & (
~np.isnan(length_diffs))
v, s = leastsq(
e, v0, args=(
angles[
idnan & id_ds], durations[
idnan & id_ds], length_diffs[
idnan & id_ds]), maxfev=10000)
corrected_durations[id_ds] = (durations[id_ds] -
(leastsq_dual_model(angles[id_ds], length_diffs[id_ds], *v)))
if params is not None:
params['v' + str(i)] = v
params['s' + str(i)] = s
return corrected_durations | def function[predict_fixation_duration, parameter[durations, angles, length_diffs, dataset, params]]:
constant[
Fits a non-linear piecewise regression to fixation durations for a fixmat.
Returns corrected fixation durations.
]
if compare[name[dataset] is constant[None]] begin[:]
variable[dataset] assign[=] call[name[np].ones, parameter[name[durations].shape]]
variable[corrected_durations] assign[=] binary_operation[name[np].nan * call[name[np].ones, parameter[name[durations].shape]]]
for taget[tuple[[<ast.Name object at 0x7da18f09cc40>, <ast.Name object at 0x7da18f09d810>]]] in starred[call[name[enumerate], parameter[call[name[np].unique, parameter[name[dataset]]]]]] begin[:]
variable[e] assign[=] <ast.Lambda object at 0x7da18f09c5e0>
variable[v0] assign[=] list[[<ast.Constant object at 0x7da18f09fb50>, <ast.Constant object at 0x7da18f09eec0>, <ast.UnaryOp object at 0x7da18f09eb30>, <ast.Constant object at 0x7da18f09caf0>, <ast.Constant object at 0x7da18f09e200>, <ast.Constant object at 0x7da18f09ec20>]]
variable[id_ds] assign[=] compare[name[dataset] equal[==] name[ds]]
variable[idnan] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18f09f520> <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da18f09fd30>] <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da18f09cc70>]
<ast.Tuple object at 0x7da18f09d030> assign[=] call[name[leastsq], parameter[name[e], name[v0]]]
call[name[corrected_durations]][name[id_ds]] assign[=] binary_operation[call[name[durations]][name[id_ds]] - call[name[leastsq_dual_model], parameter[call[name[angles]][name[id_ds]], call[name[length_diffs]][name[id_ds]], <ast.Starred object at 0x7da1b11eed10>]]]
if compare[name[params] is_not constant[None]] begin[:]
call[name[params]][binary_operation[constant[v] + call[name[str], parameter[name[i]]]]] assign[=] name[v]
call[name[params]][binary_operation[constant[s] + call[name[str], parameter[name[i]]]]] assign[=] name[s]
return[name[corrected_durations]] | keyword[def] identifier[predict_fixation_duration] (
identifier[durations] , identifier[angles] , identifier[length_diffs] , identifier[dataset] = keyword[None] , identifier[params] = keyword[None] ):
literal[string]
keyword[if] identifier[dataset] keyword[is] keyword[None] :
identifier[dataset] = identifier[np] . identifier[ones] ( identifier[durations] . identifier[shape] )
identifier[corrected_durations] = identifier[np] . identifier[nan] * identifier[np] . identifier[ones] ( identifier[durations] . identifier[shape] )
keyword[for] identifier[i] , identifier[ds] keyword[in] identifier[enumerate] ( identifier[np] . identifier[unique] ( identifier[dataset] )):
identifier[e] = keyword[lambda] identifier[v] , identifier[x] , identifier[y] , identifier[z] :( identifier[leastsq_dual_model] ( identifier[x] , identifier[z] ,* identifier[v] )- identifier[y] )
identifier[v0] =[ literal[int] , literal[int] ,- literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[id_ds] = identifier[dataset] == identifier[ds]
identifier[idnan] =(
~ identifier[np] . identifier[isnan] ( identifier[angles] ))&(
~ identifier[np] . identifier[isnan] ( identifier[durations] ))&(
~ identifier[np] . identifier[isnan] ( identifier[length_diffs] ))
identifier[v] , identifier[s] = identifier[leastsq] (
identifier[e] , identifier[v0] , identifier[args] =(
identifier[angles] [
identifier[idnan] & identifier[id_ds] ], identifier[durations] [
identifier[idnan] & identifier[id_ds] ], identifier[length_diffs] [
identifier[idnan] & identifier[id_ds] ]), identifier[maxfev] = literal[int] )
identifier[corrected_durations] [ identifier[id_ds] ]=( identifier[durations] [ identifier[id_ds] ]-
( identifier[leastsq_dual_model] ( identifier[angles] [ identifier[id_ds] ], identifier[length_diffs] [ identifier[id_ds] ],* identifier[v] )))
keyword[if] identifier[params] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] + identifier[str] ( identifier[i] )]= identifier[v]
identifier[params] [ literal[string] + identifier[str] ( identifier[i] )]= identifier[s]
keyword[return] identifier[corrected_durations] | def predict_fixation_duration(durations, angles, length_diffs, dataset=None, params=None):
"""
Fits a non-linear piecewise regression to fixation durations for a fixmat.
Returns corrected fixation durations.
"""
if dataset is None:
dataset = np.ones(durations.shape) # depends on [control=['if'], data=['dataset']]
corrected_durations = np.nan * np.ones(durations.shape)
for (i, ds) in enumerate(np.unique(dataset)):
e = lambda v, x, y, z: leastsq_dual_model(x, z, *v) - y
v0 = [120, 220.0, -0.1, 0.5, 0.1, 0.1]
id_ds = dataset == ds
idnan = ~np.isnan(angles) & ~np.isnan(durations) & ~np.isnan(length_diffs)
(v, s) = leastsq(e, v0, args=(angles[idnan & id_ds], durations[idnan & id_ds], length_diffs[idnan & id_ds]), maxfev=10000)
corrected_durations[id_ds] = durations[id_ds] - leastsq_dual_model(angles[id_ds], length_diffs[id_ds], *v)
if params is not None:
params['v' + str(i)] = v
params['s' + str(i)] = s # depends on [control=['if'], data=['params']] # depends on [control=['for'], data=[]]
return corrected_durations |
def get_site(self, plot_voronoi_sites=False):
"""Return primaty (top, bridge, hollow, 4fold) and
secondary (chemical elements in close environment) site designation"""
if self.dissociated:
return 'dissociated', ''
if self.is_desorbed():
return 'desorbed', ''
if self.is_subsurface():
return 'subsurface', ''
C0 = self.B[-1:] * (3, 3, 1)
ads_pos = C0.positions[4]
C = self.B.copy() * (3, 3, 1)
# Use top layer and adsorbate to map sites
Dict = self.get_site_dict(ads_pos[:2])
primary_site = None
dis = self.B.get_cell()[0][0]
Kind = None
values = [np.linalg.norm(ads_pos[:2] - d['pos'][:2])
for d in list(Dict.values())]
if len(values) == 0:
return 'N/A', ''
idx = np.argmin(values)
dis = values[idx]
kind = list(Dict.keys())[idx]
primary_site = kind.split('_')[0]
if plot_voronoi_sites: # View sampled sites
X = self.B.copy()
X = X * (3, 3, 1)
del X[-1]
for pos in Dict.values():
add_adsorbate(X, 'X', position=(pos['pos'][:2]), height=0.2)
view(X)
if primary_site == 'top':
site_type = Dict[kind]['sym']
if primary_site == 'bridge':
site_type = Dict[kind]['sym'] + '|' + self.get_under_bridge()
elif primary_site == 'hollow':
site_type = Dict[kind]['sym'] + '|' + self.get_under_hollow()
elif primary_site == '4fold':
site_type = Dict[kind]['sym']
if dis > 0.5:
primary_site += '-tilt'
print('Warning: A strong site match could not be found!')
print(' structure labeled as {}'.format(primary_site))
return primary_site, site_type | def function[get_site, parameter[self, plot_voronoi_sites]]:
constant[Return primaty (top, bridge, hollow, 4fold) and
secondary (chemical elements in close environment) site designation]
if name[self].dissociated begin[:]
return[tuple[[<ast.Constant object at 0x7da204566920>, <ast.Constant object at 0x7da204567a00>]]]
if call[name[self].is_desorbed, parameter[]] begin[:]
return[tuple[[<ast.Constant object at 0x7da204564a00>, <ast.Constant object at 0x7da2045653f0>]]]
if call[name[self].is_subsurface, parameter[]] begin[:]
return[tuple[[<ast.Constant object at 0x7da204566b30>, <ast.Constant object at 0x7da2045679a0>]]]
variable[C0] assign[=] binary_operation[call[name[self].B][<ast.Slice object at 0x7da204564730>] * tuple[[<ast.Constant object at 0x7da204567eb0>, <ast.Constant object at 0x7da204564c70>, <ast.Constant object at 0x7da204567c40>]]]
variable[ads_pos] assign[=] call[name[C0].positions][constant[4]]
variable[C] assign[=] binary_operation[call[name[self].B.copy, parameter[]] * tuple[[<ast.Constant object at 0x7da204564af0>, <ast.Constant object at 0x7da204564430>, <ast.Constant object at 0x7da204564e80>]]]
variable[Dict] assign[=] call[name[self].get_site_dict, parameter[call[name[ads_pos]][<ast.Slice object at 0x7da204564550>]]]
variable[primary_site] assign[=] constant[None]
variable[dis] assign[=] call[call[call[name[self].B.get_cell, parameter[]]][constant[0]]][constant[0]]
variable[Kind] assign[=] constant[None]
variable[values] assign[=] <ast.ListComp object at 0x7da204567fd0>
if compare[call[name[len], parameter[name[values]]] equal[==] constant[0]] begin[:]
return[tuple[[<ast.Constant object at 0x7da20e7497b0>, <ast.Constant object at 0x7da20e74bd90>]]]
variable[idx] assign[=] call[name[np].argmin, parameter[name[values]]]
variable[dis] assign[=] call[name[values]][name[idx]]
variable[kind] assign[=] call[call[name[list], parameter[call[name[Dict].keys, parameter[]]]]][name[idx]]
variable[primary_site] assign[=] call[call[name[kind].split, parameter[constant[_]]]][constant[0]]
if name[plot_voronoi_sites] begin[:]
variable[X] assign[=] call[name[self].B.copy, parameter[]]
variable[X] assign[=] binary_operation[name[X] * tuple[[<ast.Constant object at 0x7da204565a50>, <ast.Constant object at 0x7da204565b10>, <ast.Constant object at 0x7da204565570>]]]
<ast.Delete object at 0x7da204565720>
for taget[name[pos]] in starred[call[name[Dict].values, parameter[]]] begin[:]
call[name[add_adsorbate], parameter[name[X], constant[X]]]
call[name[view], parameter[name[X]]]
if compare[name[primary_site] equal[==] constant[top]] begin[:]
variable[site_type] assign[=] call[call[name[Dict]][name[kind]]][constant[sym]]
if compare[name[primary_site] equal[==] constant[bridge]] begin[:]
variable[site_type] assign[=] binary_operation[binary_operation[call[call[name[Dict]][name[kind]]][constant[sym]] + constant[|]] + call[name[self].get_under_bridge, parameter[]]]
if compare[name[dis] greater[>] constant[0.5]] begin[:]
<ast.AugAssign object at 0x7da204565780>
call[name[print], parameter[constant[Warning: A strong site match could not be found!]]]
call[name[print], parameter[call[constant[ structure labeled as {}].format, parameter[name[primary_site]]]]]
return[tuple[[<ast.Name object at 0x7da204565210>, <ast.Name object at 0x7da204564700>]]] | keyword[def] identifier[get_site] ( identifier[self] , identifier[plot_voronoi_sites] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[dissociated] :
keyword[return] literal[string] , literal[string]
keyword[if] identifier[self] . identifier[is_desorbed] ():
keyword[return] literal[string] , literal[string]
keyword[if] identifier[self] . identifier[is_subsurface] ():
keyword[return] literal[string] , literal[string]
identifier[C0] = identifier[self] . identifier[B] [- literal[int] :]*( literal[int] , literal[int] , literal[int] )
identifier[ads_pos] = identifier[C0] . identifier[positions] [ literal[int] ]
identifier[C] = identifier[self] . identifier[B] . identifier[copy] ()*( literal[int] , literal[int] , literal[int] )
identifier[Dict] = identifier[self] . identifier[get_site_dict] ( identifier[ads_pos] [: literal[int] ])
identifier[primary_site] = keyword[None]
identifier[dis] = identifier[self] . identifier[B] . identifier[get_cell] ()[ literal[int] ][ literal[int] ]
identifier[Kind] = keyword[None]
identifier[values] =[ identifier[np] . identifier[linalg] . identifier[norm] ( identifier[ads_pos] [: literal[int] ]- identifier[d] [ literal[string] ][: literal[int] ])
keyword[for] identifier[d] keyword[in] identifier[list] ( identifier[Dict] . identifier[values] ())]
keyword[if] identifier[len] ( identifier[values] )== literal[int] :
keyword[return] literal[string] , literal[string]
identifier[idx] = identifier[np] . identifier[argmin] ( identifier[values] )
identifier[dis] = identifier[values] [ identifier[idx] ]
identifier[kind] = identifier[list] ( identifier[Dict] . identifier[keys] ())[ identifier[idx] ]
identifier[primary_site] = identifier[kind] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[plot_voronoi_sites] :
identifier[X] = identifier[self] . identifier[B] . identifier[copy] ()
identifier[X] = identifier[X] *( literal[int] , literal[int] , literal[int] )
keyword[del] identifier[X] [- literal[int] ]
keyword[for] identifier[pos] keyword[in] identifier[Dict] . identifier[values] ():
identifier[add_adsorbate] ( identifier[X] , literal[string] , identifier[position] =( identifier[pos] [ literal[string] ][: literal[int] ]), identifier[height] = literal[int] )
identifier[view] ( identifier[X] )
keyword[if] identifier[primary_site] == literal[string] :
identifier[site_type] = identifier[Dict] [ identifier[kind] ][ literal[string] ]
keyword[if] identifier[primary_site] == literal[string] :
identifier[site_type] = identifier[Dict] [ identifier[kind] ][ literal[string] ]+ literal[string] + identifier[self] . identifier[get_under_bridge] ()
keyword[elif] identifier[primary_site] == literal[string] :
identifier[site_type] = identifier[Dict] [ identifier[kind] ][ literal[string] ]+ literal[string] + identifier[self] . identifier[get_under_hollow] ()
keyword[elif] identifier[primary_site] == literal[string] :
identifier[site_type] = identifier[Dict] [ identifier[kind] ][ literal[string] ]
keyword[if] identifier[dis] > literal[int] :
identifier[primary_site] += literal[string]
identifier[print] ( literal[string] )
identifier[print] ( literal[string] . identifier[format] ( identifier[primary_site] ))
keyword[return] identifier[primary_site] , identifier[site_type] | def get_site(self, plot_voronoi_sites=False):
"""Return primaty (top, bridge, hollow, 4fold) and
secondary (chemical elements in close environment) site designation"""
if self.dissociated:
return ('dissociated', '') # depends on [control=['if'], data=[]]
if self.is_desorbed():
return ('desorbed', '') # depends on [control=['if'], data=[]]
if self.is_subsurface():
return ('subsurface', '') # depends on [control=['if'], data=[]]
C0 = self.B[-1:] * (3, 3, 1)
ads_pos = C0.positions[4]
C = self.B.copy() * (3, 3, 1)
# Use top layer and adsorbate to map sites
Dict = self.get_site_dict(ads_pos[:2])
primary_site = None
dis = self.B.get_cell()[0][0]
Kind = None
values = [np.linalg.norm(ads_pos[:2] - d['pos'][:2]) for d in list(Dict.values())]
if len(values) == 0:
return ('N/A', '') # depends on [control=['if'], data=[]]
idx = np.argmin(values)
dis = values[idx]
kind = list(Dict.keys())[idx]
primary_site = kind.split('_')[0]
if plot_voronoi_sites: # View sampled sites
X = self.B.copy()
X = X * (3, 3, 1)
del X[-1]
for pos in Dict.values():
add_adsorbate(X, 'X', position=pos['pos'][:2], height=0.2) # depends on [control=['for'], data=['pos']]
view(X) # depends on [control=['if'], data=[]]
if primary_site == 'top':
site_type = Dict[kind]['sym'] # depends on [control=['if'], data=[]]
if primary_site == 'bridge':
site_type = Dict[kind]['sym'] + '|' + self.get_under_bridge() # depends on [control=['if'], data=[]]
elif primary_site == 'hollow':
site_type = Dict[kind]['sym'] + '|' + self.get_under_hollow() # depends on [control=['if'], data=[]]
elif primary_site == '4fold':
site_type = Dict[kind]['sym'] # depends on [control=['if'], data=[]]
if dis > 0.5:
primary_site += '-tilt'
print('Warning: A strong site match could not be found!')
print(' structure labeled as {}'.format(primary_site)) # depends on [control=['if'], data=[]]
return (primary_site, site_type) |
def list(self):
"""List collection items."""
if self.is_fake:
return
for item in self.collection.list():
yield item.uid + self.content_suffix | def function[list, parameter[self]]:
constant[List collection items.]
if name[self].is_fake begin[:]
return[None]
for taget[name[item]] in starred[call[name[self].collection.list, parameter[]]] begin[:]
<ast.Yield object at 0x7da18c4cfc40> | keyword[def] identifier[list] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[is_fake] :
keyword[return]
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[collection] . identifier[list] ():
keyword[yield] identifier[item] . identifier[uid] + identifier[self] . identifier[content_suffix] | def list(self):
"""List collection items."""
if self.is_fake:
return # depends on [control=['if'], data=[]]
for item in self.collection.list():
yield (item.uid + self.content_suffix) # depends on [control=['for'], data=['item']] |
def _decode(self, s):
'''This converts from the external coding system (as passed to
the constructor) to the internal one (unicode). '''
if self.decoder is not None:
return self.decoder.decode(s)
else:
raise TypeError("This screen was constructed with encoding=None, "
"so it does not handle bytes.") | def function[_decode, parameter[self, s]]:
constant[This converts from the external coding system (as passed to
the constructor) to the internal one (unicode). ]
if compare[name[self].decoder is_not constant[None]] begin[:]
return[call[name[self].decoder.decode, parameter[name[s]]]] | keyword[def] identifier[_decode] ( identifier[self] , identifier[s] ):
literal[string]
keyword[if] identifier[self] . identifier[decoder] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[decoder] . identifier[decode] ( identifier[s] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] ) | def _decode(self, s):
"""This converts from the external coding system (as passed to
the constructor) to the internal one (unicode). """
if self.decoder is not None:
return self.decoder.decode(s) # depends on [control=['if'], data=[]]
else:
raise TypeError('This screen was constructed with encoding=None, so it does not handle bytes.') |
def _set_ospf(self, v, load=False):
"""
Setter method for ospf, mapped from YANG variable /rbridge_id/router/ospf (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ospf is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ospf() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("vrf",ospf.ospf, yang_name="ospf", rest_name="ospf", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='vrf', extensions={u'tailf-common': {u'info': u'Open Shortest Path First (OSPF)', u'cli-run-template-enter': u' router ospf$($(vrf)==default-vrf?: vrf $(vrf))\n', u'sort-priority': u'70', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OSPFConfigCallPoint', u'cli-mode-name': u'config-router-ospf-vrf-$(vrf)'}}), is_container='list', yang_name="ospf", rest_name="ospf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Open Shortest Path First (OSPF)', u'cli-run-template-enter': u' router ospf$($(vrf)==default-vrf?: vrf $(vrf))\n', u'sort-priority': u'70', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OSPFConfigCallPoint', u'cli-mode-name': u'config-router-ospf-vrf-$(vrf)'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ospf must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("vrf",ospf.ospf, yang_name="ospf", rest_name="ospf", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='vrf', extensions={u'tailf-common': {u'info': u'Open Shortest Path First (OSPF)', u'cli-run-template-enter': u' router ospf$($(vrf)==default-vrf?: vrf $(vrf))\n', u'sort-priority': u'70', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OSPFConfigCallPoint', u'cli-mode-name': u'config-router-ospf-vrf-$(vrf)'}}), is_container='list', yang_name="ospf", rest_name="ospf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Open Shortest Path First (OSPF)', u'cli-run-template-enter': u' router ospf$($(vrf)==default-vrf?: vrf $(vrf))\n', u'sort-priority': u'70', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OSPFConfigCallPoint', u'cli-mode-name': u'config-router-ospf-vrf-$(vrf)'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='list', is_config=True)""",
})
self.__ospf = t
if hasattr(self, '_set'):
self._set() | def function[_set_ospf, parameter[self, v, load]]:
constant[
Setter method for ospf, mapped from YANG variable /rbridge_id/router/ospf (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ospf is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ospf() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da2054a6740>
name[self].__ospf assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_ospf] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[ospf] . identifier[ospf] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__ospf] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_ospf(self, v, load=False):
"""
Setter method for ospf, mapped from YANG variable /rbridge_id/router/ospf (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ospf is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ospf() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('vrf', ospf.ospf, yang_name='ospf', rest_name='ospf', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='vrf', extensions={u'tailf-common': {u'info': u'Open Shortest Path First (OSPF)', u'cli-run-template-enter': u' router ospf$($(vrf)==default-vrf?: vrf $(vrf))\n', u'sort-priority': u'70', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OSPFConfigCallPoint', u'cli-mode-name': u'config-router-ospf-vrf-$(vrf)'}}), is_container='list', yang_name='ospf', rest_name='ospf', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Open Shortest Path First (OSPF)', u'cli-run-template-enter': u' router ospf$($(vrf)==default-vrf?: vrf $(vrf))\n', u'sort-priority': u'70', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OSPFConfigCallPoint', u'cli-mode-name': u'config-router-ospf-vrf-$(vrf)'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'ospf must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("vrf",ospf.ospf, yang_name="ospf", rest_name="ospf", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'vrf\', extensions={u\'tailf-common\': {u\'info\': u\'Open Shortest Path First (OSPF)\', u\'cli-run-template-enter\': u\' router ospf$($(vrf)==default-vrf?: vrf $(vrf))\n\', u\'sort-priority\': u\'70\', u\'cli-suppress-list-no\': None, u\'cli-full-command\': None, u\'callpoint\': u\'OSPFConfigCallPoint\', u\'cli-mode-name\': u\'config-router-ospf-vrf-$(vrf)\'}}), is_container=\'list\', yang_name="ospf", rest_name="ospf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Open Shortest Path First (OSPF)\', u\'cli-run-template-enter\': u\' router ospf$($(vrf)==default-vrf?: vrf $(vrf))\n\', u\'sort-priority\': u\'70\', u\'cli-suppress-list-no\': None, u\'cli-full-command\': None, u\'callpoint\': u\'OSPFConfigCallPoint\', u\'cli-mode-name\': u\'config-router-ospf-vrf-$(vrf)\'}}, namespace=\'urn:brocade.com:mgmt:brocade-ospf\', defining_module=\'brocade-ospf\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__ospf = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
async def start_client(self,
sock: anyio.abc.SocketStream,
addr,
path: str,
headers: Optional[List] = None,
subprotocols: Optional[List[str]] = None):
"""Start a client WS connection on this socket.
Returns: the AcceptConnection message.
"""
self._sock = sock
self._connection = WSConnection(ConnectionType.CLIENT)
if headers is None:
headers = []
if subprotocols is None:
subprotocols = []
data = self._connection.send(
Request(
host=addr[0],
target=path,
extra_headers=headers,
subprotocols=subprotocols))
await self._sock.send_all(data)
assert self._scope is None
self._scope = True
try:
event = await self._next_event()
if not isinstance(event, AcceptConnection):
raise ConnectionError("Failed to establish a connection",
event)
return event
finally:
self._scope = None | <ast.AsyncFunctionDef object at 0x7da18ede4310> | keyword[async] keyword[def] identifier[start_client] ( identifier[self] ,
identifier[sock] : identifier[anyio] . identifier[abc] . identifier[SocketStream] ,
identifier[addr] ,
identifier[path] : identifier[str] ,
identifier[headers] : identifier[Optional] [ identifier[List] ]= keyword[None] ,
identifier[subprotocols] : identifier[Optional] [ identifier[List] [ identifier[str] ]]= keyword[None] ):
literal[string]
identifier[self] . identifier[_sock] = identifier[sock]
identifier[self] . identifier[_connection] = identifier[WSConnection] ( identifier[ConnectionType] . identifier[CLIENT] )
keyword[if] identifier[headers] keyword[is] keyword[None] :
identifier[headers] =[]
keyword[if] identifier[subprotocols] keyword[is] keyword[None] :
identifier[subprotocols] =[]
identifier[data] = identifier[self] . identifier[_connection] . identifier[send] (
identifier[Request] (
identifier[host] = identifier[addr] [ literal[int] ],
identifier[target] = identifier[path] ,
identifier[extra_headers] = identifier[headers] ,
identifier[subprotocols] = identifier[subprotocols] ))
keyword[await] identifier[self] . identifier[_sock] . identifier[send_all] ( identifier[data] )
keyword[assert] identifier[self] . identifier[_scope] keyword[is] keyword[None]
identifier[self] . identifier[_scope] = keyword[True]
keyword[try] :
identifier[event] = keyword[await] identifier[self] . identifier[_next_event] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[event] , identifier[AcceptConnection] ):
keyword[raise] identifier[ConnectionError] ( literal[string] ,
identifier[event] )
keyword[return] identifier[event]
keyword[finally] :
identifier[self] . identifier[_scope] = keyword[None] | async def start_client(self, sock: anyio.abc.SocketStream, addr, path: str, headers: Optional[List]=None, subprotocols: Optional[List[str]]=None):
"""Start a client WS connection on this socket.
Returns: the AcceptConnection message.
"""
self._sock = sock
self._connection = WSConnection(ConnectionType.CLIENT)
if headers is None:
headers = [] # depends on [control=['if'], data=['headers']]
if subprotocols is None:
subprotocols = [] # depends on [control=['if'], data=['subprotocols']]
data = self._connection.send(Request(host=addr[0], target=path, extra_headers=headers, subprotocols=subprotocols))
await self._sock.send_all(data)
assert self._scope is None
self._scope = True
try:
event = await self._next_event()
if not isinstance(event, AcceptConnection):
raise ConnectionError('Failed to establish a connection', event) # depends on [control=['if'], data=[]]
return event # depends on [control=['try'], data=[]]
finally:
self._scope = None |