code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _doAtomicFileCreation(filePath):
"""Tries to atomically create the requested file."""
try:
_os.close(_os.open(filePath, _os.O_CREAT | _os.O_EXCL))
return True
except OSError as e:
if e.errno == _errno.EEXIST:
return False
else:
raise e | def function[_doAtomicFileCreation, parameter[filePath]]:
constant[Tries to atomically create the requested file.]
<ast.Try object at 0x7da20c794700> | keyword[def] identifier[_doAtomicFileCreation] ( identifier[filePath] ):
literal[string]
keyword[try] :
identifier[_os] . identifier[close] ( identifier[_os] . identifier[open] ( identifier[filePath] , identifier[_os] . identifier[O_CREAT] | identifier[_os] . identifier[O_EXCL] ))
keyword[return] keyword[True]
keyword[except] identifier[OSError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[errno] == identifier[_errno] . identifier[EEXIST] :
keyword[return] keyword[False]
keyword[else] :
keyword[raise] identifier[e] | def _doAtomicFileCreation(filePath):
"""Tries to atomically create the requested file."""
try:
_os.close(_os.open(filePath, _os.O_CREAT | _os.O_EXCL))
return True # depends on [control=['try'], data=[]]
except OSError as e:
if e.errno == _errno.EEXIST:
return False # depends on [control=['if'], data=[]]
else:
raise e # depends on [control=['except'], data=['e']] |
def managing_thread_main_simple():
"""Simpler thread to track whether main thread has been quiet for long enough
that a thread dump should be printed.
"""
import shutit_global
last_msg = ''
while True:
printed_anything = False
if shutit_global.shutit_global_object.log_trace_when_idle and time.time() - shutit_global.shutit_global_object.last_log_time > 10:
this_msg = ''
this_header = ''
for thread_id, stack in sys._current_frames().items():
# ignore own thread:
if thread_id == threading.current_thread().ident:
continue
printed_thread_started = False
for filename, lineno, name, line in traceback.extract_stack(stack):
if not printed_anything:
printed_anything = True
this_header += '\n='*80 + '\n'
this_header += 'STACK TRACES PRINTED ON IDLE: THREAD_ID: ' + str(thread_id) + ' at ' + time.strftime('%c') + '\n'
this_header += '='*80 + '\n'
if not printed_thread_started:
printed_thread_started = True
this_msg += '%s:%d:%s' % (filename, lineno, name) + '\n'
if line:
this_msg += ' %s' % (line,) + '\n'
if printed_anything:
this_msg += '='*80 + '\n'
this_msg += 'STACK TRACES DONE\n'
this_msg += '='*80 + '\n'
if this_msg != last_msg:
print(this_header + this_msg)
last_msg = this_msg
time.sleep(5) | def function[managing_thread_main_simple, parameter[]]:
constant[Simpler thread to track whether main thread has been quiet for long enough
that a thread dump should be printed.
]
import module[shutit_global]
variable[last_msg] assign[=] constant[]
while constant[True] begin[:]
variable[printed_anything] assign[=] constant[False]
if <ast.BoolOp object at 0x7da20e9631f0> begin[:]
variable[this_msg] assign[=] constant[]
variable[this_header] assign[=] constant[]
for taget[tuple[[<ast.Name object at 0x7da20e962500>, <ast.Name object at 0x7da20e962980>]]] in starred[call[call[name[sys]._current_frames, parameter[]].items, parameter[]]] begin[:]
if compare[name[thread_id] equal[==] call[name[threading].current_thread, parameter[]].ident] begin[:]
continue
variable[printed_thread_started] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da20e963ac0>, <ast.Name object at 0x7da20e963e20>, <ast.Name object at 0x7da20e962110>, <ast.Name object at 0x7da20e963fa0>]]] in starred[call[name[traceback].extract_stack, parameter[name[stack]]]] begin[:]
if <ast.UnaryOp object at 0x7da20e9619f0> begin[:]
variable[printed_anything] assign[=] constant[True]
<ast.AugAssign object at 0x7da20e9618a0>
<ast.AugAssign object at 0x7da20e962890>
<ast.AugAssign object at 0x7da2047e9d20>
if <ast.UnaryOp object at 0x7da2047e8760> begin[:]
variable[printed_thread_started] assign[=] constant[True]
<ast.AugAssign object at 0x7da2047e99f0>
if name[line] begin[:]
<ast.AugAssign object at 0x7da20e963430>
if name[printed_anything] begin[:]
<ast.AugAssign object at 0x7da20e963610>
<ast.AugAssign object at 0x7da20e962bc0>
<ast.AugAssign object at 0x7da20e963250>
if compare[name[this_msg] not_equal[!=] name[last_msg]] begin[:]
call[name[print], parameter[binary_operation[name[this_header] + name[this_msg]]]]
variable[last_msg] assign[=] name[this_msg]
call[name[time].sleep, parameter[constant[5]]] | keyword[def] identifier[managing_thread_main_simple] ():
literal[string]
keyword[import] identifier[shutit_global]
identifier[last_msg] = literal[string]
keyword[while] keyword[True] :
identifier[printed_anything] = keyword[False]
keyword[if] identifier[shutit_global] . identifier[shutit_global_object] . identifier[log_trace_when_idle] keyword[and] identifier[time] . identifier[time] ()- identifier[shutit_global] . identifier[shutit_global_object] . identifier[last_log_time] > literal[int] :
identifier[this_msg] = literal[string]
identifier[this_header] = literal[string]
keyword[for] identifier[thread_id] , identifier[stack] keyword[in] identifier[sys] . identifier[_current_frames] (). identifier[items] ():
keyword[if] identifier[thread_id] == identifier[threading] . identifier[current_thread] (). identifier[ident] :
keyword[continue]
identifier[printed_thread_started] = keyword[False]
keyword[for] identifier[filename] , identifier[lineno] , identifier[name] , identifier[line] keyword[in] identifier[traceback] . identifier[extract_stack] ( identifier[stack] ):
keyword[if] keyword[not] identifier[printed_anything] :
identifier[printed_anything] = keyword[True]
identifier[this_header] += literal[string] * literal[int] + literal[string]
identifier[this_header] += literal[string] + identifier[str] ( identifier[thread_id] )+ literal[string] + identifier[time] . identifier[strftime] ( literal[string] )+ literal[string]
identifier[this_header] += literal[string] * literal[int] + literal[string]
keyword[if] keyword[not] identifier[printed_thread_started] :
identifier[printed_thread_started] = keyword[True]
identifier[this_msg] += literal[string] %( identifier[filename] , identifier[lineno] , identifier[name] )+ literal[string]
keyword[if] identifier[line] :
identifier[this_msg] += literal[string] %( identifier[line] ,)+ literal[string]
keyword[if] identifier[printed_anything] :
identifier[this_msg] += literal[string] * literal[int] + literal[string]
identifier[this_msg] += literal[string]
identifier[this_msg] += literal[string] * literal[int] + literal[string]
keyword[if] identifier[this_msg] != identifier[last_msg] :
identifier[print] ( identifier[this_header] + identifier[this_msg] )
identifier[last_msg] = identifier[this_msg]
identifier[time] . identifier[sleep] ( literal[int] ) | def managing_thread_main_simple():
"""Simpler thread to track whether main thread has been quiet for long enough
that a thread dump should be printed.
"""
import shutit_global
last_msg = ''
while True:
printed_anything = False
if shutit_global.shutit_global_object.log_trace_when_idle and time.time() - shutit_global.shutit_global_object.last_log_time > 10:
this_msg = ''
this_header = ''
for (thread_id, stack) in sys._current_frames().items(): # ignore own thread:
if thread_id == threading.current_thread().ident:
continue # depends on [control=['if'], data=[]]
printed_thread_started = False
for (filename, lineno, name, line) in traceback.extract_stack(stack):
if not printed_anything:
printed_anything = True
this_header += '\n=' * 80 + '\n'
this_header += 'STACK TRACES PRINTED ON IDLE: THREAD_ID: ' + str(thread_id) + ' at ' + time.strftime('%c') + '\n'
this_header += '=' * 80 + '\n' # depends on [control=['if'], data=[]]
if not printed_thread_started:
printed_thread_started = True # depends on [control=['if'], data=[]]
this_msg += '%s:%d:%s' % (filename, lineno, name) + '\n'
if line:
this_msg += ' %s' % (line,) + '\n' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
if printed_anything:
this_msg += '=' * 80 + '\n'
this_msg += 'STACK TRACES DONE\n'
this_msg += '=' * 80 + '\n' # depends on [control=['if'], data=[]]
if this_msg != last_msg:
print(this_header + this_msg)
last_msg = this_msg # depends on [control=['if'], data=['this_msg', 'last_msg']] # depends on [control=['if'], data=[]]
time.sleep(5) # depends on [control=['while'], data=[]] |
def create(self, name, desc):
"""Create a new security group.
:returns: SecurityGroup object created
"""
body = {'security_group': {'name': name,
'description': desc,
'tenant_id': self.request.user.project_id}}
secgroup = self.client.create_security_group(body)
return SecurityGroup(secgroup.get('security_group')) | def function[create, parameter[self, name, desc]]:
constant[Create a new security group.
:returns: SecurityGroup object created
]
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b1982770>], [<ast.Dict object at 0x7da1b1982f80>]]
variable[secgroup] assign[=] call[name[self].client.create_security_group, parameter[name[body]]]
return[call[name[SecurityGroup], parameter[call[name[secgroup].get, parameter[constant[security_group]]]]]] | keyword[def] identifier[create] ( identifier[self] , identifier[name] , identifier[desc] ):
literal[string]
identifier[body] ={ literal[string] :{ literal[string] : identifier[name] ,
literal[string] : identifier[desc] ,
literal[string] : identifier[self] . identifier[request] . identifier[user] . identifier[project_id] }}
identifier[secgroup] = identifier[self] . identifier[client] . identifier[create_security_group] ( identifier[body] )
keyword[return] identifier[SecurityGroup] ( identifier[secgroup] . identifier[get] ( literal[string] )) | def create(self, name, desc):
"""Create a new security group.
:returns: SecurityGroup object created
"""
body = {'security_group': {'name': name, 'description': desc, 'tenant_id': self.request.user.project_id}}
secgroup = self.client.create_security_group(body)
return SecurityGroup(secgroup.get('security_group')) |
def installed(name, default=False, user=None):
'''
Verify that the specified ruby is installed with rbenv. Rbenv is
installed if necessary.
name
The version of ruby to install
default : False
Whether to make this ruby the default.
user: None
The user to run rbenv as.
.. versionadded:: 0.17.0
.. versionadded:: 0.16.0
'''
ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
rbenv_installed_ret = copy.deepcopy(ret)
if name.startswith('ruby-'):
name = re.sub(r'^ruby-', '', name)
if __opts__['test']:
ret = _ruby_installed(ret, name, user=user)
if not ret['result']:
ret['comment'] = 'Ruby {0} is set to be installed'.format(name)
else:
ret['comment'] = 'Ruby {0} is already installed'.format(name)
return ret
rbenv_installed_ret = _check_and_install_rbenv(rbenv_installed_ret, user)
if rbenv_installed_ret['result'] is False:
ret['result'] = False
ret['comment'] = 'Rbenv failed to install'
return ret
else:
return _check_and_install_ruby(ret, name, default, user=user) | def function[installed, parameter[name, default, user]]:
constant[
Verify that the specified ruby is installed with rbenv. Rbenv is
installed if necessary.
name
The version of ruby to install
default : False
Whether to make this ruby the default.
user: None
The user to run rbenv as.
.. versionadded:: 0.17.0
.. versionadded:: 0.16.0
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18dc058d0>, <ast.Constant object at 0x7da18dc07f10>, <ast.Constant object at 0x7da18dc04f40>, <ast.Constant object at 0x7da18dc067d0>], [<ast.Name object at 0x7da18dc042b0>, <ast.Constant object at 0x7da18dc07730>, <ast.Constant object at 0x7da18dc05660>, <ast.Dict object at 0x7da18dc05f90>]]
variable[rbenv_installed_ret] assign[=] call[name[copy].deepcopy, parameter[name[ret]]]
if call[name[name].startswith, parameter[constant[ruby-]]] begin[:]
variable[name] assign[=] call[name[re].sub, parameter[constant[^ruby-], constant[], name[name]]]
if call[name[__opts__]][constant[test]] begin[:]
variable[ret] assign[=] call[name[_ruby_installed], parameter[name[ret], name[name]]]
if <ast.UnaryOp object at 0x7da18eb55ff0> begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Ruby {0} is set to be installed].format, parameter[name[name]]]
return[name[ret]]
variable[rbenv_installed_ret] assign[=] call[name[_check_and_install_rbenv], parameter[name[rbenv_installed_ret], name[user]]]
if compare[call[name[rbenv_installed_ret]][constant[result]] is constant[False]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] constant[Rbenv failed to install]
return[name[ret]] | keyword[def] identifier[installed] ( identifier[name] , identifier[default] = keyword[False] , identifier[user] = keyword[None] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] :{}}
identifier[rbenv_installed_ret] = identifier[copy] . identifier[deepcopy] ( identifier[ret] )
keyword[if] identifier[name] . identifier[startswith] ( literal[string] ):
identifier[name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[name] )
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] = identifier[_ruby_installed] ( identifier[ret] , identifier[name] , identifier[user] = identifier[user] )
keyword[if] keyword[not] identifier[ret] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret]
identifier[rbenv_installed_ret] = identifier[_check_and_install_rbenv] ( identifier[rbenv_installed_ret] , identifier[user] )
keyword[if] identifier[rbenv_installed_ret] [ literal[string] ] keyword[is] keyword[False] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[else] :
keyword[return] identifier[_check_and_install_ruby] ( identifier[ret] , identifier[name] , identifier[default] , identifier[user] = identifier[user] ) | def installed(name, default=False, user=None):
"""
Verify that the specified ruby is installed with rbenv. Rbenv is
installed if necessary.
name
The version of ruby to install
default : False
Whether to make this ruby the default.
user: None
The user to run rbenv as.
.. versionadded:: 0.17.0
.. versionadded:: 0.16.0
"""
ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
rbenv_installed_ret = copy.deepcopy(ret)
if name.startswith('ruby-'):
name = re.sub('^ruby-', '', name) # depends on [control=['if'], data=[]]
if __opts__['test']:
ret = _ruby_installed(ret, name, user=user)
if not ret['result']:
ret['comment'] = 'Ruby {0} is set to be installed'.format(name) # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Ruby {0} is already installed'.format(name)
return ret # depends on [control=['if'], data=[]]
rbenv_installed_ret = _check_and_install_rbenv(rbenv_installed_ret, user)
if rbenv_installed_ret['result'] is False:
ret['result'] = False
ret['comment'] = 'Rbenv failed to install'
return ret # depends on [control=['if'], data=[]]
else:
return _check_and_install_ruby(ret, name, default, user=user) |
def wrap(string, length, indent):
""" Wrap a string at a line length """
newline = "\n" + " " * indent
return newline.join((string[i : i + length] for i in range(0, len(string), length))) | def function[wrap, parameter[string, length, indent]]:
constant[ Wrap a string at a line length ]
variable[newline] assign[=] binary_operation[constant[
] + binary_operation[constant[ ] * name[indent]]]
return[call[name[newline].join, parameter[<ast.GeneratorExp object at 0x7da1b0ebf5b0>]]] | keyword[def] identifier[wrap] ( identifier[string] , identifier[length] , identifier[indent] ):
literal[string]
identifier[newline] = literal[string] + literal[string] * identifier[indent]
keyword[return] identifier[newline] . identifier[join] (( identifier[string] [ identifier[i] : identifier[i] + identifier[length] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[string] ), identifier[length] ))) | def wrap(string, length, indent):
""" Wrap a string at a line length """
newline = '\n' + ' ' * indent
return newline.join((string[i:i + length] for i in range(0, len(string), length))) |
def default_content_filter(sender, instance, **kwargs): # pylint: disable=unused-argument
"""
Set default value for `EnterpriseCustomerCatalog.content_filter` if not already set.
"""
if kwargs['created'] and not instance.content_filter:
instance.content_filter = get_default_catalog_content_filter()
instance.save() | def function[default_content_filter, parameter[sender, instance]]:
constant[
Set default value for `EnterpriseCustomerCatalog.content_filter` if not already set.
]
if <ast.BoolOp object at 0x7da1b0052620> begin[:]
name[instance].content_filter assign[=] call[name[get_default_catalog_content_filter], parameter[]]
call[name[instance].save, parameter[]] | keyword[def] identifier[default_content_filter] ( identifier[sender] , identifier[instance] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[kwargs] [ literal[string] ] keyword[and] keyword[not] identifier[instance] . identifier[content_filter] :
identifier[instance] . identifier[content_filter] = identifier[get_default_catalog_content_filter] ()
identifier[instance] . identifier[save] () | def default_content_filter(sender, instance, **kwargs): # pylint: disable=unused-argument
'\n Set default value for `EnterpriseCustomerCatalog.content_filter` if not already set.\n '
if kwargs['created'] and (not instance.content_filter):
instance.content_filter = get_default_catalog_content_filter()
instance.save() # depends on [control=['if'], data=[]] |
def validiate_webhook_signature(self, webhook, signature):
"""Validates a webhook signature from a webhook body + client secret
Parameters
webhook (string)
The request body of the webhook.
signature (string)
The webhook signature specified in X-Uber-Signature header.
"""
digester = hmac.new(self.session.oauth2credential.client_secret,
webhook,
hashlib.sha256
)
return (signature == digester.hexdigest()) | def function[validiate_webhook_signature, parameter[self, webhook, signature]]:
constant[Validates a webhook signature from a webhook body + client secret
Parameters
webhook (string)
The request body of the webhook.
signature (string)
The webhook signature specified in X-Uber-Signature header.
]
variable[digester] assign[=] call[name[hmac].new, parameter[name[self].session.oauth2credential.client_secret, name[webhook], name[hashlib].sha256]]
return[compare[name[signature] equal[==] call[name[digester].hexdigest, parameter[]]]] | keyword[def] identifier[validiate_webhook_signature] ( identifier[self] , identifier[webhook] , identifier[signature] ):
literal[string]
identifier[digester] = identifier[hmac] . identifier[new] ( identifier[self] . identifier[session] . identifier[oauth2credential] . identifier[client_secret] ,
identifier[webhook] ,
identifier[hashlib] . identifier[sha256]
)
keyword[return] ( identifier[signature] == identifier[digester] . identifier[hexdigest] ()) | def validiate_webhook_signature(self, webhook, signature):
"""Validates a webhook signature from a webhook body + client secret
Parameters
webhook (string)
The request body of the webhook.
signature (string)
The webhook signature specified in X-Uber-Signature header.
"""
digester = hmac.new(self.session.oauth2credential.client_secret, webhook, hashlib.sha256)
return signature == digester.hexdigest() |
def convert_hetatms_to_Hill_notation(lines, ignore_list = []):#['HOH']):
'''From the PDB site:
The elements of the chemical formula are given in the order following Hill ordering. The order of elements depends
on whether carbon is present or not. If carbon is present, the order should be: C, then H, then the other elements
in alphabetical order of their symbol. If carbon is not present, the elements are listed purely in alphabetic order
of their symbol. This is the 'Hill' system used by Chemical Abstracts.
WARNING: This assumes that all atoms are in the PDB. This is not usually the case so the formulae will be missing
atoms in those cases. To account for some missing data, we merge the element counters to use the most
amount of information we can.
In general, the FORMUL lines should be used. This function can be used in files with missing headers.
'''
ignore_list = set(ignore_list)
hetatms = {}
for l in lines:
if l.startswith('HETATM'):
het_id = l[17:20].strip()
if het_id in ignore_list:
continue
res_id = l[21:27]
atom_name = l[12:16]
alt_loc = l[16]
hetatms[het_id] = hetatms.get(het_id, {})
hetatms[het_id][res_id] = hetatms[het_id].get(res_id, {})
hetatms[het_id][res_id][alt_loc] = hetatms[het_id][res_id].get(alt_loc, ElementCounter())
hetatms[het_id][res_id][alt_loc].add(atom_name)
for het_id, res_atoms in hetatms.iteritems():
res_ids = res_atoms.keys()
for res_id in res_ids:
ecs = hetatms[het_id][res_id].values()
for x in range(1, len(ecs)):
ecs[0].merge(ecs[x])
hetatms[het_id][res_id] = ecs[0]
str_mapping = {}
mapping = {}
for het_id, res_atoms in hetatms.iteritems():
res_ids = res_atoms.keys()
for res_id in res_ids:
Hill_notation = hetatms[het_id][res_id]
if str_mapping.get(het_id):
if not str_mapping[het_id] == str(Hill_notation):
mapping[het_id].merge(Hill_notation)
str_mapping[het_id] = str(mapping[het_id])
else:
str_mapping[het_id] = str(Hill_notation)
mapping[het_id] = Hill_notation
return mapping | def function[convert_hetatms_to_Hill_notation, parameter[lines, ignore_list]]:
constant[From the PDB site:
The elements of the chemical formula are given in the order following Hill ordering. The order of elements depends
on whether carbon is present or not. If carbon is present, the order should be: C, then H, then the other elements
in alphabetical order of their symbol. If carbon is not present, the elements are listed purely in alphabetic order
of their symbol. This is the 'Hill' system used by Chemical Abstracts.
WARNING: This assumes that all atoms are in the PDB. This is not usually the case so the formulae will be missing
atoms in those cases. To account for some missing data, we merge the element counters to use the most
amount of information we can.
In general, the FORMUL lines should be used. This function can be used in files with missing headers.
]
variable[ignore_list] assign[=] call[name[set], parameter[name[ignore_list]]]
variable[hetatms] assign[=] dictionary[[], []]
for taget[name[l]] in starred[name[lines]] begin[:]
if call[name[l].startswith, parameter[constant[HETATM]]] begin[:]
variable[het_id] assign[=] call[call[name[l]][<ast.Slice object at 0x7da20c6e6f50>].strip, parameter[]]
if compare[name[het_id] in name[ignore_list]] begin[:]
continue
variable[res_id] assign[=] call[name[l]][<ast.Slice object at 0x7da20c6e7c40>]
variable[atom_name] assign[=] call[name[l]][<ast.Slice object at 0x7da20c6e4a00>]
variable[alt_loc] assign[=] call[name[l]][constant[16]]
call[name[hetatms]][name[het_id]] assign[=] call[name[hetatms].get, parameter[name[het_id], dictionary[[], []]]]
call[call[name[hetatms]][name[het_id]]][name[res_id]] assign[=] call[call[name[hetatms]][name[het_id]].get, parameter[name[res_id], dictionary[[], []]]]
call[call[call[name[hetatms]][name[het_id]]][name[res_id]]][name[alt_loc]] assign[=] call[call[call[name[hetatms]][name[het_id]]][name[res_id]].get, parameter[name[alt_loc], call[name[ElementCounter], parameter[]]]]
call[call[call[call[name[hetatms]][name[het_id]]][name[res_id]]][name[alt_loc]].add, parameter[name[atom_name]]]
for taget[tuple[[<ast.Name object at 0x7da18f8112d0>, <ast.Name object at 0x7da18f813c10>]]] in starred[call[name[hetatms].iteritems, parameter[]]] begin[:]
variable[res_ids] assign[=] call[name[res_atoms].keys, parameter[]]
for taget[name[res_id]] in starred[name[res_ids]] begin[:]
variable[ecs] assign[=] call[call[call[name[hetatms]][name[het_id]]][name[res_id]].values, parameter[]]
for taget[name[x]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[ecs]]]]]] begin[:]
call[call[name[ecs]][constant[0]].merge, parameter[call[name[ecs]][name[x]]]]
call[call[name[hetatms]][name[het_id]]][name[res_id]] assign[=] call[name[ecs]][constant[0]]
variable[str_mapping] assign[=] dictionary[[], []]
variable[mapping] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18f8131f0>, <ast.Name object at 0x7da18f8134f0>]]] in starred[call[name[hetatms].iteritems, parameter[]]] begin[:]
variable[res_ids] assign[=] call[name[res_atoms].keys, parameter[]]
for taget[name[res_id]] in starred[name[res_ids]] begin[:]
variable[Hill_notation] assign[=] call[call[name[hetatms]][name[het_id]]][name[res_id]]
if call[name[str_mapping].get, parameter[name[het_id]]] begin[:]
if <ast.UnaryOp object at 0x7da18f812710> begin[:]
call[call[name[mapping]][name[het_id]].merge, parameter[name[Hill_notation]]]
call[name[str_mapping]][name[het_id]] assign[=] call[name[str], parameter[call[name[mapping]][name[het_id]]]]
return[name[mapping]] | keyword[def] identifier[convert_hetatms_to_Hill_notation] ( identifier[lines] , identifier[ignore_list] =[]):
literal[string]
identifier[ignore_list] = identifier[set] ( identifier[ignore_list] )
identifier[hetatms] ={}
keyword[for] identifier[l] keyword[in] identifier[lines] :
keyword[if] identifier[l] . identifier[startswith] ( literal[string] ):
identifier[het_id] = identifier[l] [ literal[int] : literal[int] ]. identifier[strip] ()
keyword[if] identifier[het_id] keyword[in] identifier[ignore_list] :
keyword[continue]
identifier[res_id] = identifier[l] [ literal[int] : literal[int] ]
identifier[atom_name] = identifier[l] [ literal[int] : literal[int] ]
identifier[alt_loc] = identifier[l] [ literal[int] ]
identifier[hetatms] [ identifier[het_id] ]= identifier[hetatms] . identifier[get] ( identifier[het_id] ,{})
identifier[hetatms] [ identifier[het_id] ][ identifier[res_id] ]= identifier[hetatms] [ identifier[het_id] ]. identifier[get] ( identifier[res_id] ,{})
identifier[hetatms] [ identifier[het_id] ][ identifier[res_id] ][ identifier[alt_loc] ]= identifier[hetatms] [ identifier[het_id] ][ identifier[res_id] ]. identifier[get] ( identifier[alt_loc] , identifier[ElementCounter] ())
identifier[hetatms] [ identifier[het_id] ][ identifier[res_id] ][ identifier[alt_loc] ]. identifier[add] ( identifier[atom_name] )
keyword[for] identifier[het_id] , identifier[res_atoms] keyword[in] identifier[hetatms] . identifier[iteritems] ():
identifier[res_ids] = identifier[res_atoms] . identifier[keys] ()
keyword[for] identifier[res_id] keyword[in] identifier[res_ids] :
identifier[ecs] = identifier[hetatms] [ identifier[het_id] ][ identifier[res_id] ]. identifier[values] ()
keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[ecs] )):
identifier[ecs] [ literal[int] ]. identifier[merge] ( identifier[ecs] [ identifier[x] ])
identifier[hetatms] [ identifier[het_id] ][ identifier[res_id] ]= identifier[ecs] [ literal[int] ]
identifier[str_mapping] ={}
identifier[mapping] ={}
keyword[for] identifier[het_id] , identifier[res_atoms] keyword[in] identifier[hetatms] . identifier[iteritems] ():
identifier[res_ids] = identifier[res_atoms] . identifier[keys] ()
keyword[for] identifier[res_id] keyword[in] identifier[res_ids] :
identifier[Hill_notation] = identifier[hetatms] [ identifier[het_id] ][ identifier[res_id] ]
keyword[if] identifier[str_mapping] . identifier[get] ( identifier[het_id] ):
keyword[if] keyword[not] identifier[str_mapping] [ identifier[het_id] ]== identifier[str] ( identifier[Hill_notation] ):
identifier[mapping] [ identifier[het_id] ]. identifier[merge] ( identifier[Hill_notation] )
identifier[str_mapping] [ identifier[het_id] ]= identifier[str] ( identifier[mapping] [ identifier[het_id] ])
keyword[else] :
identifier[str_mapping] [ identifier[het_id] ]= identifier[str] ( identifier[Hill_notation] )
identifier[mapping] [ identifier[het_id] ]= identifier[Hill_notation]
keyword[return] identifier[mapping] | def convert_hetatms_to_Hill_notation(lines, ignore_list=[]): #['HOH']):
"From the PDB site:\n The elements of the chemical formula are given in the order following Hill ordering. The order of elements depends\n on whether carbon is present or not. If carbon is present, the order should be: C, then H, then the other elements\n in alphabetical order of their symbol. If carbon is not present, the elements are listed purely in alphabetic order\n of their symbol. This is the 'Hill' system used by Chemical Abstracts.\n\n WARNING: This assumes that all atoms are in the PDB. This is not usually the case so the formulae will be missing\n atoms in those cases. To account for some missing data, we merge the element counters to use the most\n amount of information we can.\n In general, the FORMUL lines should be used. This function can be used in files with missing headers.\n "
ignore_list = set(ignore_list)
hetatms = {}
for l in lines:
if l.startswith('HETATM'):
het_id = l[17:20].strip()
if het_id in ignore_list:
continue # depends on [control=['if'], data=[]]
res_id = l[21:27]
atom_name = l[12:16]
alt_loc = l[16]
hetatms[het_id] = hetatms.get(het_id, {})
hetatms[het_id][res_id] = hetatms[het_id].get(res_id, {})
hetatms[het_id][res_id][alt_loc] = hetatms[het_id][res_id].get(alt_loc, ElementCounter())
hetatms[het_id][res_id][alt_loc].add(atom_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['l']]
for (het_id, res_atoms) in hetatms.iteritems():
res_ids = res_atoms.keys()
for res_id in res_ids:
ecs = hetatms[het_id][res_id].values()
for x in range(1, len(ecs)):
ecs[0].merge(ecs[x]) # depends on [control=['for'], data=['x']]
hetatms[het_id][res_id] = ecs[0] # depends on [control=['for'], data=['res_id']] # depends on [control=['for'], data=[]]
str_mapping = {}
mapping = {}
for (het_id, res_atoms) in hetatms.iteritems():
res_ids = res_atoms.keys()
for res_id in res_ids:
Hill_notation = hetatms[het_id][res_id]
if str_mapping.get(het_id):
if not str_mapping[het_id] == str(Hill_notation):
mapping[het_id].merge(Hill_notation)
str_mapping[het_id] = str(mapping[het_id]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
str_mapping[het_id] = str(Hill_notation)
mapping[het_id] = Hill_notation # depends on [control=['for'], data=['res_id']] # depends on [control=['for'], data=[]]
return mapping |
def flatten(l, types=(list, float)):
"""
Flat nested list of lists into a single list.
"""
l = [item if isinstance(item, types) else [item] for item in l]
return [item for sublist in l for item in sublist] | def function[flatten, parameter[l, types]]:
constant[
Flat nested list of lists into a single list.
]
variable[l] assign[=] <ast.ListComp object at 0x7da1b2344a30>
return[<ast.ListComp object at 0x7da18f58d360>] | keyword[def] identifier[flatten] ( identifier[l] , identifier[types] =( identifier[list] , identifier[float] )):
literal[string]
identifier[l] =[ identifier[item] keyword[if] identifier[isinstance] ( identifier[item] , identifier[types] ) keyword[else] [ identifier[item] ] keyword[for] identifier[item] keyword[in] identifier[l] ]
keyword[return] [ identifier[item] keyword[for] identifier[sublist] keyword[in] identifier[l] keyword[for] identifier[item] keyword[in] identifier[sublist] ] | def flatten(l, types=(list, float)):
"""
Flat nested list of lists into a single list.
"""
l = [item if isinstance(item, types) else [item] for item in l]
return [item for sublist in l for item in sublist] |
def marker(self, marker_name=None, label=None,
color=None, retina=False):
"""Returns a single marker image without any
background map.
Parameters
----------
marker_name : str
The marker's shape and size.
label : str, optional
The marker's alphanumeric label.
Options are a through z, 0 through 99, or the
name of a valid Maki icon.
color : str, optional
The marker's color.
Options are three- or six-digit hexadecimal
color codes.
retina : bool, optional
The marker's scale, where True indicates Retina scale
(double scale) and False indicates regular scale.
The default value is false.
Returns
-------
request.Response
The response object with the specified marker.
"""
# Check for marker_name.
if marker_name is None:
raise ValidationError(
"marker_name is a required argument"
)
# Validate marker_name and retina.
marker_name = self._validate_marker_name(marker_name)
retina = self._validate_retina(retina)
# Create dict and start building URI resource path.
path_values = dict(
marker_name=marker_name
)
path_part = "/marker/{marker_name}"
# Validate label, update dict,
# and continue building URI resource path.
if label is not None:
label = self._validate_label(label)
path_values["label"] = label
path_part += "-{label}"
# Validate color, update dict,
# and continue building URI resource path.
if color is not None:
color = self._validate_color(color)
path_values["color"] = color
path_part += "+{color}"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Finish building URI resource path.
path_part = "{}.png".format(retina)
uri += path_part
# Send HTTP GET request.
response = self.session.get(uri)
self.handle_http_error(response)
return response | def function[marker, parameter[self, marker_name, label, color, retina]]:
constant[Returns a single marker image without any
background map.
Parameters
----------
marker_name : str
The marker's shape and size.
label : str, optional
The marker's alphanumeric label.
Options are a through z, 0 through 99, or the
name of a valid Maki icon.
color : str, optional
The marker's color.
Options are three- or six-digit hexadecimal
color codes.
retina : bool, optional
The marker's scale, where True indicates Retina scale
(double scale) and False indicates regular scale.
The default value is false.
Returns
-------
request.Response
The response object with the specified marker.
]
if compare[name[marker_name] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b18be3e0>
variable[marker_name] assign[=] call[name[self]._validate_marker_name, parameter[name[marker_name]]]
variable[retina] assign[=] call[name[self]._validate_retina, parameter[name[retina]]]
variable[path_values] assign[=] call[name[dict], parameter[]]
variable[path_part] assign[=] constant[/marker/{marker_name}]
if compare[name[label] is_not constant[None]] begin[:]
variable[label] assign[=] call[name[self]._validate_label, parameter[name[label]]]
call[name[path_values]][constant[label]] assign[=] name[label]
<ast.AugAssign object at 0x7da1b18327d0>
if compare[name[color] is_not constant[None]] begin[:]
variable[color] assign[=] call[name[self]._validate_color, parameter[name[color]]]
call[name[path_values]][constant[color]] assign[=] name[color]
<ast.AugAssign object at 0x7da1b1833e20>
variable[uri] assign[=] call[call[name[URITemplate], parameter[binary_operation[name[self].base_uri + name[path_part]]]].expand, parameter[]]
variable[path_part] assign[=] call[constant[{}.png].format, parameter[name[retina]]]
<ast.AugAssign object at 0x7da1b1832020>
variable[response] assign[=] call[name[self].session.get, parameter[name[uri]]]
call[name[self].handle_http_error, parameter[name[response]]]
return[name[response]] | keyword[def] identifier[marker] ( identifier[self] , identifier[marker_name] = keyword[None] , identifier[label] = keyword[None] ,
identifier[color] = keyword[None] , identifier[retina] = keyword[False] ):
literal[string]
keyword[if] identifier[marker_name] keyword[is] keyword[None] :
keyword[raise] identifier[ValidationError] (
literal[string]
)
identifier[marker_name] = identifier[self] . identifier[_validate_marker_name] ( identifier[marker_name] )
identifier[retina] = identifier[self] . identifier[_validate_retina] ( identifier[retina] )
identifier[path_values] = identifier[dict] (
identifier[marker_name] = identifier[marker_name]
)
identifier[path_part] = literal[string]
keyword[if] identifier[label] keyword[is] keyword[not] keyword[None] :
identifier[label] = identifier[self] . identifier[_validate_label] ( identifier[label] )
identifier[path_values] [ literal[string] ]= identifier[label]
identifier[path_part] += literal[string]
keyword[if] identifier[color] keyword[is] keyword[not] keyword[None] :
identifier[color] = identifier[self] . identifier[_validate_color] ( identifier[color] )
identifier[path_values] [ literal[string] ]= identifier[color]
identifier[path_part] += literal[string]
identifier[uri] = identifier[URITemplate] ( identifier[self] . identifier[base_uri] + identifier[path_part] ). identifier[expand] (** identifier[path_values] )
identifier[path_part] = literal[string] . identifier[format] ( identifier[retina] )
identifier[uri] += identifier[path_part]
identifier[response] = identifier[self] . identifier[session] . identifier[get] ( identifier[uri] )
identifier[self] . identifier[handle_http_error] ( identifier[response] )
keyword[return] identifier[response] | def marker(self, marker_name=None, label=None, color=None, retina=False):
"""Returns a single marker image without any
background map.
Parameters
----------
marker_name : str
The marker's shape and size.
label : str, optional
The marker's alphanumeric label.
Options are a through z, 0 through 99, or the
name of a valid Maki icon.
color : str, optional
The marker's color.
Options are three- or six-digit hexadecimal
color codes.
retina : bool, optional
The marker's scale, where True indicates Retina scale
(double scale) and False indicates regular scale.
The default value is false.
Returns
-------
request.Response
The response object with the specified marker.
"""
# Check for marker_name.
if marker_name is None:
raise ValidationError('marker_name is a required argument') # depends on [control=['if'], data=[]]
# Validate marker_name and retina.
marker_name = self._validate_marker_name(marker_name)
retina = self._validate_retina(retina)
# Create dict and start building URI resource path.
path_values = dict(marker_name=marker_name)
path_part = '/marker/{marker_name}'
# Validate label, update dict,
# and continue building URI resource path.
if label is not None:
label = self._validate_label(label)
path_values['label'] = label
path_part += '-{label}' # depends on [control=['if'], data=['label']]
# Validate color, update dict,
# and continue building URI resource path.
if color is not None:
color = self._validate_color(color)
path_values['color'] = color
path_part += '+{color}' # depends on [control=['if'], data=['color']]
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Finish building URI resource path.
path_part = '{}.png'.format(retina)
uri += path_part
# Send HTTP GET request.
response = self.session.get(uri)
self.handle_http_error(response)
return response |
def from_equation(expr, vars, pars, name=None, hessian=False):
r"""
Create a potential class from an expression for the potential.
.. note::
This utility requires having `Sympy <http://www.sympy.org/>`_ installed.
.. warning::
These potentials are *not* pickle-able and cannot be written
out to YAML files (using `~gala.potential.PotentialBase.save()`)
Parameters
----------
expr : :class:`sympy.core.expr.Expr`, str
Either a ``Sympy`` expression, or a string that can be converted to
a ``Sympy`` expression.
vars : iterable
An iterable of variable names in the expression.
pars : iterable
An iterable of parameter names in the expression.
name : str (optional)
The name of the potential class returned.
hessian : bool (optional)
Generate a function to compute the Hessian.
Returns
-------
CustomPotential : `~gala.potential.PotentialBase`
A potential class that represents the input equation. To instantiate the
potential, use just like a normal class with parameters.
Examples
--------
Here we'll create a potential class for the harmonic oscillator
potential, :math:`\Phi(x) = \frac{1}{2}\,k\,x^2`::
>>> Potential = from_equation("1/2*k*x**2", vars="x", pars="k",
... name='HarmonicOscillator')
>>> p1 = Potential(k=1.)
>>> p1
<HarmonicOscillatorPotential: k=1.00 (dimensionless)>
The potential class (and object) is a fully-fledged subclass of
`~gala.potential.PotentialBase` and therefore has many useful methods.
For example, to integrate an orbit::
>>> orbit = p1.integrate_orbit([1.,0], dt=0.01, n_steps=1000)
"""
try:
import sympy
from sympy.utilities.lambdify import lambdify
except ImportError:
raise ImportError("sympy is required to use 'from_equation()' "
"potential class creation.")
# convert all input to Sympy objects
expr = sympy.sympify(expr)
vars = [sympy.sympify(v) for v in vars]
var_names = [v.name for v in vars]
pars = [sympy.sympify(p) for p in pars]
par_names = [p.name for p in pars]
ndim = len(vars)
# Energy / value
energyfunc = lambdify(vars + pars, expr, dummify=False, modules='numpy')
# Gradient
gradfuncs = []
for var in vars:
gradfuncs.append(lambdify(vars + pars, sympy.diff(expr,var), dummify=False, modules='numpy'))
class CustomPotential(PotentialBase):
def __init__(self, units=None, **kwargs):
for par in par_names:
if par not in kwargs:
raise ValueError("You must specify a value for "
"parameter '{}'.".format(par))
super(CustomPotential,self).__init__(units=units,
parameters=kwargs,
ndim=ndim)
def _energy(self, w, t=0.):
kw = self.parameters.copy()
for k,v in kw.items():
kw[k] = v.value
for i,name in enumerate(var_names):
kw[name] = w[:,i]
return np.array(energyfunc(**kw))
def _gradient(self, w, t=0.):
kw = self.parameters.copy()
for k,v in kw.items():
kw[k] = v.value
for i,name in enumerate(var_names):
kw[name] = w[:,i]
grad = np.vstack([f(**kw)[np.newaxis] for f in gradfuncs])
return grad.T
if name is not None:
# name = _classnamify(name)
if "potential" not in name.lower():
name = name + "Potential"
CustomPotential.__name__ = str(name)
# Hessian
if hessian:
hessfuncs = []
for var1 in vars:
for var2 in vars:
hessfuncs.append(lambdify(vars + pars, sympy.diff(expr,var1,var2),
dummify=False, modules='numpy'))
def _hessian(self, w, t):
kw = self.parameters.copy()
for k,v in kw.items():
kw[k] = v.value
for i,name in enumerate(var_names):
kw[name] = w[:,i]
# expand = [np.newaxis] * w[i].ndim
# This ain't pretty, bub
arrs = []
for f in hessfuncs:
hess_arr = np.array(f(**kw))
if hess_arr.shape != w[:,i].shape:
hess_arr = np.tile(hess_arr, reps=w[:,i].shape)
arrs.append(hess_arr)
hess = np.vstack(arrs)
return hess.reshape((ndim,ndim,len(w[:,i])))
CustomPotential._hessian = _hessian
CustomPotential.save = None
return CustomPotential | def function[from_equation, parameter[expr, vars, pars, name, hessian]]:
constant[
Create a potential class from an expression for the potential.
.. note::
This utility requires having `Sympy <http://www.sympy.org/>`_ installed.
.. warning::
These potentials are *not* pickle-able and cannot be written
out to YAML files (using `~gala.potential.PotentialBase.save()`)
Parameters
----------
expr : :class:`sympy.core.expr.Expr`, str
Either a ``Sympy`` expression, or a string that can be converted to
a ``Sympy`` expression.
vars : iterable
An iterable of variable names in the expression.
pars : iterable
An iterable of parameter names in the expression.
name : str (optional)
The name of the potential class returned.
hessian : bool (optional)
Generate a function to compute the Hessian.
Returns
-------
CustomPotential : `~gala.potential.PotentialBase`
A potential class that represents the input equation. To instantiate the
potential, use just like a normal class with parameters.
Examples
--------
Here we'll create a potential class for the harmonic oscillator
potential, :math:`\Phi(x) = \frac{1}{2}\,k\,x^2`::
>>> Potential = from_equation("1/2*k*x**2", vars="x", pars="k",
... name='HarmonicOscillator')
>>> p1 = Potential(k=1.)
>>> p1
<HarmonicOscillatorPotential: k=1.00 (dimensionless)>
The potential class (and object) is a fully-fledged subclass of
`~gala.potential.PotentialBase` and therefore has many useful methods.
For example, to integrate an orbit::
>>> orbit = p1.integrate_orbit([1.,0], dt=0.01, n_steps=1000)
]
<ast.Try object at 0x7da1b0e2c490>
variable[expr] assign[=] call[name[sympy].sympify, parameter[name[expr]]]
variable[vars] assign[=] <ast.ListComp object at 0x7da1b0e2c850>
variable[var_names] assign[=] <ast.ListComp object at 0x7da1b0e2ca60>
variable[pars] assign[=] <ast.ListComp object at 0x7da1b0e2cc10>
variable[par_names] assign[=] <ast.ListComp object at 0x7da1b0e2ce20>
variable[ndim] assign[=] call[name[len], parameter[name[vars]]]
variable[energyfunc] assign[=] call[name[lambdify], parameter[binary_operation[name[vars] + name[pars]], name[expr]]]
variable[gradfuncs] assign[=] list[[]]
for taget[name[var]] in starred[name[vars]] begin[:]
call[name[gradfuncs].append, parameter[call[name[lambdify], parameter[binary_operation[name[vars] + name[pars]], call[name[sympy].diff, parameter[name[expr], name[var]]]]]]]
class class[CustomPotential, parameter[]] begin[:]
def function[__init__, parameter[self, units]]:
for taget[name[par]] in starred[name[par_names]] begin[:]
if compare[name[par] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
<ast.Raise object at 0x7da1b0e2da20>
call[call[name[super], parameter[name[CustomPotential], name[self]]].__init__, parameter[]]
def function[_energy, parameter[self, w, t]]:
variable[kw] assign[=] call[name[self].parameters.copy, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0e2e0b0>, <ast.Name object at 0x7da1b0e2e0e0>]]] in starred[call[name[kw].items, parameter[]]] begin[:]
call[name[kw]][name[k]] assign[=] name[v].value
for taget[tuple[[<ast.Name object at 0x7da1b0e2e320>, <ast.Name object at 0x7da1b0e2e350>]]] in starred[call[name[enumerate], parameter[name[var_names]]]] begin[:]
call[name[kw]][name[name]] assign[=] call[name[w]][tuple[[<ast.Slice object at 0x7da1b0e2e560>, <ast.Name object at 0x7da1b0e2e590>]]]
return[call[name[np].array, parameter[call[name[energyfunc], parameter[]]]]]
def function[_gradient, parameter[self, w, t]]:
variable[kw] assign[=] call[name[self].parameters.copy, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0e2ea40>, <ast.Name object at 0x7da1b0e2ea70>]]] in starred[call[name[kw].items, parameter[]]] begin[:]
call[name[kw]][name[k]] assign[=] name[v].value
for taget[tuple[[<ast.Name object at 0x7da1b0e2ecb0>, <ast.Name object at 0x7da1b0e2ece0>]]] in starred[call[name[enumerate], parameter[name[var_names]]]] begin[:]
call[name[kw]][name[name]] assign[=] call[name[w]][tuple[[<ast.Slice object at 0x7da1b0e2eef0>, <ast.Name object at 0x7da1b0e2ef20>]]]
variable[grad] assign[=] call[name[np].vstack, parameter[<ast.ListComp object at 0x7da1b0e2c190>]]
return[name[grad].T]
if compare[name[name] is_not constant[None]] begin[:]
if compare[constant[potential] <ast.NotIn object at 0x7da2590d7190> call[name[name].lower, parameter[]]] begin[:]
variable[name] assign[=] binary_operation[name[name] + constant[Potential]]
name[CustomPotential].__name__ assign[=] call[name[str], parameter[name[name]]]
if name[hessian] begin[:]
variable[hessfuncs] assign[=] list[[]]
for taget[name[var1]] in starred[name[vars]] begin[:]
for taget[name[var2]] in starred[name[vars]] begin[:]
call[name[hessfuncs].append, parameter[call[name[lambdify], parameter[binary_operation[name[vars] + name[pars]], call[name[sympy].diff, parameter[name[expr], name[var1], name[var2]]]]]]]
def function[_hessian, parameter[self, w, t]]:
variable[kw] assign[=] call[name[self].parameters.copy, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0e4f220>, <ast.Name object at 0x7da1b0e4f8e0>]]] in starred[call[name[kw].items, parameter[]]] begin[:]
call[name[kw]][name[k]] assign[=] name[v].value
for taget[tuple[[<ast.Name object at 0x7da1b0e4f9d0>, <ast.Name object at 0x7da1b0e4fca0>]]] in starred[call[name[enumerate], parameter[name[var_names]]]] begin[:]
call[name[kw]][name[name]] assign[=] call[name[w]][tuple[[<ast.Slice object at 0x7da1b0e59390>, <ast.Name object at 0x7da1b0e59cf0>]]]
variable[arrs] assign[=] list[[]]
for taget[name[f]] in starred[name[hessfuncs]] begin[:]
variable[hess_arr] assign[=] call[name[np].array, parameter[call[name[f], parameter[]]]]
if compare[name[hess_arr].shape not_equal[!=] call[name[w]][tuple[[<ast.Slice object at 0x7da1b0e58f10>, <ast.Name object at 0x7da1b0e598d0>]]].shape] begin[:]
variable[hess_arr] assign[=] call[name[np].tile, parameter[name[hess_arr]]]
call[name[arrs].append, parameter[name[hess_arr]]]
variable[hess] assign[=] call[name[np].vstack, parameter[name[arrs]]]
return[call[name[hess].reshape, parameter[tuple[[<ast.Name object at 0x7da1b0e59930>, <ast.Name object at 0x7da1b0e59b10>, <ast.Call object at 0x7da1b0e599f0>]]]]]
name[CustomPotential]._hessian assign[=] name[_hessian]
name[CustomPotential].save assign[=] constant[None]
return[name[CustomPotential]] | keyword[def] identifier[from_equation] ( identifier[expr] , identifier[vars] , identifier[pars] , identifier[name] = keyword[None] , identifier[hessian] = keyword[False] ):
literal[string]
keyword[try] :
keyword[import] identifier[sympy]
keyword[from] identifier[sympy] . identifier[utilities] . identifier[lambdify] keyword[import] identifier[lambdify]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[ImportError] ( literal[string]
literal[string] )
identifier[expr] = identifier[sympy] . identifier[sympify] ( identifier[expr] )
identifier[vars] =[ identifier[sympy] . identifier[sympify] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[vars] ]
identifier[var_names] =[ identifier[v] . identifier[name] keyword[for] identifier[v] keyword[in] identifier[vars] ]
identifier[pars] =[ identifier[sympy] . identifier[sympify] ( identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[pars] ]
identifier[par_names] =[ identifier[p] . identifier[name] keyword[for] identifier[p] keyword[in] identifier[pars] ]
identifier[ndim] = identifier[len] ( identifier[vars] )
identifier[energyfunc] = identifier[lambdify] ( identifier[vars] + identifier[pars] , identifier[expr] , identifier[dummify] = keyword[False] , identifier[modules] = literal[string] )
identifier[gradfuncs] =[]
keyword[for] identifier[var] keyword[in] identifier[vars] :
identifier[gradfuncs] . identifier[append] ( identifier[lambdify] ( identifier[vars] + identifier[pars] , identifier[sympy] . identifier[diff] ( identifier[expr] , identifier[var] ), identifier[dummify] = keyword[False] , identifier[modules] = literal[string] ))
keyword[class] identifier[CustomPotential] ( identifier[PotentialBase] ):
keyword[def] identifier[__init__] ( identifier[self] , identifier[units] = keyword[None] ,** identifier[kwargs] ):
keyword[for] identifier[par] keyword[in] identifier[par_names] :
keyword[if] identifier[par] keyword[not] keyword[in] identifier[kwargs] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[par] ))
identifier[super] ( identifier[CustomPotential] , identifier[self] ). identifier[__init__] ( identifier[units] = identifier[units] ,
identifier[parameters] = identifier[kwargs] ,
identifier[ndim] = identifier[ndim] )
keyword[def] identifier[_energy] ( identifier[self] , identifier[w] , identifier[t] = literal[int] ):
identifier[kw] = identifier[self] . identifier[parameters] . identifier[copy] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kw] . identifier[items] ():
identifier[kw] [ identifier[k] ]= identifier[v] . identifier[value]
keyword[for] identifier[i] , identifier[name] keyword[in] identifier[enumerate] ( identifier[var_names] ):
identifier[kw] [ identifier[name] ]= identifier[w] [:, identifier[i] ]
keyword[return] identifier[np] . identifier[array] ( identifier[energyfunc] (** identifier[kw] ))
keyword[def] identifier[_gradient] ( identifier[self] , identifier[w] , identifier[t] = literal[int] ):
identifier[kw] = identifier[self] . identifier[parameters] . identifier[copy] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kw] . identifier[items] ():
identifier[kw] [ identifier[k] ]= identifier[v] . identifier[value]
keyword[for] identifier[i] , identifier[name] keyword[in] identifier[enumerate] ( identifier[var_names] ):
identifier[kw] [ identifier[name] ]= identifier[w] [:, identifier[i] ]
identifier[grad] = identifier[np] . identifier[vstack] ([ identifier[f] (** identifier[kw] )[ identifier[np] . identifier[newaxis] ] keyword[for] identifier[f] keyword[in] identifier[gradfuncs] ])
keyword[return] identifier[grad] . identifier[T]
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[name] . identifier[lower] ():
identifier[name] = identifier[name] + literal[string]
identifier[CustomPotential] . identifier[__name__] = identifier[str] ( identifier[name] )
keyword[if] identifier[hessian] :
identifier[hessfuncs] =[]
keyword[for] identifier[var1] keyword[in] identifier[vars] :
keyword[for] identifier[var2] keyword[in] identifier[vars] :
identifier[hessfuncs] . identifier[append] ( identifier[lambdify] ( identifier[vars] + identifier[pars] , identifier[sympy] . identifier[diff] ( identifier[expr] , identifier[var1] , identifier[var2] ),
identifier[dummify] = keyword[False] , identifier[modules] = literal[string] ))
keyword[def] identifier[_hessian] ( identifier[self] , identifier[w] , identifier[t] ):
identifier[kw] = identifier[self] . identifier[parameters] . identifier[copy] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kw] . identifier[items] ():
identifier[kw] [ identifier[k] ]= identifier[v] . identifier[value]
keyword[for] identifier[i] , identifier[name] keyword[in] identifier[enumerate] ( identifier[var_names] ):
identifier[kw] [ identifier[name] ]= identifier[w] [:, identifier[i] ]
identifier[arrs] =[]
keyword[for] identifier[f] keyword[in] identifier[hessfuncs] :
identifier[hess_arr] = identifier[np] . identifier[array] ( identifier[f] (** identifier[kw] ))
keyword[if] identifier[hess_arr] . identifier[shape] != identifier[w] [:, identifier[i] ]. identifier[shape] :
identifier[hess_arr] = identifier[np] . identifier[tile] ( identifier[hess_arr] , identifier[reps] = identifier[w] [:, identifier[i] ]. identifier[shape] )
identifier[arrs] . identifier[append] ( identifier[hess_arr] )
identifier[hess] = identifier[np] . identifier[vstack] ( identifier[arrs] )
keyword[return] identifier[hess] . identifier[reshape] (( identifier[ndim] , identifier[ndim] , identifier[len] ( identifier[w] [:, identifier[i] ])))
identifier[CustomPotential] . identifier[_hessian] = identifier[_hessian]
identifier[CustomPotential] . identifier[save] = keyword[None]
keyword[return] identifier[CustomPotential] | def from_equation(expr, vars, pars, name=None, hessian=False):
"""
Create a potential class from an expression for the potential.
.. note::
This utility requires having `Sympy <http://www.sympy.org/>`_ installed.
.. warning::
These potentials are *not* pickle-able and cannot be written
out to YAML files (using `~gala.potential.PotentialBase.save()`)
Parameters
----------
expr : :class:`sympy.core.expr.Expr`, str
Either a ``Sympy`` expression, or a string that can be converted to
a ``Sympy`` expression.
vars : iterable
An iterable of variable names in the expression.
pars : iterable
An iterable of parameter names in the expression.
name : str (optional)
The name of the potential class returned.
hessian : bool (optional)
Generate a function to compute the Hessian.
Returns
-------
CustomPotential : `~gala.potential.PotentialBase`
A potential class that represents the input equation. To instantiate the
potential, use just like a normal class with parameters.
Examples
--------
Here we'll create a potential class for the harmonic oscillator
potential, :math:`\\Phi(x) = \\frac{1}{2}\\,k\\,x^2`::
>>> Potential = from_equation("1/2*k*x**2", vars="x", pars="k",
... name='HarmonicOscillator')
>>> p1 = Potential(k=1.)
>>> p1
<HarmonicOscillatorPotential: k=1.00 (dimensionless)>
The potential class (and object) is a fully-fledged subclass of
`~gala.potential.PotentialBase` and therefore has many useful methods.
For example, to integrate an orbit::
>>> orbit = p1.integrate_orbit([1.,0], dt=0.01, n_steps=1000)
"""
try:
import sympy
from sympy.utilities.lambdify import lambdify # depends on [control=['try'], data=[]]
except ImportError:
raise ImportError("sympy is required to use 'from_equation()' potential class creation.") # depends on [control=['except'], data=[]]
# convert all input to Sympy objects
expr = sympy.sympify(expr)
vars = [sympy.sympify(v) for v in vars]
var_names = [v.name for v in vars]
pars = [sympy.sympify(p) for p in pars]
par_names = [p.name for p in pars]
ndim = len(vars)
# Energy / value
energyfunc = lambdify(vars + pars, expr, dummify=False, modules='numpy')
# Gradient
gradfuncs = []
for var in vars:
gradfuncs.append(lambdify(vars + pars, sympy.diff(expr, var), dummify=False, modules='numpy')) # depends on [control=['for'], data=['var']]
class CustomPotential(PotentialBase):
def __init__(self, units=None, **kwargs):
for par in par_names:
if par not in kwargs:
raise ValueError("You must specify a value for parameter '{}'.".format(par)) # depends on [control=['if'], data=['par']] # depends on [control=['for'], data=['par']]
super(CustomPotential, self).__init__(units=units, parameters=kwargs, ndim=ndim)
def _energy(self, w, t=0.0):
kw = self.parameters.copy()
for (k, v) in kw.items():
kw[k] = v.value # depends on [control=['for'], data=[]]
for (i, name) in enumerate(var_names):
kw[name] = w[:, i] # depends on [control=['for'], data=[]]
return np.array(energyfunc(**kw))
def _gradient(self, w, t=0.0):
kw = self.parameters.copy()
for (k, v) in kw.items():
kw[k] = v.value # depends on [control=['for'], data=[]]
for (i, name) in enumerate(var_names):
kw[name] = w[:, i] # depends on [control=['for'], data=[]]
grad = np.vstack([f(**kw)[np.newaxis] for f in gradfuncs])
return grad.T
if name is not None:
# name = _classnamify(name)
if 'potential' not in name.lower():
name = name + 'Potential' # depends on [control=['if'], data=[]]
CustomPotential.__name__ = str(name) # depends on [control=['if'], data=['name']]
# Hessian
if hessian:
hessfuncs = []
for var1 in vars:
for var2 in vars:
hessfuncs.append(lambdify(vars + pars, sympy.diff(expr, var1, var2), dummify=False, modules='numpy')) # depends on [control=['for'], data=['var2']] # depends on [control=['for'], data=['var1']]
def _hessian(self, w, t):
kw = self.parameters.copy()
for (k, v) in kw.items():
kw[k] = v.value # depends on [control=['for'], data=[]]
for (i, name) in enumerate(var_names):
kw[name] = w[:, i] # depends on [control=['for'], data=[]]
# expand = [np.newaxis] * w[i].ndim
# This ain't pretty, bub
arrs = []
for f in hessfuncs:
hess_arr = np.array(f(**kw))
if hess_arr.shape != w[:, i].shape:
hess_arr = np.tile(hess_arr, reps=w[:, i].shape) # depends on [control=['if'], data=[]]
arrs.append(hess_arr) # depends on [control=['for'], data=['f']]
hess = np.vstack(arrs)
return hess.reshape((ndim, ndim, len(w[:, i])))
CustomPotential._hessian = _hessian # depends on [control=['if'], data=[]]
CustomPotential.save = None
return CustomPotential |
def parse_singular_string(t, tag_name):
'''Parses the sole string value with name tag_name in tag t. Heavy-handed with the asserts.'''
pos = t.getElementsByTagName(tag_name)
assert(len(pos) == 1)
pos = pos[0]
assert(len(pos.childNodes) == 1)
return pos.childNodes[0].data | def function[parse_singular_string, parameter[t, tag_name]]:
constant[Parses the sole string value with name tag_name in tag t. Heavy-handed with the asserts.]
variable[pos] assign[=] call[name[t].getElementsByTagName, parameter[name[tag_name]]]
assert[compare[call[name[len], parameter[name[pos]]] equal[==] constant[1]]]
variable[pos] assign[=] call[name[pos]][constant[0]]
assert[compare[call[name[len], parameter[name[pos].childNodes]] equal[==] constant[1]]]
return[call[name[pos].childNodes][constant[0]].data] | keyword[def] identifier[parse_singular_string] ( identifier[t] , identifier[tag_name] ):
literal[string]
identifier[pos] = identifier[t] . identifier[getElementsByTagName] ( identifier[tag_name] )
keyword[assert] ( identifier[len] ( identifier[pos] )== literal[int] )
identifier[pos] = identifier[pos] [ literal[int] ]
keyword[assert] ( identifier[len] ( identifier[pos] . identifier[childNodes] )== literal[int] )
keyword[return] identifier[pos] . identifier[childNodes] [ literal[int] ]. identifier[data] | def parse_singular_string(t, tag_name):
"""Parses the sole string value with name tag_name in tag t. Heavy-handed with the asserts."""
pos = t.getElementsByTagName(tag_name)
assert len(pos) == 1
pos = pos[0]
assert len(pos.childNodes) == 1
return pos.childNodes[0].data |
def validate(self, expected_type, is_array, val):
"""
Validates that the expected type matches the value
Returns two element tuple: (bool, string)
- `bool` - True if valid, False if not
- `string` - Description of validation error, or None if valid
:Parameters:
expected_type
string name of the type expected. This may be a Barrister primitive, or a user defined type.
is_array
If True then require that the val be a list
val
Value to validate against the expected type
"""
if val == None:
if expected_type.optional:
return True, None
else:
return False, "Value cannot be null"
elif is_array:
if not isinstance(val, list):
return self._type_err(val, "list")
else:
for v in val:
ok, msg = self.validate(expected_type, False, v)
if not ok:
return ok, msg
elif expected_type.type == "int":
if not isinstance(val, (long, int)):
return self._type_err(val, "int")
elif expected_type.type == "float":
if not isinstance(val, (float, int, long)):
return self._type_err(val, "float")
elif expected_type.type == "bool":
if not isinstance(val, bool):
return self._type_err(val, "bool")
elif expected_type.type == "string":
if not isinstance(val, (str, unicode)):
return self._type_err(val, "string")
else:
return self.get(expected_type.type).validate(val)
return True, None | def function[validate, parameter[self, expected_type, is_array, val]]:
constant[
Validates that the expected type matches the value
Returns two element tuple: (bool, string)
- `bool` - True if valid, False if not
- `string` - Description of validation error, or None if valid
:Parameters:
expected_type
string name of the type expected. This may be a Barrister primitive, or a user defined type.
is_array
If True then require that the val be a list
val
Value to validate against the expected type
]
if compare[name[val] equal[==] constant[None]] begin[:]
if name[expected_type].optional begin[:]
return[tuple[[<ast.Constant object at 0x7da1b2298820>, <ast.Constant object at 0x7da1b22982e0>]]]
return[tuple[[<ast.Constant object at 0x7da2044c2a70>, <ast.Constant object at 0x7da2044c22c0>]]] | keyword[def] identifier[validate] ( identifier[self] , identifier[expected_type] , identifier[is_array] , identifier[val] ):
literal[string]
keyword[if] identifier[val] == keyword[None] :
keyword[if] identifier[expected_type] . identifier[optional] :
keyword[return] keyword[True] , keyword[None]
keyword[else] :
keyword[return] keyword[False] , literal[string]
keyword[elif] identifier[is_array] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[list] ):
keyword[return] identifier[self] . identifier[_type_err] ( identifier[val] , literal[string] )
keyword[else] :
keyword[for] identifier[v] keyword[in] identifier[val] :
identifier[ok] , identifier[msg] = identifier[self] . identifier[validate] ( identifier[expected_type] , keyword[False] , identifier[v] )
keyword[if] keyword[not] identifier[ok] :
keyword[return] identifier[ok] , identifier[msg]
keyword[elif] identifier[expected_type] . identifier[type] == literal[string] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] ,( identifier[long] , identifier[int] )):
keyword[return] identifier[self] . identifier[_type_err] ( identifier[val] , literal[string] )
keyword[elif] identifier[expected_type] . identifier[type] == literal[string] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] ,( identifier[float] , identifier[int] , identifier[long] )):
keyword[return] identifier[self] . identifier[_type_err] ( identifier[val] , literal[string] )
keyword[elif] identifier[expected_type] . identifier[type] == literal[string] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[bool] ):
keyword[return] identifier[self] . identifier[_type_err] ( identifier[val] , literal[string] )
keyword[elif] identifier[expected_type] . identifier[type] == literal[string] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] ,( identifier[str] , identifier[unicode] )):
keyword[return] identifier[self] . identifier[_type_err] ( identifier[val] , literal[string] )
keyword[else] :
keyword[return] identifier[self] . identifier[get] ( identifier[expected_type] . identifier[type] ). identifier[validate] ( identifier[val] )
keyword[return] keyword[True] , keyword[None] | def validate(self, expected_type, is_array, val):
"""
Validates that the expected type matches the value
Returns two element tuple: (bool, string)
- `bool` - True if valid, False if not
- `string` - Description of validation error, or None if valid
:Parameters:
expected_type
string name of the type expected. This may be a Barrister primitive, or a user defined type.
is_array
If True then require that the val be a list
val
Value to validate against the expected type
"""
if val == None:
if expected_type.optional:
return (True, None) # depends on [control=['if'], data=[]]
else:
return (False, 'Value cannot be null') # depends on [control=['if'], data=[]]
elif is_array:
if not isinstance(val, list):
return self._type_err(val, 'list') # depends on [control=['if'], data=[]]
else:
for v in val:
(ok, msg) = self.validate(expected_type, False, v)
if not ok:
return (ok, msg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]]
elif expected_type.type == 'int':
if not isinstance(val, (long, int)):
return self._type_err(val, 'int') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif expected_type.type == 'float':
if not isinstance(val, (float, int, long)):
return self._type_err(val, 'float') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif expected_type.type == 'bool':
if not isinstance(val, bool):
return self._type_err(val, 'bool') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif expected_type.type == 'string':
if not isinstance(val, (str, unicode)):
return self._type_err(val, 'string') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
return self.get(expected_type.type).validate(val)
return (True, None) |
def mousePressEvent(self, event):
"""Launches edit of cell if first column clicked, otherwise passes to super class"""
index = self.indexAt(event.pos())
if index.isValid():
if index.column() == 0:
self.edit(index, QtGui.QAbstractItemView.DoubleClicked, event)
else:
super(ProtocolView, self).mousePressEvent(event) | def function[mousePressEvent, parameter[self, event]]:
constant[Launches edit of cell if first column clicked, otherwise passes to super class]
variable[index] assign[=] call[name[self].indexAt, parameter[call[name[event].pos, parameter[]]]]
if call[name[index].isValid, parameter[]] begin[:]
if compare[call[name[index].column, parameter[]] equal[==] constant[0]] begin[:]
call[name[self].edit, parameter[name[index], name[QtGui].QAbstractItemView.DoubleClicked, name[event]]] | keyword[def] identifier[mousePressEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[index] = identifier[self] . identifier[indexAt] ( identifier[event] . identifier[pos] ())
keyword[if] identifier[index] . identifier[isValid] ():
keyword[if] identifier[index] . identifier[column] ()== literal[int] :
identifier[self] . identifier[edit] ( identifier[index] , identifier[QtGui] . identifier[QAbstractItemView] . identifier[DoubleClicked] , identifier[event] )
keyword[else] :
identifier[super] ( identifier[ProtocolView] , identifier[self] ). identifier[mousePressEvent] ( identifier[event] ) | def mousePressEvent(self, event):
"""Launches edit of cell if first column clicked, otherwise passes to super class"""
index = self.indexAt(event.pos())
if index.isValid():
if index.column() == 0:
self.edit(index, QtGui.QAbstractItemView.DoubleClicked, event) # depends on [control=['if'], data=[]]
else:
super(ProtocolView, self).mousePressEvent(event) # depends on [control=['if'], data=[]] |
def flatten_list(x: List[Any]) -> List[Any]:
"""
Converts a list of lists into a flat list.
Args:
x: list of lists
Returns:
flat list
As per
http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python
""" # noqa
return [item for sublist in x for item in sublist] | def function[flatten_list, parameter[x]]:
constant[
Converts a list of lists into a flat list.
Args:
x: list of lists
Returns:
flat list
As per
http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python
]
return[<ast.ListComp object at 0x7da1b18348b0>] | keyword[def] identifier[flatten_list] ( identifier[x] : identifier[List] [ identifier[Any] ])-> identifier[List] [ identifier[Any] ]:
literal[string]
keyword[return] [ identifier[item] keyword[for] identifier[sublist] keyword[in] identifier[x] keyword[for] identifier[item] keyword[in] identifier[sublist] ] | def flatten_list(x: List[Any]) -> List[Any]:
"""
Converts a list of lists into a flat list.
Args:
x: list of lists
Returns:
flat list
As per
http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python
""" # noqa
return [item for sublist in x for item in sublist] |
def drawBernoulli(N,p=0.5,seed=0):
'''
Generates arrays of booleans drawn from a simple Bernoulli distribution.
The input p can be a float or a list-like of floats; its length T determines
the number of entries in the output. The t-th entry of the output is an
array of N booleans which are True with probability p[t] and False otherwise.
Arguments
---------
N : int
Number of draws in each row.
p : float or [float]
Probability or probabilities of the event occurring (True).
seed : int
Seed for random number generator.
Returns
-------
draws : np.array or [np.array]
T-length list of arrays of Bernoulli draws each of size N, or a single
array of size N (if sigma is a scalar).
'''
# Set up the RNG
RNG = np.random.RandomState(seed)
if isinstance(p,float):# Return a single array of size N
draws = RNG.uniform(size=N) < p
else: # Set up empty list to populate, then loop and populate list with draws:
draws=[]
for t in range(len(p)):
draws.append(RNG.uniform(size=N) < p[t])
return draws | def function[drawBernoulli, parameter[N, p, seed]]:
constant[
Generates arrays of booleans drawn from a simple Bernoulli distribution.
The input p can be a float or a list-like of floats; its length T determines
the number of entries in the output. The t-th entry of the output is an
array of N booleans which are True with probability p[t] and False otherwise.
Arguments
---------
N : int
Number of draws in each row.
p : float or [float]
Probability or probabilities of the event occurring (True).
seed : int
Seed for random number generator.
Returns
-------
draws : np.array or [np.array]
T-length list of arrays of Bernoulli draws each of size N, or a single
array of size N (if sigma is a scalar).
]
variable[RNG] assign[=] call[name[np].random.RandomState, parameter[name[seed]]]
if call[name[isinstance], parameter[name[p], name[float]]] begin[:]
variable[draws] assign[=] compare[call[name[RNG].uniform, parameter[]] less[<] name[p]]
return[name[draws]] | keyword[def] identifier[drawBernoulli] ( identifier[N] , identifier[p] = literal[int] , identifier[seed] = literal[int] ):
literal[string]
identifier[RNG] = identifier[np] . identifier[random] . identifier[RandomState] ( identifier[seed] )
keyword[if] identifier[isinstance] ( identifier[p] , identifier[float] ):
identifier[draws] = identifier[RNG] . identifier[uniform] ( identifier[size] = identifier[N] )< identifier[p]
keyword[else] :
identifier[draws] =[]
keyword[for] identifier[t] keyword[in] identifier[range] ( identifier[len] ( identifier[p] )):
identifier[draws] . identifier[append] ( identifier[RNG] . identifier[uniform] ( identifier[size] = identifier[N] )< identifier[p] [ identifier[t] ])
keyword[return] identifier[draws] | def drawBernoulli(N, p=0.5, seed=0):
"""
Generates arrays of booleans drawn from a simple Bernoulli distribution.
The input p can be a float or a list-like of floats; its length T determines
the number of entries in the output. The t-th entry of the output is an
array of N booleans which are True with probability p[t] and False otherwise.
Arguments
---------
N : int
Number of draws in each row.
p : float or [float]
Probability or probabilities of the event occurring (True).
seed : int
Seed for random number generator.
Returns
-------
draws : np.array or [np.array]
T-length list of arrays of Bernoulli draws each of size N, or a single
array of size N (if sigma is a scalar).
"""
# Set up the RNG
RNG = np.random.RandomState(seed)
if isinstance(p, float): # Return a single array of size N
draws = RNG.uniform(size=N) < p # depends on [control=['if'], data=[]]
else: # Set up empty list to populate, then loop and populate list with draws:
draws = []
for t in range(len(p)):
draws.append(RNG.uniform(size=N) < p[t]) # depends on [control=['for'], data=['t']]
return draws |
def cli(env):
"""List routing types."""
mgr = SoftLayer.LoadBalancerManager(env.client)
routing_types = mgr.get_routing_types()
table = formatting.KeyValueTable(['ID', 'Name'])
table.align['ID'] = 'l'
table.align['Name'] = 'l'
table.sortby = 'ID'
for routing_type in routing_types:
table.add_row([routing_type['id'], routing_type['name']])
env.fout(table) | def function[cli, parameter[env]]:
constant[List routing types.]
variable[mgr] assign[=] call[name[SoftLayer].LoadBalancerManager, parameter[name[env].client]]
variable[routing_types] assign[=] call[name[mgr].get_routing_types, parameter[]]
variable[table] assign[=] call[name[formatting].KeyValueTable, parameter[list[[<ast.Constant object at 0x7da20c7ca200>, <ast.Constant object at 0x7da20c7ca350>]]]]
call[name[table].align][constant[ID]] assign[=] constant[l]
call[name[table].align][constant[Name]] assign[=] constant[l]
name[table].sortby assign[=] constant[ID]
for taget[name[routing_type]] in starred[name[routing_types]] begin[:]
call[name[table].add_row, parameter[list[[<ast.Subscript object at 0x7da18f58e9b0>, <ast.Subscript object at 0x7da18f58e1a0>]]]]
call[name[env].fout, parameter[name[table]]] | keyword[def] identifier[cli] ( identifier[env] ):
literal[string]
identifier[mgr] = identifier[SoftLayer] . identifier[LoadBalancerManager] ( identifier[env] . identifier[client] )
identifier[routing_types] = identifier[mgr] . identifier[get_routing_types] ()
identifier[table] = identifier[formatting] . identifier[KeyValueTable] ([ literal[string] , literal[string] ])
identifier[table] . identifier[align] [ literal[string] ]= literal[string]
identifier[table] . identifier[align] [ literal[string] ]= literal[string]
identifier[table] . identifier[sortby] = literal[string]
keyword[for] identifier[routing_type] keyword[in] identifier[routing_types] :
identifier[table] . identifier[add_row] ([ identifier[routing_type] [ literal[string] ], identifier[routing_type] [ literal[string] ]])
identifier[env] . identifier[fout] ( identifier[table] ) | def cli(env):
"""List routing types."""
mgr = SoftLayer.LoadBalancerManager(env.client)
routing_types = mgr.get_routing_types()
table = formatting.KeyValueTable(['ID', 'Name'])
table.align['ID'] = 'l'
table.align['Name'] = 'l'
table.sortby = 'ID'
for routing_type in routing_types:
table.add_row([routing_type['id'], routing_type['name']]) # depends on [control=['for'], data=['routing_type']]
env.fout(table) |
def is_commentable(obj_or_class):
"""
:param obj_or_class: a class or instance
"""
if isinstance(obj_or_class, type):
return issubclass(obj_or_class, Commentable)
if not isinstance(obj_or_class, Commentable):
return False
if obj_or_class.id is None:
return False
return True | def function[is_commentable, parameter[obj_or_class]]:
constant[
:param obj_or_class: a class or instance
]
if call[name[isinstance], parameter[name[obj_or_class], name[type]]] begin[:]
return[call[name[issubclass], parameter[name[obj_or_class], name[Commentable]]]]
if <ast.UnaryOp object at 0x7da20c6c7880> begin[:]
return[constant[False]]
if compare[name[obj_or_class].id is constant[None]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[is_commentable] ( identifier[obj_or_class] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj_or_class] , identifier[type] ):
keyword[return] identifier[issubclass] ( identifier[obj_or_class] , identifier[Commentable] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[obj_or_class] , identifier[Commentable] ):
keyword[return] keyword[False]
keyword[if] identifier[obj_or_class] . identifier[id] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_commentable(obj_or_class):
"""
:param obj_or_class: a class or instance
"""
if isinstance(obj_or_class, type):
return issubclass(obj_or_class, Commentable) # depends on [control=['if'], data=[]]
if not isinstance(obj_or_class, Commentable):
return False # depends on [control=['if'], data=[]]
if obj_or_class.id is None:
return False # depends on [control=['if'], data=[]]
return True |
def run(self, scheduler_schedule_id, **kwargs):
"""
Deactivates the schedule specified by the ID `scheduler_schedule_id` in
the scheduler service.
Arguments:
scheduler_schedule_id {str} -- The ID of the schedule to deactivate
"""
log = self.get_logger(**kwargs)
self.scheduler.update_schedule(scheduler_schedule_id, {"active": False})
log.info(
"Deactivated schedule %s in the scheduler service", scheduler_schedule_id
) | def function[run, parameter[self, scheduler_schedule_id]]:
constant[
Deactivates the schedule specified by the ID `scheduler_schedule_id` in
the scheduler service.
Arguments:
scheduler_schedule_id {str} -- The ID of the schedule to deactivate
]
variable[log] assign[=] call[name[self].get_logger, parameter[]]
call[name[self].scheduler.update_schedule, parameter[name[scheduler_schedule_id], dictionary[[<ast.Constant object at 0x7da18f00efb0>], [<ast.Constant object at 0x7da18f00c790>]]]]
call[name[log].info, parameter[constant[Deactivated schedule %s in the scheduler service], name[scheduler_schedule_id]]] | keyword[def] identifier[run] ( identifier[self] , identifier[scheduler_schedule_id] ,** identifier[kwargs] ):
literal[string]
identifier[log] = identifier[self] . identifier[get_logger] (** identifier[kwargs] )
identifier[self] . identifier[scheduler] . identifier[update_schedule] ( identifier[scheduler_schedule_id] ,{ literal[string] : keyword[False] })
identifier[log] . identifier[info] (
literal[string] , identifier[scheduler_schedule_id]
) | def run(self, scheduler_schedule_id, **kwargs):
"""
Deactivates the schedule specified by the ID `scheduler_schedule_id` in
the scheduler service.
Arguments:
scheduler_schedule_id {str} -- The ID of the schedule to deactivate
"""
log = self.get_logger(**kwargs)
self.scheduler.update_schedule(scheduler_schedule_id, {'active': False})
log.info('Deactivated schedule %s in the scheduler service', scheduler_schedule_id) |
def channels_close(self, room_id, **kwargs):
"""Removes the channel from the user’s list of channels."""
return self.__call_api_post('channels.close', roomId=room_id, kwargs=kwargs) | def function[channels_close, parameter[self, room_id]]:
constant[Removes the channel from the user’s list of channels.]
return[call[name[self].__call_api_post, parameter[constant[channels.close]]]] | keyword[def] identifier[channels_close] ( identifier[self] , identifier[room_id] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[__call_api_post] ( literal[string] , identifier[roomId] = identifier[room_id] , identifier[kwargs] = identifier[kwargs] ) | def channels_close(self, room_id, **kwargs):
"""Removes the channel from the user’s list of channels."""
return self.__call_api_post('channels.close', roomId=room_id, kwargs=kwargs) |
def reset(self):
"""
Stop timer and execute ``on_reset`` if overflow occured.
"""
if self.state is not TimerState.stopped:
if self.on_reset and self.state is TimerState.overflow:
if callable(self.on_reset):
self.on_reset()
else:
execute(self.on_reset)
self.state = TimerState.stopped | def function[reset, parameter[self]]:
constant[
Stop timer and execute ``on_reset`` if overflow occured.
]
if compare[name[self].state is_not name[TimerState].stopped] begin[:]
if <ast.BoolOp object at 0x7da1b07947c0> begin[:]
if call[name[callable], parameter[name[self].on_reset]] begin[:]
call[name[self].on_reset, parameter[]]
name[self].state assign[=] name[TimerState].stopped | keyword[def] identifier[reset] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[state] keyword[is] keyword[not] identifier[TimerState] . identifier[stopped] :
keyword[if] identifier[self] . identifier[on_reset] keyword[and] identifier[self] . identifier[state] keyword[is] identifier[TimerState] . identifier[overflow] :
keyword[if] identifier[callable] ( identifier[self] . identifier[on_reset] ):
identifier[self] . identifier[on_reset] ()
keyword[else] :
identifier[execute] ( identifier[self] . identifier[on_reset] )
identifier[self] . identifier[state] = identifier[TimerState] . identifier[stopped] | def reset(self):
"""
Stop timer and execute ``on_reset`` if overflow occured.
"""
if self.state is not TimerState.stopped:
if self.on_reset and self.state is TimerState.overflow:
if callable(self.on_reset):
self.on_reset() # depends on [control=['if'], data=[]]
else:
execute(self.on_reset) # depends on [control=['if'], data=[]]
self.state = TimerState.stopped # depends on [control=['if'], data=[]] |
def encode(self, pdu):
"""encode the contents of the NPCI into the PDU."""
if _debug: NPCI._debug("encode %s", repr(pdu))
PCI.update(pdu, self)
# only version 1 messages supported
pdu.put(self.npduVersion)
# build the flags
if self.npduNetMessage is not None:
netLayerMessage = 0x80
else:
netLayerMessage = 0x00
# map the destination address
dnetPresent = 0x00
if self.npduDADR is not None:
dnetPresent = 0x20
# map the source address
snetPresent = 0x00
if self.npduSADR is not None:
snetPresent = 0x08
# encode the control octet
control = netLayerMessage | dnetPresent | snetPresent
if self.pduExpectingReply:
control |= 0x04
control |= (self.pduNetworkPriority & 0x03)
self.npduControl = control
pdu.put(control)
# make sure expecting reply and priority get passed down
pdu.pduExpectingReply = self.pduExpectingReply
pdu.pduNetworkPriority = self.pduNetworkPriority
# encode the destination address
if dnetPresent:
if self.npduDADR.addrType == Address.remoteStationAddr:
pdu.put_short(self.npduDADR.addrNet)
pdu.put(self.npduDADR.addrLen)
pdu.put_data(self.npduDADR.addrAddr)
elif self.npduDADR.addrType == Address.remoteBroadcastAddr:
pdu.put_short(self.npduDADR.addrNet)
pdu.put(0)
elif self.npduDADR.addrType == Address.globalBroadcastAddr:
pdu.put_short(0xFFFF)
pdu.put(0)
# encode the source address
if snetPresent:
pdu.put_short(self.npduSADR.addrNet)
pdu.put(self.npduSADR.addrLen)
pdu.put_data(self.npduSADR.addrAddr)
# put the hop count
if dnetPresent:
pdu.put(self.npduHopCount)
# put the network layer message type (if present)
if netLayerMessage:
pdu.put(self.npduNetMessage)
# put the vendor ID
if (self.npduNetMessage >= 0x80) and (self.npduNetMessage <= 0xFF):
pdu.put_short(self.npduVendorID) | def function[encode, parameter[self, pdu]]:
constant[encode the contents of the NPCI into the PDU.]
if name[_debug] begin[:]
call[name[NPCI]._debug, parameter[constant[encode %s], call[name[repr], parameter[name[pdu]]]]]
call[name[PCI].update, parameter[name[pdu], name[self]]]
call[name[pdu].put, parameter[name[self].npduVersion]]
if compare[name[self].npduNetMessage is_not constant[None]] begin[:]
variable[netLayerMessage] assign[=] constant[128]
variable[dnetPresent] assign[=] constant[0]
if compare[name[self].npduDADR is_not constant[None]] begin[:]
variable[dnetPresent] assign[=] constant[32]
variable[snetPresent] assign[=] constant[0]
if compare[name[self].npduSADR is_not constant[None]] begin[:]
variable[snetPresent] assign[=] constant[8]
variable[control] assign[=] binary_operation[binary_operation[name[netLayerMessage] <ast.BitOr object at 0x7da2590d6aa0> name[dnetPresent]] <ast.BitOr object at 0x7da2590d6aa0> name[snetPresent]]
if name[self].pduExpectingReply begin[:]
<ast.AugAssign object at 0x7da1b08e5e70>
<ast.AugAssign object at 0x7da1b08e7910>
name[self].npduControl assign[=] name[control]
call[name[pdu].put, parameter[name[control]]]
name[pdu].pduExpectingReply assign[=] name[self].pduExpectingReply
name[pdu].pduNetworkPriority assign[=] name[self].pduNetworkPriority
if name[dnetPresent] begin[:]
if compare[name[self].npduDADR.addrType equal[==] name[Address].remoteStationAddr] begin[:]
call[name[pdu].put_short, parameter[name[self].npduDADR.addrNet]]
call[name[pdu].put, parameter[name[self].npduDADR.addrLen]]
call[name[pdu].put_data, parameter[name[self].npduDADR.addrAddr]]
if name[snetPresent] begin[:]
call[name[pdu].put_short, parameter[name[self].npduSADR.addrNet]]
call[name[pdu].put, parameter[name[self].npduSADR.addrLen]]
call[name[pdu].put_data, parameter[name[self].npduSADR.addrAddr]]
if name[dnetPresent] begin[:]
call[name[pdu].put, parameter[name[self].npduHopCount]]
if name[netLayerMessage] begin[:]
call[name[pdu].put, parameter[name[self].npduNetMessage]]
if <ast.BoolOp object at 0x7da1b08e68c0> begin[:]
call[name[pdu].put_short, parameter[name[self].npduVendorID]] | keyword[def] identifier[encode] ( identifier[self] , identifier[pdu] ):
literal[string]
keyword[if] identifier[_debug] : identifier[NPCI] . identifier[_debug] ( literal[string] , identifier[repr] ( identifier[pdu] ))
identifier[PCI] . identifier[update] ( identifier[pdu] , identifier[self] )
identifier[pdu] . identifier[put] ( identifier[self] . identifier[npduVersion] )
keyword[if] identifier[self] . identifier[npduNetMessage] keyword[is] keyword[not] keyword[None] :
identifier[netLayerMessage] = literal[int]
keyword[else] :
identifier[netLayerMessage] = literal[int]
identifier[dnetPresent] = literal[int]
keyword[if] identifier[self] . identifier[npduDADR] keyword[is] keyword[not] keyword[None] :
identifier[dnetPresent] = literal[int]
identifier[snetPresent] = literal[int]
keyword[if] identifier[self] . identifier[npduSADR] keyword[is] keyword[not] keyword[None] :
identifier[snetPresent] = literal[int]
identifier[control] = identifier[netLayerMessage] | identifier[dnetPresent] | identifier[snetPresent]
keyword[if] identifier[self] . identifier[pduExpectingReply] :
identifier[control] |= literal[int]
identifier[control] |=( identifier[self] . identifier[pduNetworkPriority] & literal[int] )
identifier[self] . identifier[npduControl] = identifier[control]
identifier[pdu] . identifier[put] ( identifier[control] )
identifier[pdu] . identifier[pduExpectingReply] = identifier[self] . identifier[pduExpectingReply]
identifier[pdu] . identifier[pduNetworkPriority] = identifier[self] . identifier[pduNetworkPriority]
keyword[if] identifier[dnetPresent] :
keyword[if] identifier[self] . identifier[npduDADR] . identifier[addrType] == identifier[Address] . identifier[remoteStationAddr] :
identifier[pdu] . identifier[put_short] ( identifier[self] . identifier[npduDADR] . identifier[addrNet] )
identifier[pdu] . identifier[put] ( identifier[self] . identifier[npduDADR] . identifier[addrLen] )
identifier[pdu] . identifier[put_data] ( identifier[self] . identifier[npduDADR] . identifier[addrAddr] )
keyword[elif] identifier[self] . identifier[npduDADR] . identifier[addrType] == identifier[Address] . identifier[remoteBroadcastAddr] :
identifier[pdu] . identifier[put_short] ( identifier[self] . identifier[npduDADR] . identifier[addrNet] )
identifier[pdu] . identifier[put] ( literal[int] )
keyword[elif] identifier[self] . identifier[npduDADR] . identifier[addrType] == identifier[Address] . identifier[globalBroadcastAddr] :
identifier[pdu] . identifier[put_short] ( literal[int] )
identifier[pdu] . identifier[put] ( literal[int] )
keyword[if] identifier[snetPresent] :
identifier[pdu] . identifier[put_short] ( identifier[self] . identifier[npduSADR] . identifier[addrNet] )
identifier[pdu] . identifier[put] ( identifier[self] . identifier[npduSADR] . identifier[addrLen] )
identifier[pdu] . identifier[put_data] ( identifier[self] . identifier[npduSADR] . identifier[addrAddr] )
keyword[if] identifier[dnetPresent] :
identifier[pdu] . identifier[put] ( identifier[self] . identifier[npduHopCount] )
keyword[if] identifier[netLayerMessage] :
identifier[pdu] . identifier[put] ( identifier[self] . identifier[npduNetMessage] )
keyword[if] ( identifier[self] . identifier[npduNetMessage] >= literal[int] ) keyword[and] ( identifier[self] . identifier[npduNetMessage] <= literal[int] ):
identifier[pdu] . identifier[put_short] ( identifier[self] . identifier[npduVendorID] ) | def encode(self, pdu):
"""encode the contents of the NPCI into the PDU."""
if _debug:
NPCI._debug('encode %s', repr(pdu)) # depends on [control=['if'], data=[]]
PCI.update(pdu, self)
# only version 1 messages supported
pdu.put(self.npduVersion)
# build the flags
if self.npduNetMessage is not None:
netLayerMessage = 128 # depends on [control=['if'], data=[]]
else:
netLayerMessage = 0
# map the destination address
dnetPresent = 0
if self.npduDADR is not None:
dnetPresent = 32 # depends on [control=['if'], data=[]]
# map the source address
snetPresent = 0
if self.npduSADR is not None:
snetPresent = 8 # depends on [control=['if'], data=[]]
# encode the control octet
control = netLayerMessage | dnetPresent | snetPresent
if self.pduExpectingReply:
control |= 4 # depends on [control=['if'], data=[]]
control |= self.pduNetworkPriority & 3
self.npduControl = control
pdu.put(control)
# make sure expecting reply and priority get passed down
pdu.pduExpectingReply = self.pduExpectingReply
pdu.pduNetworkPriority = self.pduNetworkPriority
# encode the destination address
if dnetPresent:
if self.npduDADR.addrType == Address.remoteStationAddr:
pdu.put_short(self.npduDADR.addrNet)
pdu.put(self.npduDADR.addrLen)
pdu.put_data(self.npduDADR.addrAddr) # depends on [control=['if'], data=[]]
elif self.npduDADR.addrType == Address.remoteBroadcastAddr:
pdu.put_short(self.npduDADR.addrNet)
pdu.put(0) # depends on [control=['if'], data=[]]
elif self.npduDADR.addrType == Address.globalBroadcastAddr:
pdu.put_short(65535)
pdu.put(0) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# encode the source address
if snetPresent:
pdu.put_short(self.npduSADR.addrNet)
pdu.put(self.npduSADR.addrLen)
pdu.put_data(self.npduSADR.addrAddr) # depends on [control=['if'], data=[]]
# put the hop count
if dnetPresent:
pdu.put(self.npduHopCount) # depends on [control=['if'], data=[]]
# put the network layer message type (if present)
if netLayerMessage:
pdu.put(self.npduNetMessage)
# put the vendor ID
if self.npduNetMessage >= 128 and self.npduNetMessage <= 255:
pdu.put_short(self.npduVendorID) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def _open_binary_stream(uri, mode, transport_params):
"""Open an arbitrary URI in the specified binary mode.
Not all modes are supported for all protocols.
:arg uri: The URI to open. May be a string, or something else.
:arg str mode: The mode to open with. Must be rb, wb or ab.
:arg transport_params: Keyword argumens for the transport layer.
:returns: A file object and the filename
:rtype: tuple
"""
if mode not in ('rb', 'rb+', 'wb', 'wb+', 'ab', 'ab+'):
#
# This should really be a ValueError, but for the sake of compatibility
# with older versions, which raise NotImplementedError, we do the same.
#
raise NotImplementedError('unsupported mode: %r' % mode)
if isinstance(uri, six.string_types):
# this method just routes the request to classes handling the specific storage
# schemes, depending on the URI protocol in `uri`
filename = uri.split('/')[-1]
parsed_uri = _parse_uri(uri)
unsupported = "%r mode not supported for %r scheme" % (mode, parsed_uri.scheme)
if parsed_uri.scheme == "file":
fobj = io.open(parsed_uri.uri_path, mode)
return fobj, filename
elif parsed_uri.scheme in smart_open_ssh.SCHEMES:
fobj = smart_open_ssh.open(
parsed_uri.uri_path,
mode,
host=parsed_uri.host,
user=parsed_uri.user,
port=parsed_uri.port,
)
return fobj, filename
elif parsed_uri.scheme in smart_open_s3.SUPPORTED_SCHEMES:
return _s3_open_uri(parsed_uri, mode, transport_params), filename
elif parsed_uri.scheme == "hdfs":
_check_kwargs(smart_open_hdfs.open, transport_params)
return smart_open_hdfs.open(parsed_uri.uri_path, mode), filename
elif parsed_uri.scheme == "webhdfs":
kw = _check_kwargs(smart_open_webhdfs.open, transport_params)
return smart_open_webhdfs.open(parsed_uri.uri_path, mode, **kw), filename
elif parsed_uri.scheme.startswith('http'):
#
# The URI may contain a query string and fragments, which interfere
# with our compressed/uncompressed estimation, so we strip them.
#
filename = P.basename(urlparse.urlparse(uri).path)
kw = _check_kwargs(smart_open_http.open, transport_params)
return smart_open_http.open(uri, mode, **kw), filename
else:
raise NotImplementedError("scheme %r is not supported", parsed_uri.scheme)
elif hasattr(uri, 'read'):
# simply pass-through if already a file-like
# we need to return something as the file name, but we don't know what
# so we probe for uri.name (e.g., this works with open() or tempfile.NamedTemporaryFile)
# if the value ends with COMPRESSED_EXT, we will note it in _compression_wrapper()
# if there is no such an attribute, we return "unknown" - this effectively disables any compression
filename = getattr(uri, 'name', 'unknown')
return uri, filename
else:
raise TypeError("don't know how to handle uri %r" % uri) | def function[_open_binary_stream, parameter[uri, mode, transport_params]]:
constant[Open an arbitrary URI in the specified binary mode.
Not all modes are supported for all protocols.
:arg uri: The URI to open. May be a string, or something else.
:arg str mode: The mode to open with. Must be rb, wb or ab.
:arg transport_params: Keyword argumens for the transport layer.
:returns: A file object and the filename
:rtype: tuple
]
if compare[name[mode] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c794580>, <ast.Constant object at 0x7da20c796a10>, <ast.Constant object at 0x7da20c794ee0>, <ast.Constant object at 0x7da20c795de0>, <ast.Constant object at 0x7da20c7955a0>, <ast.Constant object at 0x7da20c7942b0>]]] begin[:]
<ast.Raise object at 0x7da20c795630>
if call[name[isinstance], parameter[name[uri], name[six].string_types]] begin[:]
variable[filename] assign[=] call[call[name[uri].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da20c7966e0>]
variable[parsed_uri] assign[=] call[name[_parse_uri], parameter[name[uri]]]
variable[unsupported] assign[=] binary_operation[constant[%r mode not supported for %r scheme] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c7948e0>, <ast.Attribute object at 0x7da20c794490>]]]
if compare[name[parsed_uri].scheme equal[==] constant[file]] begin[:]
variable[fobj] assign[=] call[name[io].open, parameter[name[parsed_uri].uri_path, name[mode]]]
return[tuple[[<ast.Name object at 0x7da20c7958d0>, <ast.Name object at 0x7da20c795b70>]]] | keyword[def] identifier[_open_binary_stream] ( identifier[uri] , identifier[mode] , identifier[transport_params] ):
literal[string]
keyword[if] identifier[mode] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ):
keyword[raise] identifier[NotImplementedError] ( literal[string] % identifier[mode] )
keyword[if] identifier[isinstance] ( identifier[uri] , identifier[six] . identifier[string_types] ):
identifier[filename] = identifier[uri] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[parsed_uri] = identifier[_parse_uri] ( identifier[uri] )
identifier[unsupported] = literal[string] %( identifier[mode] , identifier[parsed_uri] . identifier[scheme] )
keyword[if] identifier[parsed_uri] . identifier[scheme] == literal[string] :
identifier[fobj] = identifier[io] . identifier[open] ( identifier[parsed_uri] . identifier[uri_path] , identifier[mode] )
keyword[return] identifier[fobj] , identifier[filename]
keyword[elif] identifier[parsed_uri] . identifier[scheme] keyword[in] identifier[smart_open_ssh] . identifier[SCHEMES] :
identifier[fobj] = identifier[smart_open_ssh] . identifier[open] (
identifier[parsed_uri] . identifier[uri_path] ,
identifier[mode] ,
identifier[host] = identifier[parsed_uri] . identifier[host] ,
identifier[user] = identifier[parsed_uri] . identifier[user] ,
identifier[port] = identifier[parsed_uri] . identifier[port] ,
)
keyword[return] identifier[fobj] , identifier[filename]
keyword[elif] identifier[parsed_uri] . identifier[scheme] keyword[in] identifier[smart_open_s3] . identifier[SUPPORTED_SCHEMES] :
keyword[return] identifier[_s3_open_uri] ( identifier[parsed_uri] , identifier[mode] , identifier[transport_params] ), identifier[filename]
keyword[elif] identifier[parsed_uri] . identifier[scheme] == literal[string] :
identifier[_check_kwargs] ( identifier[smart_open_hdfs] . identifier[open] , identifier[transport_params] )
keyword[return] identifier[smart_open_hdfs] . identifier[open] ( identifier[parsed_uri] . identifier[uri_path] , identifier[mode] ), identifier[filename]
keyword[elif] identifier[parsed_uri] . identifier[scheme] == literal[string] :
identifier[kw] = identifier[_check_kwargs] ( identifier[smart_open_webhdfs] . identifier[open] , identifier[transport_params] )
keyword[return] identifier[smart_open_webhdfs] . identifier[open] ( identifier[parsed_uri] . identifier[uri_path] , identifier[mode] ,** identifier[kw] ), identifier[filename]
keyword[elif] identifier[parsed_uri] . identifier[scheme] . identifier[startswith] ( literal[string] ):
identifier[filename] = identifier[P] . identifier[basename] ( identifier[urlparse] . identifier[urlparse] ( identifier[uri] ). identifier[path] )
identifier[kw] = identifier[_check_kwargs] ( identifier[smart_open_http] . identifier[open] , identifier[transport_params] )
keyword[return] identifier[smart_open_http] . identifier[open] ( identifier[uri] , identifier[mode] ,** identifier[kw] ), identifier[filename]
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] , identifier[parsed_uri] . identifier[scheme] )
keyword[elif] identifier[hasattr] ( identifier[uri] , literal[string] ):
identifier[filename] = identifier[getattr] ( identifier[uri] , literal[string] , literal[string] )
keyword[return] identifier[uri] , identifier[filename]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[uri] ) | def _open_binary_stream(uri, mode, transport_params):
"""Open an arbitrary URI in the specified binary mode.
Not all modes are supported for all protocols.
:arg uri: The URI to open. May be a string, or something else.
:arg str mode: The mode to open with. Must be rb, wb or ab.
:arg transport_params: Keyword argumens for the transport layer.
:returns: A file object and the filename
:rtype: tuple
"""
if mode not in ('rb', 'rb+', 'wb', 'wb+', 'ab', 'ab+'):
#
# This should really be a ValueError, but for the sake of compatibility
# with older versions, which raise NotImplementedError, we do the same.
#
raise NotImplementedError('unsupported mode: %r' % mode) # depends on [control=['if'], data=['mode']]
if isinstance(uri, six.string_types):
# this method just routes the request to classes handling the specific storage
# schemes, depending on the URI protocol in `uri`
filename = uri.split('/')[-1]
parsed_uri = _parse_uri(uri)
unsupported = '%r mode not supported for %r scheme' % (mode, parsed_uri.scheme)
if parsed_uri.scheme == 'file':
fobj = io.open(parsed_uri.uri_path, mode)
return (fobj, filename) # depends on [control=['if'], data=[]]
elif parsed_uri.scheme in smart_open_ssh.SCHEMES:
fobj = smart_open_ssh.open(parsed_uri.uri_path, mode, host=parsed_uri.host, user=parsed_uri.user, port=parsed_uri.port)
return (fobj, filename) # depends on [control=['if'], data=[]]
elif parsed_uri.scheme in smart_open_s3.SUPPORTED_SCHEMES:
return (_s3_open_uri(parsed_uri, mode, transport_params), filename) # depends on [control=['if'], data=[]]
elif parsed_uri.scheme == 'hdfs':
_check_kwargs(smart_open_hdfs.open, transport_params)
return (smart_open_hdfs.open(parsed_uri.uri_path, mode), filename) # depends on [control=['if'], data=[]]
elif parsed_uri.scheme == 'webhdfs':
kw = _check_kwargs(smart_open_webhdfs.open, transport_params)
return (smart_open_webhdfs.open(parsed_uri.uri_path, mode, **kw), filename) # depends on [control=['if'], data=[]]
elif parsed_uri.scheme.startswith('http'):
#
# The URI may contain a query string and fragments, which interfere
# with our compressed/uncompressed estimation, so we strip them.
#
filename = P.basename(urlparse.urlparse(uri).path)
kw = _check_kwargs(smart_open_http.open, transport_params)
return (smart_open_http.open(uri, mode, **kw), filename) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('scheme %r is not supported', parsed_uri.scheme) # depends on [control=['if'], data=[]]
elif hasattr(uri, 'read'):
# simply pass-through if already a file-like
# we need to return something as the file name, but we don't know what
# so we probe for uri.name (e.g., this works with open() or tempfile.NamedTemporaryFile)
# if the value ends with COMPRESSED_EXT, we will note it in _compression_wrapper()
# if there is no such an attribute, we return "unknown" - this effectively disables any compression
filename = getattr(uri, 'name', 'unknown')
return (uri, filename) # depends on [control=['if'], data=[]]
else:
raise TypeError("don't know how to handle uri %r" % uri) |
def create_embedded_template_draft(self, client_id, signer_roles, test_mode=False, files=None, file_urls=None, title=None, subject=None, message=None, cc_roles=None, merge_fields=None, use_preexisting_fields=False):
''' Creates an embedded Template draft for further editing.
Args:
test_mode (bool, optional): Whether this is a test, the signature request created from this draft will not be legally binding if set to 1. Defaults to 0.
client_id (str): Client id of the app you're using to create this draft.
files (list of str): The file(s) to use for the template.
file_urls (list of str): URLs of the file for HelloSign to use for the template. Use either `files` or `file_urls`, but not both.
title (str, optional): The template title
subject (str, optional): The default template email subject
message (str, optional): The default template email message
signer_roles (list of dict): A list of signer roles, each of which has the following attributes:
name (str): The role name of the signer that will be displayed when the template is used to create a signature request.
order (str, optional): The order in which this signer role is required to sign.
cc_roles (list of str, optional): The CC roles that must be assigned when using the template to send a signature request
merge_fields (list of dict, optional): The merge fields that can be placed on the template's document(s) by the user claiming the template draft. Each must have the following two parameters:
name (str): The name of the merge field. Must be unique.
type (str): Can only be "text" or "checkbox".
use_preexisting_fields (bool): Whether to use preexisting PDF fields
Returns:
A Template object specifying the Id of the draft
'''
params = {
'test_mode': test_mode,
'client_id': client_id,
'files': files,
'file_urls': file_urls,
'title': title,
'subject': subject,
'message': message,
'signer_roles': signer_roles,
'cc_roles': cc_roles,
'merge_fields': merge_fields,
'use_preexisting_fields': use_preexisting_fields
}
return self._create_embedded_template_draft(**params) | def function[create_embedded_template_draft, parameter[self, client_id, signer_roles, test_mode, files, file_urls, title, subject, message, cc_roles, merge_fields, use_preexisting_fields]]:
constant[ Creates an embedded Template draft for further editing.
Args:
test_mode (bool, optional): Whether this is a test, the signature request created from this draft will not be legally binding if set to 1. Defaults to 0.
client_id (str): Client id of the app you're using to create this draft.
files (list of str): The file(s) to use for the template.
file_urls (list of str): URLs of the file for HelloSign to use for the template. Use either `files` or `file_urls`, but not both.
title (str, optional): The template title
subject (str, optional): The default template email subject
message (str, optional): The default template email message
signer_roles (list of dict): A list of signer roles, each of which has the following attributes:
name (str): The role name of the signer that will be displayed when the template is used to create a signature request.
order (str, optional): The order in which this signer role is required to sign.
cc_roles (list of str, optional): The CC roles that must be assigned when using the template to send a signature request
merge_fields (list of dict, optional): The merge fields that can be placed on the template's document(s) by the user claiming the template draft. Each must have the following two parameters:
name (str): The name of the merge field. Must be unique.
type (str): Can only be "text" or "checkbox".
use_preexisting_fields (bool): Whether to use preexisting PDF fields
Returns:
A Template object specifying the Id of the draft
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0c14070>, <ast.Constant object at 0x7da1b0c17970>, <ast.Constant object at 0x7da1b0c1d150>, <ast.Constant object at 0x7da1b0c1e9b0>, <ast.Constant object at 0x7da1b0c1eaa0>, <ast.Constant object at 0x7da1b0c1eb00>, <ast.Constant object at 0x7da1b0c1eb30>, <ast.Constant object at 0x7da1b0c1ead0>, <ast.Constant object at 0x7da1b0c1e9e0>, <ast.Constant object at 0x7da1b0c1ea40>, <ast.Constant object at 0x7da1b0c1ea70>], [<ast.Name object at 0x7da1b0c1ea10>, <ast.Name object at 0x7da1b0c1eb60>, <ast.Name object at 0x7da1b0c1ec50>, <ast.Name object at 0x7da1b0c1ee60>, <ast.Name object at 0x7da1b0c1ee90>, <ast.Name object at 0x7da1b0c1ec80>, <ast.Name object at 0x7da1b0c1ece0>, <ast.Name object at 0x7da1b0c1ed70>, <ast.Name object at 0x7da1b0c1eda0>, <ast.Name object at 0x7da1b0c1ee00>, <ast.Name object at 0x7da1b0c1ee30>]]
return[call[name[self]._create_embedded_template_draft, parameter[]]] | keyword[def] identifier[create_embedded_template_draft] ( identifier[self] , identifier[client_id] , identifier[signer_roles] , identifier[test_mode] = keyword[False] , identifier[files] = keyword[None] , identifier[file_urls] = keyword[None] , identifier[title] = keyword[None] , identifier[subject] = keyword[None] , identifier[message] = keyword[None] , identifier[cc_roles] = keyword[None] , identifier[merge_fields] = keyword[None] , identifier[use_preexisting_fields] = keyword[False] ):
literal[string]
identifier[params] ={
literal[string] : identifier[test_mode] ,
literal[string] : identifier[client_id] ,
literal[string] : identifier[files] ,
literal[string] : identifier[file_urls] ,
literal[string] : identifier[title] ,
literal[string] : identifier[subject] ,
literal[string] : identifier[message] ,
literal[string] : identifier[signer_roles] ,
literal[string] : identifier[cc_roles] ,
literal[string] : identifier[merge_fields] ,
literal[string] : identifier[use_preexisting_fields]
}
keyword[return] identifier[self] . identifier[_create_embedded_template_draft] (** identifier[params] ) | def create_embedded_template_draft(self, client_id, signer_roles, test_mode=False, files=None, file_urls=None, title=None, subject=None, message=None, cc_roles=None, merge_fields=None, use_preexisting_fields=False):
""" Creates an embedded Template draft for further editing.
Args:
test_mode (bool, optional): Whether this is a test, the signature request created from this draft will not be legally binding if set to 1. Defaults to 0.
client_id (str): Client id of the app you're using to create this draft.
files (list of str): The file(s) to use for the template.
file_urls (list of str): URLs of the file for HelloSign to use for the template. Use either `files` or `file_urls`, but not both.
title (str, optional): The template title
subject (str, optional): The default template email subject
message (str, optional): The default template email message
signer_roles (list of dict): A list of signer roles, each of which has the following attributes:
name (str): The role name of the signer that will be displayed when the template is used to create a signature request.
order (str, optional): The order in which this signer role is required to sign.
cc_roles (list of str, optional): The CC roles that must be assigned when using the template to send a signature request
merge_fields (list of dict, optional): The merge fields that can be placed on the template's document(s) by the user claiming the template draft. Each must have the following two parameters:
name (str): The name of the merge field. Must be unique.
type (str): Can only be "text" or "checkbox".
use_preexisting_fields (bool): Whether to use preexisting PDF fields
Returns:
A Template object specifying the Id of the draft
"""
params = {'test_mode': test_mode, 'client_id': client_id, 'files': files, 'file_urls': file_urls, 'title': title, 'subject': subject, 'message': message, 'signer_roles': signer_roles, 'cc_roles': cc_roles, 'merge_fields': merge_fields, 'use_preexisting_fields': use_preexisting_fields}
return self._create_embedded_template_draft(**params) |
def gelu(x):
"""Gaussian Error Linear Unit.
This is a smoother version of the RELU.
Original paper: https://arxiv.org/abs/1606.08415
Args:
x: float Tensor to perform activation.
Returns:
x with the GELU activation applied.
"""
cdf = 0.5 * (1.0 + tf.tanh(
(np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3)))))
return x * cdf | def function[gelu, parameter[x]]:
constant[Gaussian Error Linear Unit.
This is a smoother version of the RELU.
Original paper: https://arxiv.org/abs/1606.08415
Args:
x: float Tensor to perform activation.
Returns:
x with the GELU activation applied.
]
variable[cdf] assign[=] binary_operation[constant[0.5] * binary_operation[constant[1.0] + call[name[tf].tanh, parameter[binary_operation[call[name[np].sqrt, parameter[binary_operation[constant[2] / name[np].pi]]] * binary_operation[name[x] + binary_operation[constant[0.044715] * call[name[tf].pow, parameter[name[x], constant[3]]]]]]]]]]
return[binary_operation[name[x] * name[cdf]]] | keyword[def] identifier[gelu] ( identifier[x] ):
literal[string]
identifier[cdf] = literal[int] *( literal[int] + identifier[tf] . identifier[tanh] (
( identifier[np] . identifier[sqrt] ( literal[int] / identifier[np] . identifier[pi] )*( identifier[x] + literal[int] * identifier[tf] . identifier[pow] ( identifier[x] , literal[int] )))))
keyword[return] identifier[x] * identifier[cdf] | def gelu(x):
"""Gaussian Error Linear Unit.
This is a smoother version of the RELU.
Original paper: https://arxiv.org/abs/1606.08415
Args:
x: float Tensor to perform activation.
Returns:
x with the GELU activation applied.
"""
cdf = 0.5 * (1.0 + tf.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3))))
return x * cdf |
def _cdf(self, xloc, dist, base, cache):
"""Cumulative distribution function."""
return evaluation.evaluate_forward(dist, base**xloc, cache=cache) | def function[_cdf, parameter[self, xloc, dist, base, cache]]:
constant[Cumulative distribution function.]
return[call[name[evaluation].evaluate_forward, parameter[name[dist], binary_operation[name[base] ** name[xloc]]]]] | keyword[def] identifier[_cdf] ( identifier[self] , identifier[xloc] , identifier[dist] , identifier[base] , identifier[cache] ):
literal[string]
keyword[return] identifier[evaluation] . identifier[evaluate_forward] ( identifier[dist] , identifier[base] ** identifier[xloc] , identifier[cache] = identifier[cache] ) | def _cdf(self, xloc, dist, base, cache):
"""Cumulative distribution function."""
return evaluation.evaluate_forward(dist, base ** xloc, cache=cache) |
def get_time_from_str(when):
"""Convert a string to a time: first uses the dateutil parser, falls back
on fuzzy matching with parsedatetime
"""
zero_oclock_today = datetime.now(tzlocal()).replace(
hour=0, minute=0, second=0, microsecond=0)
try:
event_time = dateutil_parse(when, default=zero_oclock_today)
except ValueError:
struct, result = fuzzy_date_parse(when)
if not result:
raise ValueError('Date and time is invalid: %s' % (when))
event_time = datetime.fromtimestamp(time.mktime(struct), tzlocal())
return event_time | def function[get_time_from_str, parameter[when]]:
constant[Convert a string to a time: first uses the dateutil parser, falls back
on fuzzy matching with parsedatetime
]
variable[zero_oclock_today] assign[=] call[call[name[datetime].now, parameter[call[name[tzlocal], parameter[]]]].replace, parameter[]]
<ast.Try object at 0x7da1b1dd85b0>
return[name[event_time]] | keyword[def] identifier[get_time_from_str] ( identifier[when] ):
literal[string]
identifier[zero_oclock_today] = identifier[datetime] . identifier[now] ( identifier[tzlocal] ()). identifier[replace] (
identifier[hour] = literal[int] , identifier[minute] = literal[int] , identifier[second] = literal[int] , identifier[microsecond] = literal[int] )
keyword[try] :
identifier[event_time] = identifier[dateutil_parse] ( identifier[when] , identifier[default] = identifier[zero_oclock_today] )
keyword[except] identifier[ValueError] :
identifier[struct] , identifier[result] = identifier[fuzzy_date_parse] ( identifier[when] )
keyword[if] keyword[not] identifier[result] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[when] ))
identifier[event_time] = identifier[datetime] . identifier[fromtimestamp] ( identifier[time] . identifier[mktime] ( identifier[struct] ), identifier[tzlocal] ())
keyword[return] identifier[event_time] | def get_time_from_str(when):
"""Convert a string to a time: first uses the dateutil parser, falls back
on fuzzy matching with parsedatetime
"""
zero_oclock_today = datetime.now(tzlocal()).replace(hour=0, minute=0, second=0, microsecond=0)
try:
event_time = dateutil_parse(when, default=zero_oclock_today) # depends on [control=['try'], data=[]]
except ValueError:
(struct, result) = fuzzy_date_parse(when)
if not result:
raise ValueError('Date and time is invalid: %s' % when) # depends on [control=['if'], data=[]]
event_time = datetime.fromtimestamp(time.mktime(struct), tzlocal()) # depends on [control=['except'], data=[]]
return event_time |
def raise_on_errors(self):
"""
Raises a :class:`~notifiers.exceptions.NotificationError` if response hold errors
:raises: :class:`~notifiers.exceptions.NotificationError`: If response has errors
"""
if self.errors:
raise NotificationError(
provider=self.provider,
data=self.data,
errors=self.errors,
response=self.response,
) | def function[raise_on_errors, parameter[self]]:
constant[
Raises a :class:`~notifiers.exceptions.NotificationError` if response hold errors
:raises: :class:`~notifiers.exceptions.NotificationError`: If response has errors
]
if name[self].errors begin[:]
<ast.Raise object at 0x7da1b1e900a0> | keyword[def] identifier[raise_on_errors] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[errors] :
keyword[raise] identifier[NotificationError] (
identifier[provider] = identifier[self] . identifier[provider] ,
identifier[data] = identifier[self] . identifier[data] ,
identifier[errors] = identifier[self] . identifier[errors] ,
identifier[response] = identifier[self] . identifier[response] ,
) | def raise_on_errors(self):
"""
Raises a :class:`~notifiers.exceptions.NotificationError` if response hold errors
:raises: :class:`~notifiers.exceptions.NotificationError`: If response has errors
"""
if self.errors:
raise NotificationError(provider=self.provider, data=self.data, errors=self.errors, response=self.response) # depends on [control=['if'], data=[]] |
def get_valid_app_auth(self, app_uri):
"""获得账号下可用的应用的密钥
列出指定应用的可用密钥
Args:
- app_uri: 应用的完整标识
Returns:
返回一个tuple对象,其格式为(<result>, <ResponseInfo>)
- result 成功返回可用秘钥列表,失败返回None
- ResponseInfo 请求的Response信息
"""
ret, retInfo = self.get_app_keys(app_uri)
if ret is None:
return None
for k in ret:
if (k.get('state') == 'enabled'):
return QiniuMacAuth(k.get('ak'), k.get('sk'))
return None | def function[get_valid_app_auth, parameter[self, app_uri]]:
constant[获得账号下可用的应用的密钥
列出指定应用的可用密钥
Args:
- app_uri: 应用的完整标识
Returns:
返回一个tuple对象,其格式为(<result>, <ResponseInfo>)
- result 成功返回可用秘钥列表,失败返回None
- ResponseInfo 请求的Response信息
]
<ast.Tuple object at 0x7da20c7c83d0> assign[=] call[name[self].get_app_keys, parameter[name[app_uri]]]
if compare[name[ret] is constant[None]] begin[:]
return[constant[None]]
for taget[name[k]] in starred[name[ret]] begin[:]
if compare[call[name[k].get, parameter[constant[state]]] equal[==] constant[enabled]] begin[:]
return[call[name[QiniuMacAuth], parameter[call[name[k].get, parameter[constant[ak]]], call[name[k].get, parameter[constant[sk]]]]]]
return[constant[None]] | keyword[def] identifier[get_valid_app_auth] ( identifier[self] , identifier[app_uri] ):
literal[string]
identifier[ret] , identifier[retInfo] = identifier[self] . identifier[get_app_keys] ( identifier[app_uri] )
keyword[if] identifier[ret] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[for] identifier[k] keyword[in] identifier[ret] :
keyword[if] ( identifier[k] . identifier[get] ( literal[string] )== literal[string] ):
keyword[return] identifier[QiniuMacAuth] ( identifier[k] . identifier[get] ( literal[string] ), identifier[k] . identifier[get] ( literal[string] ))
keyword[return] keyword[None] | def get_valid_app_auth(self, app_uri):
"""获得账号下可用的应用的密钥
列出指定应用的可用密钥
Args:
- app_uri: 应用的完整标识
Returns:
返回一个tuple对象,其格式为(<result>, <ResponseInfo>)
- result 成功返回可用秘钥列表,失败返回None
- ResponseInfo 请求的Response信息
"""
(ret, retInfo) = self.get_app_keys(app_uri)
if ret is None:
return None # depends on [control=['if'], data=[]]
for k in ret:
if k.get('state') == 'enabled':
return QiniuMacAuth(k.get('ak'), k.get('sk')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
return None |
def _dict_func(self, func, axis, *args, **kwargs):
"""Apply function to certain indices across given axis.
Args:
func: The function to apply.
axis: Target axis to apply the function along.
Returns:
A new PandasQueryCompiler.
"""
if "axis" not in kwargs:
kwargs["axis"] = axis
if axis == 0:
index = self.columns
else:
index = self.index
func = {idx: func[key] for key in func for idx in index.get_indexer_for([key])}
def dict_apply_builder(df, func_dict={}):
# Sometimes `apply` can return a `Series`, but we require that internally
# all objects are `DataFrame`s.
return pandas.DataFrame(df.apply(func_dict, *args, **kwargs))
result_data = self.data.apply_func_to_select_indices_along_full_axis(
axis, dict_apply_builder, func, keep_remaining=False
)
full_result = self._post_process_apply(result_data, axis)
return full_result | def function[_dict_func, parameter[self, func, axis]]:
constant[Apply function to certain indices across given axis.
Args:
func: The function to apply.
axis: Target axis to apply the function along.
Returns:
A new PandasQueryCompiler.
]
if compare[constant[axis] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][constant[axis]] assign[=] name[axis]
if compare[name[axis] equal[==] constant[0]] begin[:]
variable[index] assign[=] name[self].columns
variable[func] assign[=] <ast.DictComp object at 0x7da18bcc9d50>
def function[dict_apply_builder, parameter[df, func_dict]]:
return[call[name[pandas].DataFrame, parameter[call[name[df].apply, parameter[name[func_dict], <ast.Starred object at 0x7da18bccb670>]]]]]
variable[result_data] assign[=] call[name[self].data.apply_func_to_select_indices_along_full_axis, parameter[name[axis], name[dict_apply_builder], name[func]]]
variable[full_result] assign[=] call[name[self]._post_process_apply, parameter[name[result_data], name[axis]]]
return[name[full_result]] | keyword[def] identifier[_dict_func] ( identifier[self] , identifier[func] , identifier[axis] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]= identifier[axis]
keyword[if] identifier[axis] == literal[int] :
identifier[index] = identifier[self] . identifier[columns]
keyword[else] :
identifier[index] = identifier[self] . identifier[index]
identifier[func] ={ identifier[idx] : identifier[func] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[func] keyword[for] identifier[idx] keyword[in] identifier[index] . identifier[get_indexer_for] ([ identifier[key] ])}
keyword[def] identifier[dict_apply_builder] ( identifier[df] , identifier[func_dict] ={}):
keyword[return] identifier[pandas] . identifier[DataFrame] ( identifier[df] . identifier[apply] ( identifier[func_dict] ,* identifier[args] ,** identifier[kwargs] ))
identifier[result_data] = identifier[self] . identifier[data] . identifier[apply_func_to_select_indices_along_full_axis] (
identifier[axis] , identifier[dict_apply_builder] , identifier[func] , identifier[keep_remaining] = keyword[False]
)
identifier[full_result] = identifier[self] . identifier[_post_process_apply] ( identifier[result_data] , identifier[axis] )
keyword[return] identifier[full_result] | def _dict_func(self, func, axis, *args, **kwargs):
"""Apply function to certain indices across given axis.
Args:
func: The function to apply.
axis: Target axis to apply the function along.
Returns:
A new PandasQueryCompiler.
"""
if 'axis' not in kwargs:
kwargs['axis'] = axis # depends on [control=['if'], data=['kwargs']]
if axis == 0:
index = self.columns # depends on [control=['if'], data=[]]
else:
index = self.index
func = {idx: func[key] for key in func for idx in index.get_indexer_for([key])}
def dict_apply_builder(df, func_dict={}):
# Sometimes `apply` can return a `Series`, but we require that internally
# all objects are `DataFrame`s.
return pandas.DataFrame(df.apply(func_dict, *args, **kwargs))
result_data = self.data.apply_func_to_select_indices_along_full_axis(axis, dict_apply_builder, func, keep_remaining=False)
full_result = self._post_process_apply(result_data, axis)
return full_result |
def setSpecArgs(**kwargs):
"""Sets optional arguments for the spectrogram appearance.
Available options:
:param nfft: size of FFT window to use
:type nfft: int
:param overlap: percent overlap of window
:type overlap: number
:param window: Type of window to use, choices are hanning, hamming, blackman, bartlett or none (rectangular)
:type window: string
:param colormap: Gets set by colormap editor. Holds the information to generate the colormap. Items: :meth:`lut<pyqtgraph:pyqtgraph.ImageItem.setLookupTable>`, :meth:`levels<pyqtgraph:pyqtgraph.ImageItem.setLevels>`, state (info for editor)
:type colormap: dict
"""
for key, value in kwargs.items():
if key == 'colormap':
SpecWidget.imgArgs['lut'] = value['lut']
SpecWidget.imgArgs['levels'] = value['levels']
SpecWidget.imgArgs['state'] = value['state']
for w in SpecWidget.instances:
w.updateColormap()
else:
SpecWidget.specgramArgs[key] = value | def function[setSpecArgs, parameter[]]:
constant[Sets optional arguments for the spectrogram appearance.
Available options:
:param nfft: size of FFT window to use
:type nfft: int
:param overlap: percent overlap of window
:type overlap: number
:param window: Type of window to use, choices are hanning, hamming, blackman, bartlett or none (rectangular)
:type window: string
:param colormap: Gets set by colormap editor. Holds the information to generate the colormap. Items: :meth:`lut<pyqtgraph:pyqtgraph.ImageItem.setLookupTable>`, :meth:`levels<pyqtgraph:pyqtgraph.ImageItem.setLevels>`, state (info for editor)
:type colormap: dict
]
for taget[tuple[[<ast.Name object at 0x7da20c76d360>, <ast.Name object at 0x7da20c76d4b0>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
if compare[name[key] equal[==] constant[colormap]] begin[:]
call[name[SpecWidget].imgArgs][constant[lut]] assign[=] call[name[value]][constant[lut]]
call[name[SpecWidget].imgArgs][constant[levels]] assign[=] call[name[value]][constant[levels]]
call[name[SpecWidget].imgArgs][constant[state]] assign[=] call[name[value]][constant[state]]
for taget[name[w]] in starred[name[SpecWidget].instances] begin[:]
call[name[w].updateColormap, parameter[]] | keyword[def] identifier[setSpecArgs] (** identifier[kwargs] ):
literal[string]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] ():
keyword[if] identifier[key] == literal[string] :
identifier[SpecWidget] . identifier[imgArgs] [ literal[string] ]= identifier[value] [ literal[string] ]
identifier[SpecWidget] . identifier[imgArgs] [ literal[string] ]= identifier[value] [ literal[string] ]
identifier[SpecWidget] . identifier[imgArgs] [ literal[string] ]= identifier[value] [ literal[string] ]
keyword[for] identifier[w] keyword[in] identifier[SpecWidget] . identifier[instances] :
identifier[w] . identifier[updateColormap] ()
keyword[else] :
identifier[SpecWidget] . identifier[specgramArgs] [ identifier[key] ]= identifier[value] | def setSpecArgs(**kwargs):
"""Sets optional arguments for the spectrogram appearance.
Available options:
:param nfft: size of FFT window to use
:type nfft: int
:param overlap: percent overlap of window
:type overlap: number
:param window: Type of window to use, choices are hanning, hamming, blackman, bartlett or none (rectangular)
:type window: string
:param colormap: Gets set by colormap editor. Holds the information to generate the colormap. Items: :meth:`lut<pyqtgraph:pyqtgraph.ImageItem.setLookupTable>`, :meth:`levels<pyqtgraph:pyqtgraph.ImageItem.setLevels>`, state (info for editor)
:type colormap: dict
"""
for (key, value) in kwargs.items():
if key == 'colormap':
SpecWidget.imgArgs['lut'] = value['lut']
SpecWidget.imgArgs['levels'] = value['levels']
SpecWidget.imgArgs['state'] = value['state']
for w in SpecWidget.instances:
w.updateColormap() # depends on [control=['for'], data=['w']] # depends on [control=['if'], data=[]]
else:
SpecWidget.specgramArgs[key] = value # depends on [control=['for'], data=[]] |
def top_k_accuracy(input:Tensor, targs:Tensor, k:int=5)->Rank0Tensor:
"Computes the Top-k accuracy (target is in the top k predictions)."
input = input.topk(k=k, dim=-1)[1]
targs = targs.unsqueeze(dim=-1).expand_as(input)
return (input == targs).max(dim=-1)[0].float().mean() | def function[top_k_accuracy, parameter[input, targs, k]]:
constant[Computes the Top-k accuracy (target is in the top k predictions).]
variable[input] assign[=] call[call[name[input].topk, parameter[]]][constant[1]]
variable[targs] assign[=] call[call[name[targs].unsqueeze, parameter[]].expand_as, parameter[name[input]]]
return[call[call[call[call[compare[name[input] equal[==] name[targs]].max, parameter[]]][constant[0]].float, parameter[]].mean, parameter[]]] | keyword[def] identifier[top_k_accuracy] ( identifier[input] : identifier[Tensor] , identifier[targs] : identifier[Tensor] , identifier[k] : identifier[int] = literal[int] )-> identifier[Rank0Tensor] :
literal[string]
identifier[input] = identifier[input] . identifier[topk] ( identifier[k] = identifier[k] , identifier[dim] =- literal[int] )[ literal[int] ]
identifier[targs] = identifier[targs] . identifier[unsqueeze] ( identifier[dim] =- literal[int] ). identifier[expand_as] ( identifier[input] )
keyword[return] ( identifier[input] == identifier[targs] ). identifier[max] ( identifier[dim] =- literal[int] )[ literal[int] ]. identifier[float] (). identifier[mean] () | def top_k_accuracy(input: Tensor, targs: Tensor, k: int=5) -> Rank0Tensor:
"""Computes the Top-k accuracy (target is in the top k predictions)."""
input = input.topk(k=k, dim=-1)[1]
targs = targs.unsqueeze(dim=-1).expand_as(input)
return (input == targs).max(dim=-1)[0].float().mean() |
def module_sys_modules_key(key):
"""
Check if a module is in the sys.modules dictionary in some manner. If so,
return the key used in that dictionary.
:param key: our key to the module.
:returns: the key in sys.modules or None.
"""
moduleparts = key.split(".")
for partnum, part in enumerate(moduleparts):
modkey = ".".join(moduleparts[partnum:])
if modkey in sys.modules:
return modkey
return None | def function[module_sys_modules_key, parameter[key]]:
constant[
Check if a module is in the sys.modules dictionary in some manner. If so,
return the key used in that dictionary.
:param key: our key to the module.
:returns: the key in sys.modules or None.
]
variable[moduleparts] assign[=] call[name[key].split, parameter[constant[.]]]
for taget[tuple[[<ast.Name object at 0x7da18bcc91e0>, <ast.Name object at 0x7da18bcc8d30>]]] in starred[call[name[enumerate], parameter[name[moduleparts]]]] begin[:]
variable[modkey] assign[=] call[constant[.].join, parameter[call[name[moduleparts]][<ast.Slice object at 0x7da18bcc9ff0>]]]
if compare[name[modkey] in name[sys].modules] begin[:]
return[name[modkey]]
return[constant[None]] | keyword[def] identifier[module_sys_modules_key] ( identifier[key] ):
literal[string]
identifier[moduleparts] = identifier[key] . identifier[split] ( literal[string] )
keyword[for] identifier[partnum] , identifier[part] keyword[in] identifier[enumerate] ( identifier[moduleparts] ):
identifier[modkey] = literal[string] . identifier[join] ( identifier[moduleparts] [ identifier[partnum] :])
keyword[if] identifier[modkey] keyword[in] identifier[sys] . identifier[modules] :
keyword[return] identifier[modkey]
keyword[return] keyword[None] | def module_sys_modules_key(key):
"""
Check if a module is in the sys.modules dictionary in some manner. If so,
return the key used in that dictionary.
:param key: our key to the module.
:returns: the key in sys.modules or None.
"""
moduleparts = key.split('.')
for (partnum, part) in enumerate(moduleparts):
modkey = '.'.join(moduleparts[partnum:])
if modkey in sys.modules:
return modkey # depends on [control=['if'], data=['modkey']] # depends on [control=['for'], data=[]]
return None |
def count(cls, *criteria, **filters):
"""Count how many comments."""
return cls.query.filter(*criteria).filter_by(**filters).count() | def function[count, parameter[cls]]:
constant[Count how many comments.]
return[call[call[call[name[cls].query.filter, parameter[<ast.Starred object at 0x7da2054a60e0>]].filter_by, parameter[]].count, parameter[]]] | keyword[def] identifier[count] ( identifier[cls] ,* identifier[criteria] ,** identifier[filters] ):
literal[string]
keyword[return] identifier[cls] . identifier[query] . identifier[filter] (* identifier[criteria] ). identifier[filter_by] (** identifier[filters] ). identifier[count] () | def count(cls, *criteria, **filters):
"""Count how many comments."""
return cls.query.filter(*criteria).filter_by(**filters).count() |
def has_next(self):
"""
Checks for one more item than last on this page.
"""
try:
next_item = self.paginator.object_list[self.paginator.per_page]
except IndexError:
return False
return True | def function[has_next, parameter[self]]:
constant[
Checks for one more item than last on this page.
]
<ast.Try object at 0x7da18bcc8280>
return[constant[True]] | keyword[def] identifier[has_next] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[next_item] = identifier[self] . identifier[paginator] . identifier[object_list] [ identifier[self] . identifier[paginator] . identifier[per_page] ]
keyword[except] identifier[IndexError] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def has_next(self):
"""
Checks for one more item than last on this page.
"""
try:
next_item = self.paginator.object_list[self.paginator.per_page] # depends on [control=['try'], data=[]]
except IndexError:
return False # depends on [control=['except'], data=[]]
return True |
def update(self, **kwargs):
"""Updates an instance.
Args:
**kwargs : Arbitrary keyword arguments. Column names are
keywords and their new values are the values.
Examples:
>>> customer.update(email="newemail@x.com", name="new")
"""
kwargs = self._preprocess_params(kwargs)
kwargs = self.preprocess_kwargs_before_update(kwargs)
for key, value in kwargs.iteritems():
cls = type(self)
if not hasattr(cls, key) or isinstance(getattr(cls, key), property):
continue
if key not in self._no_overwrite_:
setattr(self, key, value)
if isinstance(getattr(self, key), OrderingList):
getattr(self, key).reorder()
elif isinstance(getattr(cls, key), AssociationProxyInstance):
target_name = getattr(cls, key).target_collection
target_rel = getattr(self, target_name)
if isinstance(target_rel, OrderingList):
target_rel.reorder()
try:
self.session.commit()
return self
except Exception as e:
self.session.rollback()
raise e | def function[update, parameter[self]]:
constant[Updates an instance.
Args:
**kwargs : Arbitrary keyword arguments. Column names are
keywords and their new values are the values.
Examples:
>>> customer.update(email="newemail@x.com", name="new")
]
variable[kwargs] assign[=] call[name[self]._preprocess_params, parameter[name[kwargs]]]
variable[kwargs] assign[=] call[name[self].preprocess_kwargs_before_update, parameter[name[kwargs]]]
for taget[tuple[[<ast.Name object at 0x7da1b242e800>, <ast.Name object at 0x7da1b242ece0>]]] in starred[call[name[kwargs].iteritems, parameter[]]] begin[:]
variable[cls] assign[=] call[name[type], parameter[name[self]]]
if <ast.BoolOp object at 0x7da1b242f1f0> begin[:]
continue
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self]._no_overwrite_] begin[:]
call[name[setattr], parameter[name[self], name[key], name[value]]]
if call[name[isinstance], parameter[call[name[getattr], parameter[name[self], name[key]]], name[OrderingList]]] begin[:]
call[call[name[getattr], parameter[name[self], name[key]]].reorder, parameter[]]
<ast.Try object at 0x7da1b24ad870> | keyword[def] identifier[update] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] = identifier[self] . identifier[_preprocess_params] ( identifier[kwargs] )
identifier[kwargs] = identifier[self] . identifier[preprocess_kwargs_before_update] ( identifier[kwargs] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[kwargs] . identifier[iteritems] ():
identifier[cls] = identifier[type] ( identifier[self] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[cls] , identifier[key] ) keyword[or] identifier[isinstance] ( identifier[getattr] ( identifier[cls] , identifier[key] ), identifier[property] ):
keyword[continue]
keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[_no_overwrite_] :
identifier[setattr] ( identifier[self] , identifier[key] , identifier[value] )
keyword[if] identifier[isinstance] ( identifier[getattr] ( identifier[self] , identifier[key] ), identifier[OrderingList] ):
identifier[getattr] ( identifier[self] , identifier[key] ). identifier[reorder] ()
keyword[elif] identifier[isinstance] ( identifier[getattr] ( identifier[cls] , identifier[key] ), identifier[AssociationProxyInstance] ):
identifier[target_name] = identifier[getattr] ( identifier[cls] , identifier[key] ). identifier[target_collection]
identifier[target_rel] = identifier[getattr] ( identifier[self] , identifier[target_name] )
keyword[if] identifier[isinstance] ( identifier[target_rel] , identifier[OrderingList] ):
identifier[target_rel] . identifier[reorder] ()
keyword[try] :
identifier[self] . identifier[session] . identifier[commit] ()
keyword[return] identifier[self]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[session] . identifier[rollback] ()
keyword[raise] identifier[e] | def update(self, **kwargs):
"""Updates an instance.
Args:
**kwargs : Arbitrary keyword arguments. Column names are
keywords and their new values are the values.
Examples:
>>> customer.update(email="newemail@x.com", name="new")
"""
kwargs = self._preprocess_params(kwargs)
kwargs = self.preprocess_kwargs_before_update(kwargs)
for (key, value) in kwargs.iteritems():
cls = type(self)
if not hasattr(cls, key) or isinstance(getattr(cls, key), property):
continue # depends on [control=['if'], data=[]]
if key not in self._no_overwrite_:
setattr(self, key, value) # depends on [control=['if'], data=['key']]
if isinstance(getattr(self, key), OrderingList):
getattr(self, key).reorder() # depends on [control=['if'], data=[]]
elif isinstance(getattr(cls, key), AssociationProxyInstance):
target_name = getattr(cls, key).target_collection
target_rel = getattr(self, target_name)
if isinstance(target_rel, OrderingList):
target_rel.reorder() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
try:
self.session.commit()
return self # depends on [control=['try'], data=[]]
except Exception as e:
self.session.rollback()
raise e # depends on [control=['except'], data=['e']] |
def sum(self):
"""Summary
Returns:
TYPE: Description
"""
return LazyOpResult(
grizzly_impl.aggr(
self.expr,
"+",
0,
self.weld_type
),
self.weld_type,
0
) | def function[sum, parameter[self]]:
constant[Summary
Returns:
TYPE: Description
]
return[call[name[LazyOpResult], parameter[call[name[grizzly_impl].aggr, parameter[name[self].expr, constant[+], constant[0], name[self].weld_type]], name[self].weld_type, constant[0]]]] | keyword[def] identifier[sum] ( identifier[self] ):
literal[string]
keyword[return] identifier[LazyOpResult] (
identifier[grizzly_impl] . identifier[aggr] (
identifier[self] . identifier[expr] ,
literal[string] ,
literal[int] ,
identifier[self] . identifier[weld_type]
),
identifier[self] . identifier[weld_type] ,
literal[int]
) | def sum(self):
"""Summary
Returns:
TYPE: Description
"""
return LazyOpResult(grizzly_impl.aggr(self.expr, '+', 0, self.weld_type), self.weld_type, 0) |
def run_tutorial(min_magnitude=2, shift_len=0.2, num_cores=4, min_cc=0.5):
"""Functional, tested example script for running the lag-calc tutorial."""
if num_cores > cpu_count():
num_cores = cpu_count()
client = Client('NCEDC')
t1 = UTCDateTime(2004, 9, 28)
t2 = t1 + 86400
print('Downloading catalog')
catalog = client.get_events(
starttime=t1, endtime=t2, minmagnitude=min_magnitude,
minlatitude=35.7, maxlatitude=36.1, minlongitude=-120.6,
maxlongitude=-120.2, includearrivals=True)
# We don't need all the picks, lets take the information from the
# five most used stations - note that this is done to reduce computational
# costs.
catalog = catalog_utils.filter_picks(
catalog, channels=['EHZ'], top_n_picks=5)
# There is a duplicate pick in event 3 in the catalog - this has the effect
# of reducing our detections - check it yourself.
for pick in catalog[3].picks:
if pick.waveform_id.station_code == 'PHOB' and \
pick.onset == 'emergent':
catalog[3].picks.remove(pick)
print('Generating templates')
templates = template_gen.from_client(
catalog=catalog, client_id='NCEDC', lowcut=2.0, highcut=9.0,
samp_rate=50.0, filt_order=4, length=3.0, prepick=0.15,
swin='all', process_len=3600)
# In this section we generate a series of chunks of data.
start_time = UTCDateTime(2004, 9, 28, 17)
end_time = UTCDateTime(2004, 9, 28, 20)
process_len = 3600
chunks = []
chunk_start = start_time
while chunk_start < end_time:
chunk_end = chunk_start + process_len
if chunk_end > end_time:
chunk_end = end_time
chunks.append((chunk_start, chunk_end))
chunk_start += process_len
all_detections = []
picked_catalog = Catalog()
template_names = [str(template[0].stats.starttime)
for template in templates]
for t1, t2 in chunks:
print('Downloading and processing for start-time: %s' % t1)
# Download and process the data
bulk_info = [(tr.stats.network, tr.stats.station, '*',
tr.stats.channel, t1, t2) for tr in templates[0]]
# Just downloading a chunk of data
st = client.get_waveforms_bulk(bulk_info)
st.merge(fill_value='interpolate')
st = pre_processing.shortproc(
st, lowcut=2.0, highcut=9.0, filt_order=4, samp_rate=50.0,
debug=0, num_cores=num_cores)
detections = match_filter.match_filter(
template_names=template_names, template_list=templates, st=st,
threshold=8.0, threshold_type='MAD', trig_int=6.0, plotvar=False,
plotdir='.', cores=num_cores)
# Extract unique detections from set.
unique_detections = []
for master in detections:
keep = True
for slave in detections:
if not master == slave and\
abs(master.detect_time - slave.detect_time) <= 1.0:
# If the events are within 1s of each other then test which
# was the 'best' match, strongest detection
if not master.detect_val > slave.detect_val:
keep = False
break
if keep:
unique_detections.append(master)
all_detections += unique_detections
picked_catalog += lag_calc.lag_calc(
detections=unique_detections, detect_data=st,
template_names=template_names, templates=templates,
shift_len=shift_len, min_cc=min_cc, interpolate=False, plot=False,
parallel=True, debug=3)
# Return all of this so that we can use this function for testing.
return all_detections, picked_catalog, templates, template_names | def function[run_tutorial, parameter[min_magnitude, shift_len, num_cores, min_cc]]:
constant[Functional, tested example script for running the lag-calc tutorial.]
if compare[name[num_cores] greater[>] call[name[cpu_count], parameter[]]] begin[:]
variable[num_cores] assign[=] call[name[cpu_count], parameter[]]
variable[client] assign[=] call[name[Client], parameter[constant[NCEDC]]]
variable[t1] assign[=] call[name[UTCDateTime], parameter[constant[2004], constant[9], constant[28]]]
variable[t2] assign[=] binary_operation[name[t1] + constant[86400]]
call[name[print], parameter[constant[Downloading catalog]]]
variable[catalog] assign[=] call[name[client].get_events, parameter[]]
variable[catalog] assign[=] call[name[catalog_utils].filter_picks, parameter[name[catalog]]]
for taget[name[pick]] in starred[call[name[catalog]][constant[3]].picks] begin[:]
if <ast.BoolOp object at 0x7da2054a4490> begin[:]
call[call[name[catalog]][constant[3]].picks.remove, parameter[name[pick]]]
call[name[print], parameter[constant[Generating templates]]]
variable[templates] assign[=] call[name[template_gen].from_client, parameter[]]
variable[start_time] assign[=] call[name[UTCDateTime], parameter[constant[2004], constant[9], constant[28], constant[17]]]
variable[end_time] assign[=] call[name[UTCDateTime], parameter[constant[2004], constant[9], constant[28], constant[20]]]
variable[process_len] assign[=] constant[3600]
variable[chunks] assign[=] list[[]]
variable[chunk_start] assign[=] name[start_time]
while compare[name[chunk_start] less[<] name[end_time]] begin[:]
variable[chunk_end] assign[=] binary_operation[name[chunk_start] + name[process_len]]
if compare[name[chunk_end] greater[>] name[end_time]] begin[:]
variable[chunk_end] assign[=] name[end_time]
call[name[chunks].append, parameter[tuple[[<ast.Name object at 0x7da2054a43a0>, <ast.Name object at 0x7da2054a47c0>]]]]
<ast.AugAssign object at 0x7da2054a5e70>
variable[all_detections] assign[=] list[[]]
variable[picked_catalog] assign[=] call[name[Catalog], parameter[]]
variable[template_names] assign[=] <ast.ListComp object at 0x7da2054a6fb0>
for taget[tuple[[<ast.Name object at 0x7da2054a58a0>, <ast.Name object at 0x7da2054a4bb0>]]] in starred[name[chunks]] begin[:]
call[name[print], parameter[binary_operation[constant[Downloading and processing for start-time: %s] <ast.Mod object at 0x7da2590d6920> name[t1]]]]
variable[bulk_info] assign[=] <ast.ListComp object at 0x7da2054a5180>
variable[st] assign[=] call[name[client].get_waveforms_bulk, parameter[name[bulk_info]]]
call[name[st].merge, parameter[]]
variable[st] assign[=] call[name[pre_processing].shortproc, parameter[name[st]]]
variable[detections] assign[=] call[name[match_filter].match_filter, parameter[]]
variable[unique_detections] assign[=] list[[]]
for taget[name[master]] in starred[name[detections]] begin[:]
variable[keep] assign[=] constant[True]
for taget[name[slave]] in starred[name[detections]] begin[:]
if <ast.BoolOp object at 0x7da2054a4040> begin[:]
if <ast.UnaryOp object at 0x7da2054a4190> begin[:]
variable[keep] assign[=] constant[False]
break
if name[keep] begin[:]
call[name[unique_detections].append, parameter[name[master]]]
<ast.AugAssign object at 0x7da2045672e0>
<ast.AugAssign object at 0x7da204566230>
return[tuple[[<ast.Name object at 0x7da204567340>, <ast.Name object at 0x7da2045646a0>, <ast.Name object at 0x7da204567130>, <ast.Name object at 0x7da204567a60>]]] | keyword[def] identifier[run_tutorial] ( identifier[min_magnitude] = literal[int] , identifier[shift_len] = literal[int] , identifier[num_cores] = literal[int] , identifier[min_cc] = literal[int] ):
literal[string]
keyword[if] identifier[num_cores] > identifier[cpu_count] ():
identifier[num_cores] = identifier[cpu_count] ()
identifier[client] = identifier[Client] ( literal[string] )
identifier[t1] = identifier[UTCDateTime] ( literal[int] , literal[int] , literal[int] )
identifier[t2] = identifier[t1] + literal[int]
identifier[print] ( literal[string] )
identifier[catalog] = identifier[client] . identifier[get_events] (
identifier[starttime] = identifier[t1] , identifier[endtime] = identifier[t2] , identifier[minmagnitude] = identifier[min_magnitude] ,
identifier[minlatitude] = literal[int] , identifier[maxlatitude] = literal[int] , identifier[minlongitude] =- literal[int] ,
identifier[maxlongitude] =- literal[int] , identifier[includearrivals] = keyword[True] )
identifier[catalog] = identifier[catalog_utils] . identifier[filter_picks] (
identifier[catalog] , identifier[channels] =[ literal[string] ], identifier[top_n_picks] = literal[int] )
keyword[for] identifier[pick] keyword[in] identifier[catalog] [ literal[int] ]. identifier[picks] :
keyword[if] identifier[pick] . identifier[waveform_id] . identifier[station_code] == literal[string] keyword[and] identifier[pick] . identifier[onset] == literal[string] :
identifier[catalog] [ literal[int] ]. identifier[picks] . identifier[remove] ( identifier[pick] )
identifier[print] ( literal[string] )
identifier[templates] = identifier[template_gen] . identifier[from_client] (
identifier[catalog] = identifier[catalog] , identifier[client_id] = literal[string] , identifier[lowcut] = literal[int] , identifier[highcut] = literal[int] ,
identifier[samp_rate] = literal[int] , identifier[filt_order] = literal[int] , identifier[length] = literal[int] , identifier[prepick] = literal[int] ,
identifier[swin] = literal[string] , identifier[process_len] = literal[int] )
identifier[start_time] = identifier[UTCDateTime] ( literal[int] , literal[int] , literal[int] , literal[int] )
identifier[end_time] = identifier[UTCDateTime] ( literal[int] , literal[int] , literal[int] , literal[int] )
identifier[process_len] = literal[int]
identifier[chunks] =[]
identifier[chunk_start] = identifier[start_time]
keyword[while] identifier[chunk_start] < identifier[end_time] :
identifier[chunk_end] = identifier[chunk_start] + identifier[process_len]
keyword[if] identifier[chunk_end] > identifier[end_time] :
identifier[chunk_end] = identifier[end_time]
identifier[chunks] . identifier[append] (( identifier[chunk_start] , identifier[chunk_end] ))
identifier[chunk_start] += identifier[process_len]
identifier[all_detections] =[]
identifier[picked_catalog] = identifier[Catalog] ()
identifier[template_names] =[ identifier[str] ( identifier[template] [ literal[int] ]. identifier[stats] . identifier[starttime] )
keyword[for] identifier[template] keyword[in] identifier[templates] ]
keyword[for] identifier[t1] , identifier[t2] keyword[in] identifier[chunks] :
identifier[print] ( literal[string] % identifier[t1] )
identifier[bulk_info] =[( identifier[tr] . identifier[stats] . identifier[network] , identifier[tr] . identifier[stats] . identifier[station] , literal[string] ,
identifier[tr] . identifier[stats] . identifier[channel] , identifier[t1] , identifier[t2] ) keyword[for] identifier[tr] keyword[in] identifier[templates] [ literal[int] ]]
identifier[st] = identifier[client] . identifier[get_waveforms_bulk] ( identifier[bulk_info] )
identifier[st] . identifier[merge] ( identifier[fill_value] = literal[string] )
identifier[st] = identifier[pre_processing] . identifier[shortproc] (
identifier[st] , identifier[lowcut] = literal[int] , identifier[highcut] = literal[int] , identifier[filt_order] = literal[int] , identifier[samp_rate] = literal[int] ,
identifier[debug] = literal[int] , identifier[num_cores] = identifier[num_cores] )
identifier[detections] = identifier[match_filter] . identifier[match_filter] (
identifier[template_names] = identifier[template_names] , identifier[template_list] = identifier[templates] , identifier[st] = identifier[st] ,
identifier[threshold] = literal[int] , identifier[threshold_type] = literal[string] , identifier[trig_int] = literal[int] , identifier[plotvar] = keyword[False] ,
identifier[plotdir] = literal[string] , identifier[cores] = identifier[num_cores] )
identifier[unique_detections] =[]
keyword[for] identifier[master] keyword[in] identifier[detections] :
identifier[keep] = keyword[True]
keyword[for] identifier[slave] keyword[in] identifier[detections] :
keyword[if] keyword[not] identifier[master] == identifier[slave] keyword[and] identifier[abs] ( identifier[master] . identifier[detect_time] - identifier[slave] . identifier[detect_time] )<= literal[int] :
keyword[if] keyword[not] identifier[master] . identifier[detect_val] > identifier[slave] . identifier[detect_val] :
identifier[keep] = keyword[False]
keyword[break]
keyword[if] identifier[keep] :
identifier[unique_detections] . identifier[append] ( identifier[master] )
identifier[all_detections] += identifier[unique_detections]
identifier[picked_catalog] += identifier[lag_calc] . identifier[lag_calc] (
identifier[detections] = identifier[unique_detections] , identifier[detect_data] = identifier[st] ,
identifier[template_names] = identifier[template_names] , identifier[templates] = identifier[templates] ,
identifier[shift_len] = identifier[shift_len] , identifier[min_cc] = identifier[min_cc] , identifier[interpolate] = keyword[False] , identifier[plot] = keyword[False] ,
identifier[parallel] = keyword[True] , identifier[debug] = literal[int] )
keyword[return] identifier[all_detections] , identifier[picked_catalog] , identifier[templates] , identifier[template_names] | def run_tutorial(min_magnitude=2, shift_len=0.2, num_cores=4, min_cc=0.5):
"""Functional, tested example script for running the lag-calc tutorial."""
if num_cores > cpu_count():
num_cores = cpu_count() # depends on [control=['if'], data=['num_cores']]
client = Client('NCEDC')
t1 = UTCDateTime(2004, 9, 28)
t2 = t1 + 86400
print('Downloading catalog')
catalog = client.get_events(starttime=t1, endtime=t2, minmagnitude=min_magnitude, minlatitude=35.7, maxlatitude=36.1, minlongitude=-120.6, maxlongitude=-120.2, includearrivals=True)
# We don't need all the picks, lets take the information from the
# five most used stations - note that this is done to reduce computational
# costs.
catalog = catalog_utils.filter_picks(catalog, channels=['EHZ'], top_n_picks=5)
# There is a duplicate pick in event 3 in the catalog - this has the effect
# of reducing our detections - check it yourself.
for pick in catalog[3].picks:
if pick.waveform_id.station_code == 'PHOB' and pick.onset == 'emergent':
catalog[3].picks.remove(pick) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pick']]
print('Generating templates')
templates = template_gen.from_client(catalog=catalog, client_id='NCEDC', lowcut=2.0, highcut=9.0, samp_rate=50.0, filt_order=4, length=3.0, prepick=0.15, swin='all', process_len=3600)
# In this section we generate a series of chunks of data.
start_time = UTCDateTime(2004, 9, 28, 17)
end_time = UTCDateTime(2004, 9, 28, 20)
process_len = 3600
chunks = []
chunk_start = start_time
while chunk_start < end_time:
chunk_end = chunk_start + process_len
if chunk_end > end_time:
chunk_end = end_time # depends on [control=['if'], data=['chunk_end', 'end_time']]
chunks.append((chunk_start, chunk_end))
chunk_start += process_len # depends on [control=['while'], data=['chunk_start', 'end_time']]
all_detections = []
picked_catalog = Catalog()
template_names = [str(template[0].stats.starttime) for template in templates]
for (t1, t2) in chunks:
print('Downloading and processing for start-time: %s' % t1)
# Download and process the data
bulk_info = [(tr.stats.network, tr.stats.station, '*', tr.stats.channel, t1, t2) for tr in templates[0]]
# Just downloading a chunk of data
st = client.get_waveforms_bulk(bulk_info)
st.merge(fill_value='interpolate')
st = pre_processing.shortproc(st, lowcut=2.0, highcut=9.0, filt_order=4, samp_rate=50.0, debug=0, num_cores=num_cores)
detections = match_filter.match_filter(template_names=template_names, template_list=templates, st=st, threshold=8.0, threshold_type='MAD', trig_int=6.0, plotvar=False, plotdir='.', cores=num_cores)
# Extract unique detections from set.
unique_detections = []
for master in detections:
keep = True
for slave in detections:
if not master == slave and abs(master.detect_time - slave.detect_time) <= 1.0:
# If the events are within 1s of each other then test which
# was the 'best' match, strongest detection
if not master.detect_val > slave.detect_val:
keep = False
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['slave']]
if keep:
unique_detections.append(master) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['master']]
all_detections += unique_detections
picked_catalog += lag_calc.lag_calc(detections=unique_detections, detect_data=st, template_names=template_names, templates=templates, shift_len=shift_len, min_cc=min_cc, interpolate=False, plot=False, parallel=True, debug=3) # depends on [control=['for'], data=[]]
# Return all of this so that we can use this function for testing.
return (all_detections, picked_catalog, templates, template_names) |
def adjustHeadingPointer(self):
'''Adjust the value of the heading pointer.'''
self.headingText.set_text(str(self.heading))
self.headingText.set_size(self.fontSize) | def function[adjustHeadingPointer, parameter[self]]:
constant[Adjust the value of the heading pointer.]
call[name[self].headingText.set_text, parameter[call[name[str], parameter[name[self].heading]]]]
call[name[self].headingText.set_size, parameter[name[self].fontSize]] | keyword[def] identifier[adjustHeadingPointer] ( identifier[self] ):
literal[string]
identifier[self] . identifier[headingText] . identifier[set_text] ( identifier[str] ( identifier[self] . identifier[heading] ))
identifier[self] . identifier[headingText] . identifier[set_size] ( identifier[self] . identifier[fontSize] ) | def adjustHeadingPointer(self):
"""Adjust the value of the heading pointer."""
self.headingText.set_text(str(self.heading))
self.headingText.set_size(self.fontSize) |
def realloc(self, ptr, size):
"""
A somewhat faithful implementation of libc `realloc`.
:param ptr: the location in memory to be reallocated
:param size: the new size desired for the allocation
:returns: the address of the allocation, or a NULL pointer if the allocation was freed or if no new allocation
was made
"""
raise NotImplementedError("%s not implemented for %s" % (self.realloc.__func__.__name__,
self.__class__.__name__)) | def function[realloc, parameter[self, ptr, size]]:
constant[
A somewhat faithful implementation of libc `realloc`.
:param ptr: the location in memory to be reallocated
:param size: the new size desired for the allocation
:returns: the address of the allocation, or a NULL pointer if the allocation was freed or if no new allocation
was made
]
<ast.Raise object at 0x7da1b1c32fb0> | keyword[def] identifier[realloc] ( identifier[self] , identifier[ptr] , identifier[size] ):
literal[string]
keyword[raise] identifier[NotImplementedError] ( literal[string] %( identifier[self] . identifier[realloc] . identifier[__func__] . identifier[__name__] ,
identifier[self] . identifier[__class__] . identifier[__name__] )) | def realloc(self, ptr, size):
"""
A somewhat faithful implementation of libc `realloc`.
:param ptr: the location in memory to be reallocated
:param size: the new size desired for the allocation
:returns: the address of the allocation, or a NULL pointer if the allocation was freed or if no new allocation
was made
"""
raise NotImplementedError('%s not implemented for %s' % (self.realloc.__func__.__name__, self.__class__.__name__)) |
def sign(payload, key, headers=None, algorithm=ALGORITHMS.HS256):
"""Signs a claims set and returns a JWS string.
Args:
payload (str): A string to sign
key (str or dict): The key to use for signing the claim set. Can be
individual JWK or JWK set.
headers (dict, optional): A set of headers that will be added to
the default headers. Any headers that are added as additional
headers will override the default headers.
algorithm (str, optional): The algorithm to use for signing the
the claims. Defaults to HS256.
Returns:
str: The string representation of the header, claims, and signature.
Raises:
JWSError: If there is an error signing the token.
Examples:
>>> jws.sign({'a': 'b'}, 'secret', algorithm='HS256')
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'
"""
if algorithm not in ALGORITHMS.SUPPORTED:
raise JWSError('Algorithm %s not supported.' % algorithm)
encoded_header = _encode_header(algorithm, additional_headers=headers)
encoded_payload = _encode_payload(payload)
signed_output = _sign_header_and_claims(encoded_header, encoded_payload, algorithm, key)
return signed_output | def function[sign, parameter[payload, key, headers, algorithm]]:
constant[Signs a claims set and returns a JWS string.
Args:
payload (str): A string to sign
key (str or dict): The key to use for signing the claim set. Can be
individual JWK or JWK set.
headers (dict, optional): A set of headers that will be added to
the default headers. Any headers that are added as additional
headers will override the default headers.
algorithm (str, optional): The algorithm to use for signing the
the claims. Defaults to HS256.
Returns:
str: The string representation of the header, claims, and signature.
Raises:
JWSError: If there is an error signing the token.
Examples:
>>> jws.sign({'a': 'b'}, 'secret', algorithm='HS256')
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'
]
if compare[name[algorithm] <ast.NotIn object at 0x7da2590d7190> name[ALGORITHMS].SUPPORTED] begin[:]
<ast.Raise object at 0x7da18c4cfd30>
variable[encoded_header] assign[=] call[name[_encode_header], parameter[name[algorithm]]]
variable[encoded_payload] assign[=] call[name[_encode_payload], parameter[name[payload]]]
variable[signed_output] assign[=] call[name[_sign_header_and_claims], parameter[name[encoded_header], name[encoded_payload], name[algorithm], name[key]]]
return[name[signed_output]] | keyword[def] identifier[sign] ( identifier[payload] , identifier[key] , identifier[headers] = keyword[None] , identifier[algorithm] = identifier[ALGORITHMS] . identifier[HS256] ):
literal[string]
keyword[if] identifier[algorithm] keyword[not] keyword[in] identifier[ALGORITHMS] . identifier[SUPPORTED] :
keyword[raise] identifier[JWSError] ( literal[string] % identifier[algorithm] )
identifier[encoded_header] = identifier[_encode_header] ( identifier[algorithm] , identifier[additional_headers] = identifier[headers] )
identifier[encoded_payload] = identifier[_encode_payload] ( identifier[payload] )
identifier[signed_output] = identifier[_sign_header_and_claims] ( identifier[encoded_header] , identifier[encoded_payload] , identifier[algorithm] , identifier[key] )
keyword[return] identifier[signed_output] | def sign(payload, key, headers=None, algorithm=ALGORITHMS.HS256):
"""Signs a claims set and returns a JWS string.
Args:
payload (str): A string to sign
key (str or dict): The key to use for signing the claim set. Can be
individual JWK or JWK set.
headers (dict, optional): A set of headers that will be added to
the default headers. Any headers that are added as additional
headers will override the default headers.
algorithm (str, optional): The algorithm to use for signing the
the claims. Defaults to HS256.
Returns:
str: The string representation of the header, claims, and signature.
Raises:
JWSError: If there is an error signing the token.
Examples:
>>> jws.sign({'a': 'b'}, 'secret', algorithm='HS256')
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'
"""
if algorithm not in ALGORITHMS.SUPPORTED:
raise JWSError('Algorithm %s not supported.' % algorithm) # depends on [control=['if'], data=['algorithm']]
encoded_header = _encode_header(algorithm, additional_headers=headers)
encoded_payload = _encode_payload(payload)
signed_output = _sign_header_and_claims(encoded_header, encoded_payload, algorithm, key)
return signed_output |
def convert_label_indexer(index, label, index_name='', method=None,
tolerance=None):
"""Given a pandas.Index and labels (e.g., from __getitem__) for one
dimension, return an indexer suitable for indexing an ndarray along that
dimension. If `index` is a pandas.MultiIndex and depending on `label`,
return a new pandas.Index or pandas.MultiIndex (otherwise return None).
"""
new_index = None
if isinstance(label, slice):
if method is not None or tolerance is not None:
raise NotImplementedError(
'cannot use ``method`` argument if any indexers are '
'slice objects')
indexer = index.slice_indexer(_sanitize_slice_element(label.start),
_sanitize_slice_element(label.stop),
_sanitize_slice_element(label.step))
if not isinstance(indexer, slice):
# unlike pandas, in xarray we never want to silently convert a
# slice indexer into an array indexer
raise KeyError('cannot represent labeled-based slice indexer for '
'dimension %r with a slice over integer positions; '
'the index is unsorted or non-unique' % index_name)
elif is_dict_like(label):
is_nested_vals = _is_nested_tuple(tuple(label.values()))
if not isinstance(index, pd.MultiIndex):
raise ValueError('cannot use a dict-like object for selection on '
'a dimension that does not have a MultiIndex')
elif len(label) == index.nlevels and not is_nested_vals:
indexer = index.get_loc(tuple((label[k] for k in index.names)))
else:
for k, v in label.items():
# index should be an item (i.e. Hashable) not an array-like
if not isinstance(v, Hashable):
raise ValueError('Vectorized selection is not '
'available along level variable: ' + k)
indexer, new_index = index.get_loc_level(
tuple(label.values()), level=tuple(label.keys()))
# GH2619. Raise a KeyError if nothing is chosen
if indexer.dtype.kind == 'b' and indexer.sum() == 0:
raise KeyError('{} not found'.format(label))
elif isinstance(label, tuple) and isinstance(index, pd.MultiIndex):
if _is_nested_tuple(label):
indexer = index.get_locs(label)
elif len(label) == index.nlevels:
indexer = index.get_loc(label)
else:
indexer, new_index = index.get_loc_level(
label, level=list(range(len(label)))
)
else:
label = (label if getattr(label, 'ndim', 1) > 1 # vectorized-indexing
else _asarray_tuplesafe(label))
if label.ndim == 0:
if isinstance(index, pd.MultiIndex):
indexer, new_index = index.get_loc_level(label.item(), level=0)
else:
indexer = get_loc(index, label.item(), method, tolerance)
elif label.dtype.kind == 'b':
indexer = label
else:
if isinstance(index, pd.MultiIndex) and label.ndim > 1:
raise ValueError('Vectorized selection is not available along '
'MultiIndex variable: ' + index_name)
indexer = get_indexer_nd(index, label, method, tolerance)
if np.any(indexer < 0):
raise KeyError('not all values found in index %r'
% index_name)
return indexer, new_index | def function[convert_label_indexer, parameter[index, label, index_name, method, tolerance]]:
constant[Given a pandas.Index and labels (e.g., from __getitem__) for one
dimension, return an indexer suitable for indexing an ndarray along that
dimension. If `index` is a pandas.MultiIndex and depending on `label`,
return a new pandas.Index or pandas.MultiIndex (otherwise return None).
]
variable[new_index] assign[=] constant[None]
if call[name[isinstance], parameter[name[label], name[slice]]] begin[:]
if <ast.BoolOp object at 0x7da1b1c12c80> begin[:]
<ast.Raise object at 0x7da1b1c12320>
variable[indexer] assign[=] call[name[index].slice_indexer, parameter[call[name[_sanitize_slice_element], parameter[name[label].start]], call[name[_sanitize_slice_element], parameter[name[label].stop]], call[name[_sanitize_slice_element], parameter[name[label].step]]]]
if <ast.UnaryOp object at 0x7da1b1c124a0> begin[:]
<ast.Raise object at 0x7da1b1c10eb0>
return[tuple[[<ast.Name object at 0x7da20e954850>, <ast.Name object at 0x7da20e954310>]]] | keyword[def] identifier[convert_label_indexer] ( identifier[index] , identifier[label] , identifier[index_name] = literal[string] , identifier[method] = keyword[None] ,
identifier[tolerance] = keyword[None] ):
literal[string]
identifier[new_index] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[label] , identifier[slice] ):
keyword[if] identifier[method] keyword[is] keyword[not] keyword[None] keyword[or] identifier[tolerance] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[NotImplementedError] (
literal[string]
literal[string] )
identifier[indexer] = identifier[index] . identifier[slice_indexer] ( identifier[_sanitize_slice_element] ( identifier[label] . identifier[start] ),
identifier[_sanitize_slice_element] ( identifier[label] . identifier[stop] ),
identifier[_sanitize_slice_element] ( identifier[label] . identifier[step] ))
keyword[if] keyword[not] identifier[isinstance] ( identifier[indexer] , identifier[slice] ):
keyword[raise] identifier[KeyError] ( literal[string]
literal[string]
literal[string] % identifier[index_name] )
keyword[elif] identifier[is_dict_like] ( identifier[label] ):
identifier[is_nested_vals] = identifier[_is_nested_tuple] ( identifier[tuple] ( identifier[label] . identifier[values] ()))
keyword[if] keyword[not] identifier[isinstance] ( identifier[index] , identifier[pd] . identifier[MultiIndex] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[elif] identifier[len] ( identifier[label] )== identifier[index] . identifier[nlevels] keyword[and] keyword[not] identifier[is_nested_vals] :
identifier[indexer] = identifier[index] . identifier[get_loc] ( identifier[tuple] (( identifier[label] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[index] . identifier[names] )))
keyword[else] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[label] . identifier[items] ():
keyword[if] keyword[not] identifier[isinstance] ( identifier[v] , identifier[Hashable] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] + identifier[k] )
identifier[indexer] , identifier[new_index] = identifier[index] . identifier[get_loc_level] (
identifier[tuple] ( identifier[label] . identifier[values] ()), identifier[level] = identifier[tuple] ( identifier[label] . identifier[keys] ()))
keyword[if] identifier[indexer] . identifier[dtype] . identifier[kind] == literal[string] keyword[and] identifier[indexer] . identifier[sum] ()== literal[int] :
keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[label] ))
keyword[elif] identifier[isinstance] ( identifier[label] , identifier[tuple] ) keyword[and] identifier[isinstance] ( identifier[index] , identifier[pd] . identifier[MultiIndex] ):
keyword[if] identifier[_is_nested_tuple] ( identifier[label] ):
identifier[indexer] = identifier[index] . identifier[get_locs] ( identifier[label] )
keyword[elif] identifier[len] ( identifier[label] )== identifier[index] . identifier[nlevels] :
identifier[indexer] = identifier[index] . identifier[get_loc] ( identifier[label] )
keyword[else] :
identifier[indexer] , identifier[new_index] = identifier[index] . identifier[get_loc_level] (
identifier[label] , identifier[level] = identifier[list] ( identifier[range] ( identifier[len] ( identifier[label] )))
)
keyword[else] :
identifier[label] =( identifier[label] keyword[if] identifier[getattr] ( identifier[label] , literal[string] , literal[int] )> literal[int]
keyword[else] identifier[_asarray_tuplesafe] ( identifier[label] ))
keyword[if] identifier[label] . identifier[ndim] == literal[int] :
keyword[if] identifier[isinstance] ( identifier[index] , identifier[pd] . identifier[MultiIndex] ):
identifier[indexer] , identifier[new_index] = identifier[index] . identifier[get_loc_level] ( identifier[label] . identifier[item] (), identifier[level] = literal[int] )
keyword[else] :
identifier[indexer] = identifier[get_loc] ( identifier[index] , identifier[label] . identifier[item] (), identifier[method] , identifier[tolerance] )
keyword[elif] identifier[label] . identifier[dtype] . identifier[kind] == literal[string] :
identifier[indexer] = identifier[label]
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[index] , identifier[pd] . identifier[MultiIndex] ) keyword[and] identifier[label] . identifier[ndim] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] + identifier[index_name] )
identifier[indexer] = identifier[get_indexer_nd] ( identifier[index] , identifier[label] , identifier[method] , identifier[tolerance] )
keyword[if] identifier[np] . identifier[any] ( identifier[indexer] < literal[int] ):
keyword[raise] identifier[KeyError] ( literal[string]
% identifier[index_name] )
keyword[return] identifier[indexer] , identifier[new_index] | def convert_label_indexer(index, label, index_name='', method=None, tolerance=None):
"""Given a pandas.Index and labels (e.g., from __getitem__) for one
dimension, return an indexer suitable for indexing an ndarray along that
dimension. If `index` is a pandas.MultiIndex and depending on `label`,
return a new pandas.Index or pandas.MultiIndex (otherwise return None).
"""
new_index = None
if isinstance(label, slice):
if method is not None or tolerance is not None:
raise NotImplementedError('cannot use ``method`` argument if any indexers are slice objects') # depends on [control=['if'], data=[]]
indexer = index.slice_indexer(_sanitize_slice_element(label.start), _sanitize_slice_element(label.stop), _sanitize_slice_element(label.step))
if not isinstance(indexer, slice):
# unlike pandas, in xarray we never want to silently convert a
# slice indexer into an array indexer
raise KeyError('cannot represent labeled-based slice indexer for dimension %r with a slice over integer positions; the index is unsorted or non-unique' % index_name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif is_dict_like(label):
is_nested_vals = _is_nested_tuple(tuple(label.values()))
if not isinstance(index, pd.MultiIndex):
raise ValueError('cannot use a dict-like object for selection on a dimension that does not have a MultiIndex') # depends on [control=['if'], data=[]]
elif len(label) == index.nlevels and (not is_nested_vals):
indexer = index.get_loc(tuple((label[k] for k in index.names))) # depends on [control=['if'], data=[]]
else:
for (k, v) in label.items():
# index should be an item (i.e. Hashable) not an array-like
if not isinstance(v, Hashable):
raise ValueError('Vectorized selection is not available along level variable: ' + k) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
(indexer, new_index) = index.get_loc_level(tuple(label.values()), level=tuple(label.keys()))
# GH2619. Raise a KeyError if nothing is chosen
if indexer.dtype.kind == 'b' and indexer.sum() == 0:
raise KeyError('{} not found'.format(label)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(label, tuple) and isinstance(index, pd.MultiIndex):
if _is_nested_tuple(label):
indexer = index.get_locs(label) # depends on [control=['if'], data=[]]
elif len(label) == index.nlevels:
indexer = index.get_loc(label) # depends on [control=['if'], data=[]]
else:
(indexer, new_index) = index.get_loc_level(label, level=list(range(len(label)))) # depends on [control=['if'], data=[]]
else: # vectorized-indexing
label = label if getattr(label, 'ndim', 1) > 1 else _asarray_tuplesafe(label)
if label.ndim == 0:
if isinstance(index, pd.MultiIndex):
(indexer, new_index) = index.get_loc_level(label.item(), level=0) # depends on [control=['if'], data=[]]
else:
indexer = get_loc(index, label.item(), method, tolerance) # depends on [control=['if'], data=[]]
elif label.dtype.kind == 'b':
indexer = label # depends on [control=['if'], data=[]]
else:
if isinstance(index, pd.MultiIndex) and label.ndim > 1:
raise ValueError('Vectorized selection is not available along MultiIndex variable: ' + index_name) # depends on [control=['if'], data=[]]
indexer = get_indexer_nd(index, label, method, tolerance)
if np.any(indexer < 0):
raise KeyError('not all values found in index %r' % index_name) # depends on [control=['if'], data=[]]
return (indexer, new_index) |
def get_remote_node(self):
"""Return the remote node that it's executing the actual test session
:returns: tuple with server type (local, grid, ggr, selenium) and remote node name
"""
logging.getLogger("requests").setLevel(logging.WARNING)
remote_node = None
server_type = 'local'
if self.driver_wrapper.config.getboolean_optional('Server', 'enabled'):
# Request session info from grid hub
session_id = self.driver_wrapper.driver.session_id
self.logger.debug("Trying to identify remote node")
try:
# Request session info from grid hub and extract remote node
url = '{}/grid/api/testsession?session={}'.format(self.get_server_url(),
session_id)
proxy_id = requests.get(url).json()['proxyId']
remote_node = urlparse(proxy_id).hostname if urlparse(proxy_id).hostname else proxy_id
server_type = 'grid'
self.logger.debug("Test running in remote node %s", remote_node)
except (ValueError, KeyError):
try:
# Request session info from GGR and extract remote node
from toolium.selenoid import Selenoid
remote_node = Selenoid(self.driver_wrapper).get_selenoid_info()['Name']
server_type = 'ggr'
self.logger.debug("Test running in a GGR remote node %s", remote_node)
except Exception:
try:
# The remote node is a Selenoid node
url = '{}/status'.format(self.get_server_url())
requests.get(url).json()['total']
remote_node = self.driver_wrapper.config.get('Server', 'host')
server_type = 'selenoid'
self.logger.debug("Test running in a Selenoid node %s", remote_node)
except Exception:
# The remote node is not a grid node or the session has been closed
remote_node = self.driver_wrapper.config.get('Server', 'host')
server_type = 'selenium'
self.logger.debug("Test running in a Selenium node %s", remote_node)
return server_type, remote_node | def function[get_remote_node, parameter[self]]:
constant[Return the remote node that it's executing the actual test session
:returns: tuple with server type (local, grid, ggr, selenium) and remote node name
]
call[call[name[logging].getLogger, parameter[constant[requests]]].setLevel, parameter[name[logging].WARNING]]
variable[remote_node] assign[=] constant[None]
variable[server_type] assign[=] constant[local]
if call[name[self].driver_wrapper.config.getboolean_optional, parameter[constant[Server], constant[enabled]]] begin[:]
variable[session_id] assign[=] name[self].driver_wrapper.driver.session_id
call[name[self].logger.debug, parameter[constant[Trying to identify remote node]]]
<ast.Try object at 0x7da20c76f040>
return[tuple[[<ast.Name object at 0x7da1b230a320>, <ast.Name object at 0x7da1b230abc0>]]] | keyword[def] identifier[get_remote_node] ( identifier[self] ):
literal[string]
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[WARNING] )
identifier[remote_node] = keyword[None]
identifier[server_type] = literal[string]
keyword[if] identifier[self] . identifier[driver_wrapper] . identifier[config] . identifier[getboolean_optional] ( literal[string] , literal[string] ):
identifier[session_id] = identifier[self] . identifier[driver_wrapper] . identifier[driver] . identifier[session_id]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[try] :
identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[get_server_url] (),
identifier[session_id] )
identifier[proxy_id] = identifier[requests] . identifier[get] ( identifier[url] ). identifier[json] ()[ literal[string] ]
identifier[remote_node] = identifier[urlparse] ( identifier[proxy_id] ). identifier[hostname] keyword[if] identifier[urlparse] ( identifier[proxy_id] ). identifier[hostname] keyword[else] identifier[proxy_id]
identifier[server_type] = literal[string]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[remote_node] )
keyword[except] ( identifier[ValueError] , identifier[KeyError] ):
keyword[try] :
keyword[from] identifier[toolium] . identifier[selenoid] keyword[import] identifier[Selenoid]
identifier[remote_node] = identifier[Selenoid] ( identifier[self] . identifier[driver_wrapper] ). identifier[get_selenoid_info] ()[ literal[string] ]
identifier[server_type] = literal[string]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[remote_node] )
keyword[except] identifier[Exception] :
keyword[try] :
identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[get_server_url] ())
identifier[requests] . identifier[get] ( identifier[url] ). identifier[json] ()[ literal[string] ]
identifier[remote_node] = identifier[self] . identifier[driver_wrapper] . identifier[config] . identifier[get] ( literal[string] , literal[string] )
identifier[server_type] = literal[string]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[remote_node] )
keyword[except] identifier[Exception] :
identifier[remote_node] = identifier[self] . identifier[driver_wrapper] . identifier[config] . identifier[get] ( literal[string] , literal[string] )
identifier[server_type] = literal[string]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[remote_node] )
keyword[return] identifier[server_type] , identifier[remote_node] | def get_remote_node(self):
"""Return the remote node that it's executing the actual test session
:returns: tuple with server type (local, grid, ggr, selenium) and remote node name
"""
logging.getLogger('requests').setLevel(logging.WARNING)
remote_node = None
server_type = 'local'
if self.driver_wrapper.config.getboolean_optional('Server', 'enabled'):
# Request session info from grid hub
session_id = self.driver_wrapper.driver.session_id
self.logger.debug('Trying to identify remote node')
try:
# Request session info from grid hub and extract remote node
url = '{}/grid/api/testsession?session={}'.format(self.get_server_url(), session_id)
proxy_id = requests.get(url).json()['proxyId']
remote_node = urlparse(proxy_id).hostname if urlparse(proxy_id).hostname else proxy_id
server_type = 'grid'
self.logger.debug('Test running in remote node %s', remote_node) # depends on [control=['try'], data=[]]
except (ValueError, KeyError):
try:
# Request session info from GGR and extract remote node
from toolium.selenoid import Selenoid
remote_node = Selenoid(self.driver_wrapper).get_selenoid_info()['Name']
server_type = 'ggr'
self.logger.debug('Test running in a GGR remote node %s', remote_node) # depends on [control=['try'], data=[]]
except Exception:
try:
# The remote node is a Selenoid node
url = '{}/status'.format(self.get_server_url())
requests.get(url).json()['total']
remote_node = self.driver_wrapper.config.get('Server', 'host')
server_type = 'selenoid'
self.logger.debug('Test running in a Selenoid node %s', remote_node) # depends on [control=['try'], data=[]]
except Exception:
# The remote node is not a grid node or the session has been closed
remote_node = self.driver_wrapper.config.get('Server', 'host')
server_type = 'selenium'
self.logger.debug('Test running in a Selenium node %s', remote_node) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return (server_type, remote_node) |
def run(self):
"""Fetch remote code."""
link = self.content[0]
try:
r = requests.get(link)
r.raise_for_status()
self.content = [r.text]
return super(RemoteCodeBlock, self).run()
except Exception:
document = self.state.document
err = 'Unable to resolve ' + link
return [document.reporter.warning(str(err), line=self.lineno)] | def function[run, parameter[self]]:
constant[Fetch remote code.]
variable[link] assign[=] call[name[self].content][constant[0]]
<ast.Try object at 0x7da2054a7400> | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[link] = identifier[self] . identifier[content] [ literal[int] ]
keyword[try] :
identifier[r] = identifier[requests] . identifier[get] ( identifier[link] )
identifier[r] . identifier[raise_for_status] ()
identifier[self] . identifier[content] =[ identifier[r] . identifier[text] ]
keyword[return] identifier[super] ( identifier[RemoteCodeBlock] , identifier[self] ). identifier[run] ()
keyword[except] identifier[Exception] :
identifier[document] = identifier[self] . identifier[state] . identifier[document]
identifier[err] = literal[string] + identifier[link]
keyword[return] [ identifier[document] . identifier[reporter] . identifier[warning] ( identifier[str] ( identifier[err] ), identifier[line] = identifier[self] . identifier[lineno] )] | def run(self):
"""Fetch remote code."""
link = self.content[0]
try:
r = requests.get(link)
r.raise_for_status()
self.content = [r.text]
return super(RemoteCodeBlock, self).run() # depends on [control=['try'], data=[]]
except Exception:
document = self.state.document
err = 'Unable to resolve ' + link
return [document.reporter.warning(str(err), line=self.lineno)] # depends on [control=['except'], data=[]] |
def _onMenuItemSelected(self, evt):
"""Called whenever one of the specific axis menu items is selected"""
current = self._menu.IsChecked(evt.GetId())
if current:
new = False
else:
new = True
self._menu.Check(evt.GetId(), new)
# Lines above would be deleted based on svn tracker ID 2841525;
# not clear whether this matters or not.
self._toolbar.set_active(self.getActiveAxes())
evt.Skip() | def function[_onMenuItemSelected, parameter[self, evt]]:
constant[Called whenever one of the specific axis menu items is selected]
variable[current] assign[=] call[name[self]._menu.IsChecked, parameter[call[name[evt].GetId, parameter[]]]]
if name[current] begin[:]
variable[new] assign[=] constant[False]
call[name[self]._menu.Check, parameter[call[name[evt].GetId, parameter[]], name[new]]]
call[name[self]._toolbar.set_active, parameter[call[name[self].getActiveAxes, parameter[]]]]
call[name[evt].Skip, parameter[]] | keyword[def] identifier[_onMenuItemSelected] ( identifier[self] , identifier[evt] ):
literal[string]
identifier[current] = identifier[self] . identifier[_menu] . identifier[IsChecked] ( identifier[evt] . identifier[GetId] ())
keyword[if] identifier[current] :
identifier[new] = keyword[False]
keyword[else] :
identifier[new] = keyword[True]
identifier[self] . identifier[_menu] . identifier[Check] ( identifier[evt] . identifier[GetId] (), identifier[new] )
identifier[self] . identifier[_toolbar] . identifier[set_active] ( identifier[self] . identifier[getActiveAxes] ())
identifier[evt] . identifier[Skip] () | def _onMenuItemSelected(self, evt):
"""Called whenever one of the specific axis menu items is selected"""
current = self._menu.IsChecked(evt.GetId())
if current:
new = False # depends on [control=['if'], data=[]]
else:
new = True
self._menu.Check(evt.GetId(), new)
# Lines above would be deleted based on svn tracker ID 2841525;
# not clear whether this matters or not.
self._toolbar.set_active(self.getActiveAxes())
evt.Skip() |
def summarize(self):
"""Summarize game."""
if not self._achievements_summarized:
for _ in self.operations():
pass
self._summarize()
return self._summary | def function[summarize, parameter[self]]:
constant[Summarize game.]
if <ast.UnaryOp object at 0x7da1b252ee90> begin[:]
for taget[name[_]] in starred[call[name[self].operations, parameter[]]] begin[:]
pass
call[name[self]._summarize, parameter[]]
return[name[self]._summary] | keyword[def] identifier[summarize] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_achievements_summarized] :
keyword[for] identifier[_] keyword[in] identifier[self] . identifier[operations] ():
keyword[pass]
identifier[self] . identifier[_summarize] ()
keyword[return] identifier[self] . identifier[_summary] | def summarize(self):
"""Summarize game."""
if not self._achievements_summarized:
for _ in self.operations():
pass # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
self._summarize()
return self._summary |
def _tls_decrypt(alg, c):
"""
Provided with a TLSCiphertext instance c, and a stream or block cipher alg,
the function decrypts c.data and returns a newly created TLSPlaintext.
"""
p = TLSPlaintext()
p.type = c.type
p.version = c.version
p.data = alg.decrypt(c.data)
p.len = len(p.data)
return p | def function[_tls_decrypt, parameter[alg, c]]:
constant[
Provided with a TLSCiphertext instance c, and a stream or block cipher alg,
the function decrypts c.data and returns a newly created TLSPlaintext.
]
variable[p] assign[=] call[name[TLSPlaintext], parameter[]]
name[p].type assign[=] name[c].type
name[p].version assign[=] name[c].version
name[p].data assign[=] call[name[alg].decrypt, parameter[name[c].data]]
name[p].len assign[=] call[name[len], parameter[name[p].data]]
return[name[p]] | keyword[def] identifier[_tls_decrypt] ( identifier[alg] , identifier[c] ):
literal[string]
identifier[p] = identifier[TLSPlaintext] ()
identifier[p] . identifier[type] = identifier[c] . identifier[type]
identifier[p] . identifier[version] = identifier[c] . identifier[version]
identifier[p] . identifier[data] = identifier[alg] . identifier[decrypt] ( identifier[c] . identifier[data] )
identifier[p] . identifier[len] = identifier[len] ( identifier[p] . identifier[data] )
keyword[return] identifier[p] | def _tls_decrypt(alg, c):
"""
Provided with a TLSCiphertext instance c, and a stream or block cipher alg,
the function decrypts c.data and returns a newly created TLSPlaintext.
"""
p = TLSPlaintext()
p.type = c.type
p.version = c.version
p.data = alg.decrypt(c.data)
p.len = len(p.data)
return p |
def _get_raw_xsrf_token(self) -> Tuple[Optional[int], bytes, float]:
"""Read or generate the xsrf token in its raw form.
The raw_xsrf_token is a tuple containing:
* version: the version of the cookie from which this token was read,
or None if we generated a new token in this request.
* token: the raw token data; random (non-ascii) bytes.
* timestamp: the time this token was generated (will not be accurate
for version 1 cookies)
"""
if not hasattr(self, "_raw_xsrf_token"):
cookie = self.get_cookie("_xsrf")
if cookie:
version, token, timestamp = self._decode_xsrf_token(cookie)
else:
version, token, timestamp = None, None, None
if token is None:
version = None
token = os.urandom(16)
timestamp = time.time()
assert token is not None
assert timestamp is not None
self._raw_xsrf_token = (version, token, timestamp)
return self._raw_xsrf_token | def function[_get_raw_xsrf_token, parameter[self]]:
constant[Read or generate the xsrf token in its raw form.
The raw_xsrf_token is a tuple containing:
* version: the version of the cookie from which this token was read,
or None if we generated a new token in this request.
* token: the raw token data; random (non-ascii) bytes.
* timestamp: the time this token was generated (will not be accurate
for version 1 cookies)
]
if <ast.UnaryOp object at 0x7da1b20cad70> begin[:]
variable[cookie] assign[=] call[name[self].get_cookie, parameter[constant[_xsrf]]]
if name[cookie] begin[:]
<ast.Tuple object at 0x7da1b20c8640> assign[=] call[name[self]._decode_xsrf_token, parameter[name[cookie]]]
if compare[name[token] is constant[None]] begin[:]
variable[version] assign[=] constant[None]
variable[token] assign[=] call[name[os].urandom, parameter[constant[16]]]
variable[timestamp] assign[=] call[name[time].time, parameter[]]
assert[compare[name[token] is_not constant[None]]]
assert[compare[name[timestamp] is_not constant[None]]]
name[self]._raw_xsrf_token assign[=] tuple[[<ast.Name object at 0x7da1b20cbca0>, <ast.Name object at 0x7da1b20cb7f0>, <ast.Name object at 0x7da1b20c8850>]]
return[name[self]._raw_xsrf_token] | keyword[def] identifier[_get_raw_xsrf_token] ( identifier[self] )-> identifier[Tuple] [ identifier[Optional] [ identifier[int] ], identifier[bytes] , identifier[float] ]:
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[cookie] = identifier[self] . identifier[get_cookie] ( literal[string] )
keyword[if] identifier[cookie] :
identifier[version] , identifier[token] , identifier[timestamp] = identifier[self] . identifier[_decode_xsrf_token] ( identifier[cookie] )
keyword[else] :
identifier[version] , identifier[token] , identifier[timestamp] = keyword[None] , keyword[None] , keyword[None]
keyword[if] identifier[token] keyword[is] keyword[None] :
identifier[version] = keyword[None]
identifier[token] = identifier[os] . identifier[urandom] ( literal[int] )
identifier[timestamp] = identifier[time] . identifier[time] ()
keyword[assert] identifier[token] keyword[is] keyword[not] keyword[None]
keyword[assert] identifier[timestamp] keyword[is] keyword[not] keyword[None]
identifier[self] . identifier[_raw_xsrf_token] =( identifier[version] , identifier[token] , identifier[timestamp] )
keyword[return] identifier[self] . identifier[_raw_xsrf_token] | def _get_raw_xsrf_token(self) -> Tuple[Optional[int], bytes, float]:
"""Read or generate the xsrf token in its raw form.
The raw_xsrf_token is a tuple containing:
* version: the version of the cookie from which this token was read,
or None if we generated a new token in this request.
* token: the raw token data; random (non-ascii) bytes.
* timestamp: the time this token was generated (will not be accurate
for version 1 cookies)
"""
if not hasattr(self, '_raw_xsrf_token'):
cookie = self.get_cookie('_xsrf')
if cookie:
(version, token, timestamp) = self._decode_xsrf_token(cookie) # depends on [control=['if'], data=[]]
else:
(version, token, timestamp) = (None, None, None)
if token is None:
version = None
token = os.urandom(16)
timestamp = time.time() # depends on [control=['if'], data=['token']]
assert token is not None
assert timestamp is not None
self._raw_xsrf_token = (version, token, timestamp) # depends on [control=['if'], data=[]]
return self._raw_xsrf_token |
def build_expression_values(dynamizer, expr_values, kwargs):
""" Build ExpresionAttributeValues from a value or kwargs """
if expr_values:
values = expr_values
return dynamizer.encode_keys(values)
elif kwargs:
values = dict(((':' + k, v) for k, v in six.iteritems(kwargs)))
return dynamizer.encode_keys(values) | def function[build_expression_values, parameter[dynamizer, expr_values, kwargs]]:
constant[ Build ExpresionAttributeValues from a value or kwargs ]
if name[expr_values] begin[:]
variable[values] assign[=] name[expr_values]
return[call[name[dynamizer].encode_keys, parameter[name[values]]]] | keyword[def] identifier[build_expression_values] ( identifier[dynamizer] , identifier[expr_values] , identifier[kwargs] ):
literal[string]
keyword[if] identifier[expr_values] :
identifier[values] = identifier[expr_values]
keyword[return] identifier[dynamizer] . identifier[encode_keys] ( identifier[values] )
keyword[elif] identifier[kwargs] :
identifier[values] = identifier[dict] ((( literal[string] + identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[kwargs] )))
keyword[return] identifier[dynamizer] . identifier[encode_keys] ( identifier[values] ) | def build_expression_values(dynamizer, expr_values, kwargs):
""" Build ExpresionAttributeValues from a value or kwargs """
if expr_values:
values = expr_values
return dynamizer.encode_keys(values) # depends on [control=['if'], data=[]]
elif kwargs:
values = dict(((':' + k, v) for (k, v) in six.iteritems(kwargs)))
return dynamizer.encode_keys(values) # depends on [control=['if'], data=[]] |
def WriteClientSnapshot(self, snapshot, cursor=None):
"""Write new client snapshot."""
insert_history_query = (
"INSERT INTO client_snapshot_history(client_id, timestamp, "
"client_snapshot) VALUES (%s, FROM_UNIXTIME(%s), %s)")
insert_startup_query = (
"INSERT INTO client_startup_history(client_id, timestamp, "
"startup_info) VALUES(%s, FROM_UNIXTIME(%s), %s)")
now = rdfvalue.RDFDatetime.Now()
client_platform = snapshot.knowledge_base.os
current_timestamp = mysql_utils.RDFDatetimeToTimestamp(now)
client_info = {
"last_snapshot_timestamp": current_timestamp,
"last_startup_timestamp": current_timestamp,
"last_version_string": snapshot.GetGRRVersionString(),
"last_platform_release": snapshot.Uname(),
}
update_clauses = [
"last_snapshot_timestamp = FROM_UNIXTIME(%(last_snapshot_timestamp)s)",
"last_startup_timestamp = FROM_UNIXTIME(%(last_startup_timestamp)s)",
"last_version_string = %(last_version_string)s",
"last_platform_release = %(last_platform_release)s",
]
if client_platform:
client_info["last_platform"] = client_platform
update_clauses.append("last_platform = %(last_platform)s")
update_query = (
"UPDATE clients SET {} WHERE client_id = %(client_id)s".format(
", ".join(update_clauses)))
int_client_id = db_utils.ClientIDToInt(snapshot.client_id)
client_info["client_id"] = int_client_id
startup_info = snapshot.startup_info
snapshot.startup_info = None
try:
cursor.execute(
insert_history_query,
(int_client_id, current_timestamp, snapshot.SerializeToString()))
cursor.execute(
insert_startup_query,
(int_client_id, current_timestamp, startup_info.SerializeToString()))
cursor.execute(update_query, client_info)
except MySQLdb.IntegrityError as e:
raise db.UnknownClientError(snapshot.client_id, cause=e)
finally:
snapshot.startup_info = startup_info | def function[WriteClientSnapshot, parameter[self, snapshot, cursor]]:
constant[Write new client snapshot.]
variable[insert_history_query] assign[=] constant[INSERT INTO client_snapshot_history(client_id, timestamp, client_snapshot) VALUES (%s, FROM_UNIXTIME(%s), %s)]
variable[insert_startup_query] assign[=] constant[INSERT INTO client_startup_history(client_id, timestamp, startup_info) VALUES(%s, FROM_UNIXTIME(%s), %s)]
variable[now] assign[=] call[name[rdfvalue].RDFDatetime.Now, parameter[]]
variable[client_platform] assign[=] name[snapshot].knowledge_base.os
variable[current_timestamp] assign[=] call[name[mysql_utils].RDFDatetimeToTimestamp, parameter[name[now]]]
variable[client_info] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c3e530>, <ast.Constant object at 0x7da1b1c3cc10>, <ast.Constant object at 0x7da1b1c3f9d0>, <ast.Constant object at 0x7da1b1c3feb0>], [<ast.Name object at 0x7da1b1c3e830>, <ast.Name object at 0x7da1b1c3d1e0>, <ast.Call object at 0x7da1b1c3e440>, <ast.Call object at 0x7da1b1c3c130>]]
variable[update_clauses] assign[=] list[[<ast.Constant object at 0x7da1b1c3efb0>, <ast.Constant object at 0x7da1b1c3c730>, <ast.Constant object at 0x7da1b1c3dcc0>, <ast.Constant object at 0x7da1b1c3fdf0>]]
if name[client_platform] begin[:]
call[name[client_info]][constant[last_platform]] assign[=] name[client_platform]
call[name[update_clauses].append, parameter[constant[last_platform = %(last_platform)s]]]
variable[update_query] assign[=] call[constant[UPDATE clients SET {} WHERE client_id = %(client_id)s].format, parameter[call[constant[, ].join, parameter[name[update_clauses]]]]]
variable[int_client_id] assign[=] call[name[db_utils].ClientIDToInt, parameter[name[snapshot].client_id]]
call[name[client_info]][constant[client_id]] assign[=] name[int_client_id]
variable[startup_info] assign[=] name[snapshot].startup_info
name[snapshot].startup_info assign[=] constant[None]
<ast.Try object at 0x7da1b1c1b430> | keyword[def] identifier[WriteClientSnapshot] ( identifier[self] , identifier[snapshot] , identifier[cursor] = keyword[None] ):
literal[string]
identifier[insert_history_query] =(
literal[string]
literal[string] )
identifier[insert_startup_query] =(
literal[string]
literal[string] )
identifier[now] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] ()
identifier[client_platform] = identifier[snapshot] . identifier[knowledge_base] . identifier[os]
identifier[current_timestamp] = identifier[mysql_utils] . identifier[RDFDatetimeToTimestamp] ( identifier[now] )
identifier[client_info] ={
literal[string] : identifier[current_timestamp] ,
literal[string] : identifier[current_timestamp] ,
literal[string] : identifier[snapshot] . identifier[GetGRRVersionString] (),
literal[string] : identifier[snapshot] . identifier[Uname] (),
}
identifier[update_clauses] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
keyword[if] identifier[client_platform] :
identifier[client_info] [ literal[string] ]= identifier[client_platform]
identifier[update_clauses] . identifier[append] ( literal[string] )
identifier[update_query] =(
literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[update_clauses] )))
identifier[int_client_id] = identifier[db_utils] . identifier[ClientIDToInt] ( identifier[snapshot] . identifier[client_id] )
identifier[client_info] [ literal[string] ]= identifier[int_client_id]
identifier[startup_info] = identifier[snapshot] . identifier[startup_info]
identifier[snapshot] . identifier[startup_info] = keyword[None]
keyword[try] :
identifier[cursor] . identifier[execute] (
identifier[insert_history_query] ,
( identifier[int_client_id] , identifier[current_timestamp] , identifier[snapshot] . identifier[SerializeToString] ()))
identifier[cursor] . identifier[execute] (
identifier[insert_startup_query] ,
( identifier[int_client_id] , identifier[current_timestamp] , identifier[startup_info] . identifier[SerializeToString] ()))
identifier[cursor] . identifier[execute] ( identifier[update_query] , identifier[client_info] )
keyword[except] identifier[MySQLdb] . identifier[IntegrityError] keyword[as] identifier[e] :
keyword[raise] identifier[db] . identifier[UnknownClientError] ( identifier[snapshot] . identifier[client_id] , identifier[cause] = identifier[e] )
keyword[finally] :
identifier[snapshot] . identifier[startup_info] = identifier[startup_info] | def WriteClientSnapshot(self, snapshot, cursor=None):
"""Write new client snapshot."""
insert_history_query = 'INSERT INTO client_snapshot_history(client_id, timestamp, client_snapshot) VALUES (%s, FROM_UNIXTIME(%s), %s)'
insert_startup_query = 'INSERT INTO client_startup_history(client_id, timestamp, startup_info) VALUES(%s, FROM_UNIXTIME(%s), %s)'
now = rdfvalue.RDFDatetime.Now()
client_platform = snapshot.knowledge_base.os
current_timestamp = mysql_utils.RDFDatetimeToTimestamp(now)
client_info = {'last_snapshot_timestamp': current_timestamp, 'last_startup_timestamp': current_timestamp, 'last_version_string': snapshot.GetGRRVersionString(), 'last_platform_release': snapshot.Uname()}
update_clauses = ['last_snapshot_timestamp = FROM_UNIXTIME(%(last_snapshot_timestamp)s)', 'last_startup_timestamp = FROM_UNIXTIME(%(last_startup_timestamp)s)', 'last_version_string = %(last_version_string)s', 'last_platform_release = %(last_platform_release)s']
if client_platform:
client_info['last_platform'] = client_platform
update_clauses.append('last_platform = %(last_platform)s') # depends on [control=['if'], data=[]]
update_query = 'UPDATE clients SET {} WHERE client_id = %(client_id)s'.format(', '.join(update_clauses))
int_client_id = db_utils.ClientIDToInt(snapshot.client_id)
client_info['client_id'] = int_client_id
startup_info = snapshot.startup_info
snapshot.startup_info = None
try:
cursor.execute(insert_history_query, (int_client_id, current_timestamp, snapshot.SerializeToString()))
cursor.execute(insert_startup_query, (int_client_id, current_timestamp, startup_info.SerializeToString()))
cursor.execute(update_query, client_info) # depends on [control=['try'], data=[]]
except MySQLdb.IntegrityError as e:
raise db.UnknownClientError(snapshot.client_id, cause=e) # depends on [control=['except'], data=['e']]
finally:
snapshot.startup_info = startup_info |
def _get_bottomMargin(self):
"""
This must return an int or float.
If the glyph has no outlines, this must return `None`.
Subclasses may override this method.
"""
bounds = self.bounds
if bounds is None:
return None
xMin, yMin, xMax, yMax = bounds
return yMin | def function[_get_bottomMargin, parameter[self]]:
constant[
This must return an int or float.
If the glyph has no outlines, this must return `None`.
Subclasses may override this method.
]
variable[bounds] assign[=] name[self].bounds
if compare[name[bounds] is constant[None]] begin[:]
return[constant[None]]
<ast.Tuple object at 0x7da20c76de40> assign[=] name[bounds]
return[name[yMin]] | keyword[def] identifier[_get_bottomMargin] ( identifier[self] ):
literal[string]
identifier[bounds] = identifier[self] . identifier[bounds]
keyword[if] identifier[bounds] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[xMin] , identifier[yMin] , identifier[xMax] , identifier[yMax] = identifier[bounds]
keyword[return] identifier[yMin] | def _get_bottomMargin(self):
"""
This must return an int or float.
If the glyph has no outlines, this must return `None`.
Subclasses may override this method.
"""
bounds = self.bounds
if bounds is None:
return None # depends on [control=['if'], data=[]]
(xMin, yMin, xMax, yMax) = bounds
return yMin |
def gradient(self, style=LINEAR, w=1.0, h=1.0, name=""):
"""Creates a gradient layer.
Creates a gradient layer, that is usually used
together with the mask() function.
All the image functions work on gradients,
so they can easily be flipped, rotated, scaled, inverted,
made brighter or darker, ...
Styles for gradients are LINEAR, RADIAL and DIAMOND.
"""
from types import FloatType
w0 = self.w
h0 = self.h
if type(w) == FloatType: w *= w0
if type(h) == FloatType: h *= h0
img = Image.new("L", (int(w),int(h)), 255)
draw = ImageDraw.Draw(img)
if style == LINEAR:
for i in range(int(w)):
k = 255.0 * i/w
draw.rectangle((i, 0, i, h), fill=int(k))
if style == RADIAL:
r = min(w,h)/2
for i in range(int(r)):
k = 255 - 255.0 * i/r
draw.ellipse((w/2-r+i, h/2-r+i, w/2+r-i, h/2+r-i), fill=int(k))
if style == DIAMOND:
r = max(w,h)
for i in range(int(r)):
x = int(i*w/r*0.5)
y = int(i*h/r*0.5)
k = 255.0 * i/r
draw.rectangle((x, y, w-x, h-y), outline=int(k))
img = img.convert("RGBA")
self.layer(img, 0, 0, name="") | def function[gradient, parameter[self, style, w, h, name]]:
constant[Creates a gradient layer.
Creates a gradient layer, that is usually used
together with the mask() function.
All the image functions work on gradients,
so they can easily be flipped, rotated, scaled, inverted,
made brighter or darker, ...
Styles for gradients are LINEAR, RADIAL and DIAMOND.
]
from relative_module[types] import module[FloatType]
variable[w0] assign[=] name[self].w
variable[h0] assign[=] name[self].h
if compare[call[name[type], parameter[name[w]]] equal[==] name[FloatType]] begin[:]
<ast.AugAssign object at 0x7da1b00056f0>
if compare[call[name[type], parameter[name[h]]] equal[==] name[FloatType]] begin[:]
<ast.AugAssign object at 0x7da1b0004ca0>
variable[img] assign[=] call[name[Image].new, parameter[constant[L], tuple[[<ast.Call object at 0x7da1b0004880>, <ast.Call object at 0x7da1b0004250>]], constant[255]]]
variable[draw] assign[=] call[name[ImageDraw].Draw, parameter[name[img]]]
if compare[name[style] equal[==] name[LINEAR]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[call[name[int], parameter[name[w]]]]]] begin[:]
variable[k] assign[=] binary_operation[binary_operation[constant[255.0] * name[i]] / name[w]]
call[name[draw].rectangle, parameter[tuple[[<ast.Name object at 0x7da2041db730>, <ast.Constant object at 0x7da2041dada0>, <ast.Name object at 0x7da2041d9ff0>, <ast.Name object at 0x7da2041d9db0>]]]]
if compare[name[style] equal[==] name[RADIAL]] begin[:]
variable[r] assign[=] binary_operation[call[name[min], parameter[name[w], name[h]]] / constant[2]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[int], parameter[name[r]]]]]] begin[:]
variable[k] assign[=] binary_operation[constant[255] - binary_operation[binary_operation[constant[255.0] * name[i]] / name[r]]]
call[name[draw].ellipse, parameter[tuple[[<ast.BinOp object at 0x7da2041d9480>, <ast.BinOp object at 0x7da2041d9690>, <ast.BinOp object at 0x7da2041da320>, <ast.BinOp object at 0x7da2041d9030>]]]]
if compare[name[style] equal[==] name[DIAMOND]] begin[:]
variable[r] assign[=] call[name[max], parameter[name[w], name[h]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[int], parameter[name[r]]]]]] begin[:]
variable[x] assign[=] call[name[int], parameter[binary_operation[binary_operation[binary_operation[name[i] * name[w]] / name[r]] * constant[0.5]]]]
variable[y] assign[=] call[name[int], parameter[binary_operation[binary_operation[binary_operation[name[i] * name[h]] / name[r]] * constant[0.5]]]]
variable[k] assign[=] binary_operation[binary_operation[constant[255.0] * name[i]] / name[r]]
call[name[draw].rectangle, parameter[tuple[[<ast.Name object at 0x7da2041d8040>, <ast.Name object at 0x7da2041d9bd0>, <ast.BinOp object at 0x7da2041d8dc0>, <ast.BinOp object at 0x7da2041d93c0>]]]]
variable[img] assign[=] call[name[img].convert, parameter[constant[RGBA]]]
call[name[self].layer, parameter[name[img], constant[0], constant[0]]] | keyword[def] identifier[gradient] ( identifier[self] , identifier[style] = identifier[LINEAR] , identifier[w] = literal[int] , identifier[h] = literal[int] , identifier[name] = literal[string] ):
literal[string]
keyword[from] identifier[types] keyword[import] identifier[FloatType]
identifier[w0] = identifier[self] . identifier[w]
identifier[h0] = identifier[self] . identifier[h]
keyword[if] identifier[type] ( identifier[w] )== identifier[FloatType] : identifier[w] *= identifier[w0]
keyword[if] identifier[type] ( identifier[h] )== identifier[FloatType] : identifier[h] *= identifier[h0]
identifier[img] = identifier[Image] . identifier[new] ( literal[string] ,( identifier[int] ( identifier[w] ), identifier[int] ( identifier[h] )), literal[int] )
identifier[draw] = identifier[ImageDraw] . identifier[Draw] ( identifier[img] )
keyword[if] identifier[style] == identifier[LINEAR] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[int] ( identifier[w] )):
identifier[k] = literal[int] * identifier[i] / identifier[w]
identifier[draw] . identifier[rectangle] (( identifier[i] , literal[int] , identifier[i] , identifier[h] ), identifier[fill] = identifier[int] ( identifier[k] ))
keyword[if] identifier[style] == identifier[RADIAL] :
identifier[r] = identifier[min] ( identifier[w] , identifier[h] )/ literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[int] ( identifier[r] )):
identifier[k] = literal[int] - literal[int] * identifier[i] / identifier[r]
identifier[draw] . identifier[ellipse] (( identifier[w] / literal[int] - identifier[r] + identifier[i] , identifier[h] / literal[int] - identifier[r] + identifier[i] , identifier[w] / literal[int] + identifier[r] - identifier[i] , identifier[h] / literal[int] + identifier[r] - identifier[i] ), identifier[fill] = identifier[int] ( identifier[k] ))
keyword[if] identifier[style] == identifier[DIAMOND] :
identifier[r] = identifier[max] ( identifier[w] , identifier[h] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[int] ( identifier[r] )):
identifier[x] = identifier[int] ( identifier[i] * identifier[w] / identifier[r] * literal[int] )
identifier[y] = identifier[int] ( identifier[i] * identifier[h] / identifier[r] * literal[int] )
identifier[k] = literal[int] * identifier[i] / identifier[r]
identifier[draw] . identifier[rectangle] (( identifier[x] , identifier[y] , identifier[w] - identifier[x] , identifier[h] - identifier[y] ), identifier[outline] = identifier[int] ( identifier[k] ))
identifier[img] = identifier[img] . identifier[convert] ( literal[string] )
identifier[self] . identifier[layer] ( identifier[img] , literal[int] , literal[int] , identifier[name] = literal[string] ) | def gradient(self, style=LINEAR, w=1.0, h=1.0, name=''):
"""Creates a gradient layer.
Creates a gradient layer, that is usually used
together with the mask() function.
All the image functions work on gradients,
so they can easily be flipped, rotated, scaled, inverted,
made brighter or darker, ...
Styles for gradients are LINEAR, RADIAL and DIAMOND.
"""
from types import FloatType
w0 = self.w
h0 = self.h
if type(w) == FloatType:
w *= w0 # depends on [control=['if'], data=[]]
if type(h) == FloatType:
h *= h0 # depends on [control=['if'], data=[]]
img = Image.new('L', (int(w), int(h)), 255)
draw = ImageDraw.Draw(img)
if style == LINEAR:
for i in range(int(w)):
k = 255.0 * i / w
draw.rectangle((i, 0, i, h), fill=int(k)) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if style == RADIAL:
r = min(w, h) / 2
for i in range(int(r)):
k = 255 - 255.0 * i / r
draw.ellipse((w / 2 - r + i, h / 2 - r + i, w / 2 + r - i, h / 2 + r - i), fill=int(k)) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if style == DIAMOND:
r = max(w, h)
for i in range(int(r)):
x = int(i * w / r * 0.5)
y = int(i * h / r * 0.5)
k = 255.0 * i / r
draw.rectangle((x, y, w - x, h - y), outline=int(k)) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
img = img.convert('RGBA')
self.layer(img, 0, 0, name='') |
def trackSeek(path, artist, album, track, trackNum, fmt):
"""Actually runs the program"""
hiddenName = "(Hidden Track).{}".format(fmt)
trackName = track + ".{}".format(fmt)
songIn = instantiateSong(path)
times = findGap(songIn)
saveFiles(trackName, hiddenName, splitSong(songIn, times[0], times[1]), artist, album, trackNum)
# return [path, track.rsplit('/',1)[0] +'/{}'.format(hiddenName)]
return [trackName, hiddenName] | def function[trackSeek, parameter[path, artist, album, track, trackNum, fmt]]:
constant[Actually runs the program]
variable[hiddenName] assign[=] call[constant[(Hidden Track).{}].format, parameter[name[fmt]]]
variable[trackName] assign[=] binary_operation[name[track] + call[constant[.{}].format, parameter[name[fmt]]]]
variable[songIn] assign[=] call[name[instantiateSong], parameter[name[path]]]
variable[times] assign[=] call[name[findGap], parameter[name[songIn]]]
call[name[saveFiles], parameter[name[trackName], name[hiddenName], call[name[splitSong], parameter[name[songIn], call[name[times]][constant[0]], call[name[times]][constant[1]]]], name[artist], name[album], name[trackNum]]]
return[list[[<ast.Name object at 0x7da20c795360>, <ast.Name object at 0x7da20c795180>]]] | keyword[def] identifier[trackSeek] ( identifier[path] , identifier[artist] , identifier[album] , identifier[track] , identifier[trackNum] , identifier[fmt] ):
literal[string]
identifier[hiddenName] = literal[string] . identifier[format] ( identifier[fmt] )
identifier[trackName] = identifier[track] + literal[string] . identifier[format] ( identifier[fmt] )
identifier[songIn] = identifier[instantiateSong] ( identifier[path] )
identifier[times] = identifier[findGap] ( identifier[songIn] )
identifier[saveFiles] ( identifier[trackName] , identifier[hiddenName] , identifier[splitSong] ( identifier[songIn] , identifier[times] [ literal[int] ], identifier[times] [ literal[int] ]), identifier[artist] , identifier[album] , identifier[trackNum] )
keyword[return] [ identifier[trackName] , identifier[hiddenName] ] | def trackSeek(path, artist, album, track, trackNum, fmt):
"""Actually runs the program"""
hiddenName = '(Hidden Track).{}'.format(fmt)
trackName = track + '.{}'.format(fmt)
songIn = instantiateSong(path)
times = findGap(songIn)
saveFiles(trackName, hiddenName, splitSong(songIn, times[0], times[1]), artist, album, trackNum)
# return [path, track.rsplit('/',1)[0] +'/{}'.format(hiddenName)]
return [trackName, hiddenName] |
def sample_batch_transitions(self, batch_size, forward_steps=1):
""" Return indexes of next sample"""
batches = [
self._sample_batch_prioritized(
segment_tree, batch_size, self.deque.frame_history, forward_steps=forward_steps
)
for segment_tree in self.segment_trees
]
probs, idxs, tree_idxs = zip(*batches)
return np.stack(probs, axis=1).astype(float), np.stack(idxs, axis=1), np.stack(tree_idxs, axis=1) | def function[sample_batch_transitions, parameter[self, batch_size, forward_steps]]:
constant[ Return indexes of next sample]
variable[batches] assign[=] <ast.ListComp object at 0x7da1b17f97e0>
<ast.Tuple object at 0x7da1b26acc70> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b26aea10>]]
return[tuple[[<ast.Call object at 0x7da1b26adba0>, <ast.Call object at 0x7da1b26afc70>, <ast.Call object at 0x7da1b26ad540>]]] | keyword[def] identifier[sample_batch_transitions] ( identifier[self] , identifier[batch_size] , identifier[forward_steps] = literal[int] ):
literal[string]
identifier[batches] =[
identifier[self] . identifier[_sample_batch_prioritized] (
identifier[segment_tree] , identifier[batch_size] , identifier[self] . identifier[deque] . identifier[frame_history] , identifier[forward_steps] = identifier[forward_steps]
)
keyword[for] identifier[segment_tree] keyword[in] identifier[self] . identifier[segment_trees]
]
identifier[probs] , identifier[idxs] , identifier[tree_idxs] = identifier[zip] (* identifier[batches] )
keyword[return] identifier[np] . identifier[stack] ( identifier[probs] , identifier[axis] = literal[int] ). identifier[astype] ( identifier[float] ), identifier[np] . identifier[stack] ( identifier[idxs] , identifier[axis] = literal[int] ), identifier[np] . identifier[stack] ( identifier[tree_idxs] , identifier[axis] = literal[int] ) | def sample_batch_transitions(self, batch_size, forward_steps=1):
""" Return indexes of next sample"""
batches = [self._sample_batch_prioritized(segment_tree, batch_size, self.deque.frame_history, forward_steps=forward_steps) for segment_tree in self.segment_trees]
(probs, idxs, tree_idxs) = zip(*batches)
return (np.stack(probs, axis=1).astype(float), np.stack(idxs, axis=1), np.stack(tree_idxs, axis=1)) |
def get_text(self, index=None):
"""
Gets text from a given index. If index=None, returns the current value
self.get_text(self.get_value())
"""
if index==None:
return self.get_text(self.get_value())
else:
return str(self._widget.itemText(index)) | def function[get_text, parameter[self, index]]:
constant[
Gets text from a given index. If index=None, returns the current value
self.get_text(self.get_value())
]
if compare[name[index] equal[==] constant[None]] begin[:]
return[call[name[self].get_text, parameter[call[name[self].get_value, parameter[]]]]] | keyword[def] identifier[get_text] ( identifier[self] , identifier[index] = keyword[None] ):
literal[string]
keyword[if] identifier[index] == keyword[None] :
keyword[return] identifier[self] . identifier[get_text] ( identifier[self] . identifier[get_value] ())
keyword[else] :
keyword[return] identifier[str] ( identifier[self] . identifier[_widget] . identifier[itemText] ( identifier[index] )) | def get_text(self, index=None):
"""
Gets text from a given index. If index=None, returns the current value
self.get_text(self.get_value())
"""
if index == None:
return self.get_text(self.get_value()) # depends on [control=['if'], data=[]]
else:
return str(self._widget.itemText(index)) |
def recursive_glob_with_tree(new_base, old_base, treeroot, pattern):
'''generate a list of tuples(new_base, list(paths to put there)
where the files are found inside of old_base/treeroot.
'''
results = []
old_cwd = os.getcwd()
os.chdir(old_base)
for rel_base, dirs, files in os.walk(treeroot):
goodfiles = fnmatch.filter(files, pattern)
one_dir_results = []
for f in goodfiles:
one_dir_results.append(os.path.join(old_base, rel_base, f))
results.append((os.path.join(new_base, rel_base), one_dir_results))
os.chdir(old_cwd)
return results | def function[recursive_glob_with_tree, parameter[new_base, old_base, treeroot, pattern]]:
constant[generate a list of tuples(new_base, list(paths to put there)
where the files are found inside of old_base/treeroot.
]
variable[results] assign[=] list[[]]
variable[old_cwd] assign[=] call[name[os].getcwd, parameter[]]
call[name[os].chdir, parameter[name[old_base]]]
for taget[tuple[[<ast.Name object at 0x7da18f00c3a0>, <ast.Name object at 0x7da18f00d960>, <ast.Name object at 0x7da18f00cee0>]]] in starred[call[name[os].walk, parameter[name[treeroot]]]] begin[:]
variable[goodfiles] assign[=] call[name[fnmatch].filter, parameter[name[files], name[pattern]]]
variable[one_dir_results] assign[=] list[[]]
for taget[name[f]] in starred[name[goodfiles]] begin[:]
call[name[one_dir_results].append, parameter[call[name[os].path.join, parameter[name[old_base], name[rel_base], name[f]]]]]
call[name[results].append, parameter[tuple[[<ast.Call object at 0x7da204960b20>, <ast.Name object at 0x7da204963160>]]]]
call[name[os].chdir, parameter[name[old_cwd]]]
return[name[results]] | keyword[def] identifier[recursive_glob_with_tree] ( identifier[new_base] , identifier[old_base] , identifier[treeroot] , identifier[pattern] ):
literal[string]
identifier[results] =[]
identifier[old_cwd] = identifier[os] . identifier[getcwd] ()
identifier[os] . identifier[chdir] ( identifier[old_base] )
keyword[for] identifier[rel_base] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[treeroot] ):
identifier[goodfiles] = identifier[fnmatch] . identifier[filter] ( identifier[files] , identifier[pattern] )
identifier[one_dir_results] =[]
keyword[for] identifier[f] keyword[in] identifier[goodfiles] :
identifier[one_dir_results] . identifier[append] ( identifier[os] . identifier[path] . identifier[join] ( identifier[old_base] , identifier[rel_base] , identifier[f] ))
identifier[results] . identifier[append] (( identifier[os] . identifier[path] . identifier[join] ( identifier[new_base] , identifier[rel_base] ), identifier[one_dir_results] ))
identifier[os] . identifier[chdir] ( identifier[old_cwd] )
keyword[return] identifier[results] | def recursive_glob_with_tree(new_base, old_base, treeroot, pattern):
"""generate a list of tuples(new_base, list(paths to put there)
where the files are found inside of old_base/treeroot.
"""
results = []
old_cwd = os.getcwd()
os.chdir(old_base)
for (rel_base, dirs, files) in os.walk(treeroot):
goodfiles = fnmatch.filter(files, pattern)
one_dir_results = []
for f in goodfiles:
one_dir_results.append(os.path.join(old_base, rel_base, f)) # depends on [control=['for'], data=['f']]
results.append((os.path.join(new_base, rel_base), one_dir_results)) # depends on [control=['for'], data=[]]
os.chdir(old_cwd)
return results |
def superuser_required(f):
"""Requires the requestor to be a super user."""
@functools.wraps(f)
@login_required
def wrapped(*args, **kwargs):
if not (current_user.is_authenticated() and current_user.superuser):
abort(403)
return f(*args, **kwargs)
return wrapped | def function[superuser_required, parameter[f]]:
constant[Requires the requestor to be a super user.]
def function[wrapped, parameter[]]:
if <ast.UnaryOp object at 0x7da18fe92770> begin[:]
call[name[abort], parameter[constant[403]]]
return[call[name[f], parameter[<ast.Starred object at 0x7da18fe90040>]]]
return[name[wrapped]] | keyword[def] identifier[superuser_required] ( identifier[f] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[f] )
@ identifier[login_required]
keyword[def] identifier[wrapped] (* identifier[args] ,** identifier[kwargs] ):
keyword[if] keyword[not] ( identifier[current_user] . identifier[is_authenticated] () keyword[and] identifier[current_user] . identifier[superuser] ):
identifier[abort] ( literal[int] )
keyword[return] identifier[f] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapped] | def superuser_required(f):
"""Requires the requestor to be a super user."""
@functools.wraps(f)
@login_required
def wrapped(*args, **kwargs):
if not (current_user.is_authenticated() and current_user.superuser):
abort(403) # depends on [control=['if'], data=[]]
return f(*args, **kwargs)
return wrapped |
def main(arguments=None):
"""
Parse options, gather stats and show the results
Takes optional parameter ``arguments`` which can be either
command line string or list of options. This is very useful
for testing purposes. Function returns a tuple of the form::
([user_stats], team_stats)
with the list of all gathered stats objects.
"""
try:
# Parse options, initialize gathered stats
options, header = Options(arguments).parse()
gathered_stats = []
# Check for user email addresses (command line or config)
emails = options.emails or did.base.Config().email
emails = utils.split(emails, separator=re.compile(r"\s*,\s*"))
users = [did.base.User(email=email) for email in emails]
# Print header and prepare team stats object for data merging
utils.eprint(header)
team_stats = UserStats(options=options)
if options.merge:
utils.header("Total Report")
utils.item("Users: {0}".format(len(users)), options=options)
# Check individual user stats
for user in users:
if options.merge:
utils.item(user, 1, options=options)
else:
utils.header(user)
user_stats = UserStats(user=user, options=options)
user_stats.check()
team_stats.merge(user_stats)
gathered_stats.append(user_stats)
# Display merged team report
if options.merge or options.total:
if options.total:
utils.header("Total Report")
team_stats.show()
# Return all gathered stats objects
return gathered_stats, team_stats
except did.base.ConfigFileError as error:
utils.info("Create at least a minimum config file {0}:\n{1}".format(
did.base.Config.path(), did.base.Config.example().strip()))
raise | def function[main, parameter[arguments]]:
constant[
Parse options, gather stats and show the results
Takes optional parameter ``arguments`` which can be either
command line string or list of options. This is very useful
for testing purposes. Function returns a tuple of the form::
([user_stats], team_stats)
with the list of all gathered stats objects.
]
<ast.Try object at 0x7da1b208a980> | keyword[def] identifier[main] ( identifier[arguments] = keyword[None] ):
literal[string]
keyword[try] :
identifier[options] , identifier[header] = identifier[Options] ( identifier[arguments] ). identifier[parse] ()
identifier[gathered_stats] =[]
identifier[emails] = identifier[options] . identifier[emails] keyword[or] identifier[did] . identifier[base] . identifier[Config] (). identifier[email]
identifier[emails] = identifier[utils] . identifier[split] ( identifier[emails] , identifier[separator] = identifier[re] . identifier[compile] ( literal[string] ))
identifier[users] =[ identifier[did] . identifier[base] . identifier[User] ( identifier[email] = identifier[email] ) keyword[for] identifier[email] keyword[in] identifier[emails] ]
identifier[utils] . identifier[eprint] ( identifier[header] )
identifier[team_stats] = identifier[UserStats] ( identifier[options] = identifier[options] )
keyword[if] identifier[options] . identifier[merge] :
identifier[utils] . identifier[header] ( literal[string] )
identifier[utils] . identifier[item] ( literal[string] . identifier[format] ( identifier[len] ( identifier[users] )), identifier[options] = identifier[options] )
keyword[for] identifier[user] keyword[in] identifier[users] :
keyword[if] identifier[options] . identifier[merge] :
identifier[utils] . identifier[item] ( identifier[user] , literal[int] , identifier[options] = identifier[options] )
keyword[else] :
identifier[utils] . identifier[header] ( identifier[user] )
identifier[user_stats] = identifier[UserStats] ( identifier[user] = identifier[user] , identifier[options] = identifier[options] )
identifier[user_stats] . identifier[check] ()
identifier[team_stats] . identifier[merge] ( identifier[user_stats] )
identifier[gathered_stats] . identifier[append] ( identifier[user_stats] )
keyword[if] identifier[options] . identifier[merge] keyword[or] identifier[options] . identifier[total] :
keyword[if] identifier[options] . identifier[total] :
identifier[utils] . identifier[header] ( literal[string] )
identifier[team_stats] . identifier[show] ()
keyword[return] identifier[gathered_stats] , identifier[team_stats]
keyword[except] identifier[did] . identifier[base] . identifier[ConfigFileError] keyword[as] identifier[error] :
identifier[utils] . identifier[info] ( literal[string] . identifier[format] (
identifier[did] . identifier[base] . identifier[Config] . identifier[path] (), identifier[did] . identifier[base] . identifier[Config] . identifier[example] (). identifier[strip] ()))
keyword[raise] | def main(arguments=None):
"""
Parse options, gather stats and show the results
Takes optional parameter ``arguments`` which can be either
command line string or list of options. This is very useful
for testing purposes. Function returns a tuple of the form::
([user_stats], team_stats)
with the list of all gathered stats objects.
"""
try:
# Parse options, initialize gathered stats
(options, header) = Options(arguments).parse()
gathered_stats = []
# Check for user email addresses (command line or config)
emails = options.emails or did.base.Config().email
emails = utils.split(emails, separator=re.compile('\\s*,\\s*'))
users = [did.base.User(email=email) for email in emails]
# Print header and prepare team stats object for data merging
utils.eprint(header)
team_stats = UserStats(options=options)
if options.merge:
utils.header('Total Report')
utils.item('Users: {0}'.format(len(users)), options=options) # depends on [control=['if'], data=[]]
# Check individual user stats
for user in users:
if options.merge:
utils.item(user, 1, options=options) # depends on [control=['if'], data=[]]
else:
utils.header(user)
user_stats = UserStats(user=user, options=options)
user_stats.check()
team_stats.merge(user_stats)
gathered_stats.append(user_stats) # depends on [control=['for'], data=['user']]
# Display merged team report
if options.merge or options.total:
if options.total:
utils.header('Total Report') # depends on [control=['if'], data=[]]
team_stats.show() # depends on [control=['if'], data=[]]
# Return all gathered stats objects
return (gathered_stats, team_stats) # depends on [control=['try'], data=[]]
except did.base.ConfigFileError as error:
utils.info('Create at least a minimum config file {0}:\n{1}'.format(did.base.Config.path(), did.base.Config.example().strip()))
raise # depends on [control=['except'], data=[]] |
def rpXRDS(request):
"""
Return a relying party verification XRDS document
"""
return util.renderXRDS(
request,
[RP_RETURN_TO_URL_TYPE],
[util.getViewURL(request, finishOpenID)]) | def function[rpXRDS, parameter[request]]:
constant[
Return a relying party verification XRDS document
]
return[call[name[util].renderXRDS, parameter[name[request], list[[<ast.Name object at 0x7da207f9a260>]], list[[<ast.Call object at 0x7da18c4ce9e0>]]]]] | keyword[def] identifier[rpXRDS] ( identifier[request] ):
literal[string]
keyword[return] identifier[util] . identifier[renderXRDS] (
identifier[request] ,
[ identifier[RP_RETURN_TO_URL_TYPE] ],
[ identifier[util] . identifier[getViewURL] ( identifier[request] , identifier[finishOpenID] )]) | def rpXRDS(request):
"""
Return a relying party verification XRDS document
"""
return util.renderXRDS(request, [RP_RETURN_TO_URL_TYPE], [util.getViewURL(request, finishOpenID)]) |
def get_img_heatmap(orig_img, activation_map):
"""Draw a heatmap on top of the original image using intensities from activation_map"""
heatmap = cv2.applyColorMap(activation_map, cv2.COLORMAP_COOL)
heatmap = cv2.cvtColor(heatmap, cv2.COLOR_BGR2RGB)
img_heatmap = np.float32(heatmap) + np.float32(orig_img)
img_heatmap = img_heatmap / np.max(img_heatmap)
img_heatmap *= 255
return img_heatmap.astype(int) | def function[get_img_heatmap, parameter[orig_img, activation_map]]:
constant[Draw a heatmap on top of the original image using intensities from activation_map]
variable[heatmap] assign[=] call[name[cv2].applyColorMap, parameter[name[activation_map], name[cv2].COLORMAP_COOL]]
variable[heatmap] assign[=] call[name[cv2].cvtColor, parameter[name[heatmap], name[cv2].COLOR_BGR2RGB]]
variable[img_heatmap] assign[=] binary_operation[call[name[np].float32, parameter[name[heatmap]]] + call[name[np].float32, parameter[name[orig_img]]]]
variable[img_heatmap] assign[=] binary_operation[name[img_heatmap] / call[name[np].max, parameter[name[img_heatmap]]]]
<ast.AugAssign object at 0x7da1b2088e20>
return[call[name[img_heatmap].astype, parameter[name[int]]]] | keyword[def] identifier[get_img_heatmap] ( identifier[orig_img] , identifier[activation_map] ):
literal[string]
identifier[heatmap] = identifier[cv2] . identifier[applyColorMap] ( identifier[activation_map] , identifier[cv2] . identifier[COLORMAP_COOL] )
identifier[heatmap] = identifier[cv2] . identifier[cvtColor] ( identifier[heatmap] , identifier[cv2] . identifier[COLOR_BGR2RGB] )
identifier[img_heatmap] = identifier[np] . identifier[float32] ( identifier[heatmap] )+ identifier[np] . identifier[float32] ( identifier[orig_img] )
identifier[img_heatmap] = identifier[img_heatmap] / identifier[np] . identifier[max] ( identifier[img_heatmap] )
identifier[img_heatmap] *= literal[int]
keyword[return] identifier[img_heatmap] . identifier[astype] ( identifier[int] ) | def get_img_heatmap(orig_img, activation_map):
"""Draw a heatmap on top of the original image using intensities from activation_map"""
heatmap = cv2.applyColorMap(activation_map, cv2.COLORMAP_COOL)
heatmap = cv2.cvtColor(heatmap, cv2.COLOR_BGR2RGB)
img_heatmap = np.float32(heatmap) + np.float32(orig_img)
img_heatmap = img_heatmap / np.max(img_heatmap)
img_heatmap *= 255
return img_heatmap.astype(int) |
def fetch_mid(self):
"""
Gets the next valid MID.
:return: the mid to use
"""
current_mid = self._current_mid
self._current_mid += 1
self._current_mid %= 65535
return current_mid | def function[fetch_mid, parameter[self]]:
constant[
Gets the next valid MID.
:return: the mid to use
]
variable[current_mid] assign[=] name[self]._current_mid
<ast.AugAssign object at 0x7da1b0614580>
<ast.AugAssign object at 0x7da1b0617ee0>
return[name[current_mid]] | keyword[def] identifier[fetch_mid] ( identifier[self] ):
literal[string]
identifier[current_mid] = identifier[self] . identifier[_current_mid]
identifier[self] . identifier[_current_mid] += literal[int]
identifier[self] . identifier[_current_mid] %= literal[int]
keyword[return] identifier[current_mid] | def fetch_mid(self):
"""
Gets the next valid MID.
:return: the mid to use
"""
current_mid = self._current_mid
self._current_mid += 1
self._current_mid %= 65535
return current_mid |
def users_setPresence(self, *, presence: str, **kwargs) -> SlackResponse:
"""Manually sets user presence.
Args:
presence (str): Either 'auto' or 'away'.
"""
kwargs.update({"presence": presence})
return self.api_call("users.setPresence", json=kwargs) | def function[users_setPresence, parameter[self]]:
constant[Manually sets user presence.
Args:
presence (str): Either 'auto' or 'away'.
]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1bec640>], [<ast.Name object at 0x7da1b1bec5b0>]]]]
return[call[name[self].api_call, parameter[constant[users.setPresence]]]] | keyword[def] identifier[users_setPresence] ( identifier[self] ,*, identifier[presence] : identifier[str] ,** identifier[kwargs] )-> identifier[SlackResponse] :
literal[string]
identifier[kwargs] . identifier[update] ({ literal[string] : identifier[presence] })
keyword[return] identifier[self] . identifier[api_call] ( literal[string] , identifier[json] = identifier[kwargs] ) | def users_setPresence(self, *, presence: str, **kwargs) -> SlackResponse:
"""Manually sets user presence.
Args:
presence (str): Either 'auto' or 'away'.
"""
kwargs.update({'presence': presence})
return self.api_call('users.setPresence', json=kwargs) |
def host_url(self):
"""Just the host with scheme as IRI.
See also: :attr:`trusted_hosts`.
"""
return get_current_url(
self.environ, host_only=True, trusted_hosts=self.trusted_hosts
) | def function[host_url, parameter[self]]:
constant[Just the host with scheme as IRI.
See also: :attr:`trusted_hosts`.
]
return[call[name[get_current_url], parameter[name[self].environ]]] | keyword[def] identifier[host_url] ( identifier[self] ):
literal[string]
keyword[return] identifier[get_current_url] (
identifier[self] . identifier[environ] , identifier[host_only] = keyword[True] , identifier[trusted_hosts] = identifier[self] . identifier[trusted_hosts]
) | def host_url(self):
"""Just the host with scheme as IRI.
See also: :attr:`trusted_hosts`.
"""
return get_current_url(self.environ, host_only=True, trusted_hosts=self.trusted_hosts) |
def _pressModifiers(self, modifiers, pressed=True, globally=False):
"""Press given modifiers (provided in list form).
Parameters: modifiers list, global or app specific
Optional: keypressed state (default is True (down))
Returns: Unsigned int representing flags to set
"""
if not isinstance(modifiers, list):
raise TypeError('Please provide modifiers in list form')
if not hasattr(self, 'keyboard'):
self.keyboard = AXKeyboard.loadKeyboard()
modFlags = 0
# Press given modifiers
for nextMod in modifiers:
if nextMod not in self.keyboard:
errStr = 'Key %s not found in keyboard layout'
self._clearEventQueue()
raise ValueError(errStr % self.keyboard[nextMod])
modEvent = Quartz.CGEventCreateKeyboardEvent(
Quartz.CGEventSourceCreate(0),
self.keyboard[nextMod],
pressed
)
if not pressed:
# Clear the modflags:
Quartz.CGEventSetFlags(modEvent, 0)
if globally:
self._queueEvent(Quartz.CGEventPost, (0, modEvent))
else:
# To direct output to the correct application need the PSN (macOS <=10.10) or PID(macOS > 10.10):
macVer, _, _ = platform.mac_ver()
macVer = int(macVer.split('.')[1])
if macVer > 10:
appPid = self._getPid()
self._queueEvent(Quartz.CGEventPostToPid, (appPid, modEvent))
else:
appPsn = self._getPsnForPid(self._getPid())
self._queueEvent(Quartz.CGEventPostToPSN, (appPsn, modEvent))
# Add the modifier flags
modFlags += AXKeyboard.modKeyFlagConstants[nextMod]
return modFlags | def function[_pressModifiers, parameter[self, modifiers, pressed, globally]]:
constant[Press given modifiers (provided in list form).
Parameters: modifiers list, global or app specific
Optional: keypressed state (default is True (down))
Returns: Unsigned int representing flags to set
]
if <ast.UnaryOp object at 0x7da18dc06bf0> begin[:]
<ast.Raise object at 0x7da18dc04490>
if <ast.UnaryOp object at 0x7da18dc07460> begin[:]
name[self].keyboard assign[=] call[name[AXKeyboard].loadKeyboard, parameter[]]
variable[modFlags] assign[=] constant[0]
for taget[name[nextMod]] in starred[name[modifiers]] begin[:]
if compare[name[nextMod] <ast.NotIn object at 0x7da2590d7190> name[self].keyboard] begin[:]
variable[errStr] assign[=] constant[Key %s not found in keyboard layout]
call[name[self]._clearEventQueue, parameter[]]
<ast.Raise object at 0x7da18dc05c90>
variable[modEvent] assign[=] call[name[Quartz].CGEventCreateKeyboardEvent, parameter[call[name[Quartz].CGEventSourceCreate, parameter[constant[0]]], call[name[self].keyboard][name[nextMod]], name[pressed]]]
if <ast.UnaryOp object at 0x7da1aff77a00> begin[:]
call[name[Quartz].CGEventSetFlags, parameter[name[modEvent], constant[0]]]
if name[globally] begin[:]
call[name[self]._queueEvent, parameter[name[Quartz].CGEventPost, tuple[[<ast.Constant object at 0x7da1aff762c0>, <ast.Name object at 0x7da1aff76cb0>]]]]
<ast.AugAssign object at 0x7da1aff74e50>
return[name[modFlags]] | keyword[def] identifier[_pressModifiers] ( identifier[self] , identifier[modifiers] , identifier[pressed] = keyword[True] , identifier[globally] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[modifiers] , identifier[list] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[keyboard] = identifier[AXKeyboard] . identifier[loadKeyboard] ()
identifier[modFlags] = literal[int]
keyword[for] identifier[nextMod] keyword[in] identifier[modifiers] :
keyword[if] identifier[nextMod] keyword[not] keyword[in] identifier[self] . identifier[keyboard] :
identifier[errStr] = literal[string]
identifier[self] . identifier[_clearEventQueue] ()
keyword[raise] identifier[ValueError] ( identifier[errStr] % identifier[self] . identifier[keyboard] [ identifier[nextMod] ])
identifier[modEvent] = identifier[Quartz] . identifier[CGEventCreateKeyboardEvent] (
identifier[Quartz] . identifier[CGEventSourceCreate] ( literal[int] ),
identifier[self] . identifier[keyboard] [ identifier[nextMod] ],
identifier[pressed]
)
keyword[if] keyword[not] identifier[pressed] :
identifier[Quartz] . identifier[CGEventSetFlags] ( identifier[modEvent] , literal[int] )
keyword[if] identifier[globally] :
identifier[self] . identifier[_queueEvent] ( identifier[Quartz] . identifier[CGEventPost] ,( literal[int] , identifier[modEvent] ))
keyword[else] :
identifier[macVer] , identifier[_] , identifier[_] = identifier[platform] . identifier[mac_ver] ()
identifier[macVer] = identifier[int] ( identifier[macVer] . identifier[split] ( literal[string] )[ literal[int] ])
keyword[if] identifier[macVer] > literal[int] :
identifier[appPid] = identifier[self] . identifier[_getPid] ()
identifier[self] . identifier[_queueEvent] ( identifier[Quartz] . identifier[CGEventPostToPid] ,( identifier[appPid] , identifier[modEvent] ))
keyword[else] :
identifier[appPsn] = identifier[self] . identifier[_getPsnForPid] ( identifier[self] . identifier[_getPid] ())
identifier[self] . identifier[_queueEvent] ( identifier[Quartz] . identifier[CGEventPostToPSN] ,( identifier[appPsn] , identifier[modEvent] ))
identifier[modFlags] += identifier[AXKeyboard] . identifier[modKeyFlagConstants] [ identifier[nextMod] ]
keyword[return] identifier[modFlags] | def _pressModifiers(self, modifiers, pressed=True, globally=False):
"""Press given modifiers (provided in list form).
Parameters: modifiers list, global or app specific
Optional: keypressed state (default is True (down))
Returns: Unsigned int representing flags to set
"""
if not isinstance(modifiers, list):
raise TypeError('Please provide modifiers in list form') # depends on [control=['if'], data=[]]
if not hasattr(self, 'keyboard'):
self.keyboard = AXKeyboard.loadKeyboard() # depends on [control=['if'], data=[]]
modFlags = 0
# Press given modifiers
for nextMod in modifiers:
if nextMod not in self.keyboard:
errStr = 'Key %s not found in keyboard layout'
self._clearEventQueue()
raise ValueError(errStr % self.keyboard[nextMod]) # depends on [control=['if'], data=['nextMod']]
modEvent = Quartz.CGEventCreateKeyboardEvent(Quartz.CGEventSourceCreate(0), self.keyboard[nextMod], pressed)
if not pressed:
# Clear the modflags:
Quartz.CGEventSetFlags(modEvent, 0) # depends on [control=['if'], data=[]]
if globally:
self._queueEvent(Quartz.CGEventPost, (0, modEvent)) # depends on [control=['if'], data=[]]
else:
# To direct output to the correct application need the PSN (macOS <=10.10) or PID(macOS > 10.10):
(macVer, _, _) = platform.mac_ver()
macVer = int(macVer.split('.')[1])
if macVer > 10:
appPid = self._getPid()
self._queueEvent(Quartz.CGEventPostToPid, (appPid, modEvent)) # depends on [control=['if'], data=[]]
else:
appPsn = self._getPsnForPid(self._getPid())
self._queueEvent(Quartz.CGEventPostToPSN, (appPsn, modEvent))
# Add the modifier flags
modFlags += AXKeyboard.modKeyFlagConstants[nextMod] # depends on [control=['for'], data=['nextMod']]
return modFlags |
def add_user_grant(self, permission, user_id, recursive=False, headers=None):
"""
Convenience method that provides a quick way to add a canonical user grant to a bucket.
This method retrieves the current ACL, creates a new grant based on the parameters
passed in, adds that grant to the ACL and then PUTs the new ACL back to GS.
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ|WRITE|FULL_CONTROL)
:type user_id: string
:param user_id: The canonical user id associated with the GS account you are granting
the permission to.
:type recursive: bool
:param recursive: A boolean value to controls whether the call
will apply the grant to all keys within the bucket
or not. The default value is False. By passing a
True value, the call will iterate through all keys
in the bucket and apply the same grant to each key.
CAUTION: If you have a lot of keys, this could take
a long time!
"""
if permission not in GSPermissions:
raise self.connection.provider.storage_permissions_error(
'Unknown Permission: %s' % permission)
acl = self.get_acl(headers=headers)
acl.add_user_grant(permission, user_id)
self.set_acl(acl, headers=headers)
if recursive:
for key in self:
key.add_user_grant(permission, user_id, headers=headers) | def function[add_user_grant, parameter[self, permission, user_id, recursive, headers]]:
constant[
Convenience method that provides a quick way to add a canonical user grant to a bucket.
This method retrieves the current ACL, creates a new grant based on the parameters
passed in, adds that grant to the ACL and then PUTs the new ACL back to GS.
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ|WRITE|FULL_CONTROL)
:type user_id: string
:param user_id: The canonical user id associated with the GS account you are granting
the permission to.
:type recursive: bool
:param recursive: A boolean value to controls whether the call
will apply the grant to all keys within the bucket
or not. The default value is False. By passing a
True value, the call will iterate through all keys
in the bucket and apply the same grant to each key.
CAUTION: If you have a lot of keys, this could take
a long time!
]
if compare[name[permission] <ast.NotIn object at 0x7da2590d7190> name[GSPermissions]] begin[:]
<ast.Raise object at 0x7da1b2677a90>
variable[acl] assign[=] call[name[self].get_acl, parameter[]]
call[name[acl].add_user_grant, parameter[name[permission], name[user_id]]]
call[name[self].set_acl, parameter[name[acl]]]
if name[recursive] begin[:]
for taget[name[key]] in starred[name[self]] begin[:]
call[name[key].add_user_grant, parameter[name[permission], name[user_id]]] | keyword[def] identifier[add_user_grant] ( identifier[self] , identifier[permission] , identifier[user_id] , identifier[recursive] = keyword[False] , identifier[headers] = keyword[None] ):
literal[string]
keyword[if] identifier[permission] keyword[not] keyword[in] identifier[GSPermissions] :
keyword[raise] identifier[self] . identifier[connection] . identifier[provider] . identifier[storage_permissions_error] (
literal[string] % identifier[permission] )
identifier[acl] = identifier[self] . identifier[get_acl] ( identifier[headers] = identifier[headers] )
identifier[acl] . identifier[add_user_grant] ( identifier[permission] , identifier[user_id] )
identifier[self] . identifier[set_acl] ( identifier[acl] , identifier[headers] = identifier[headers] )
keyword[if] identifier[recursive] :
keyword[for] identifier[key] keyword[in] identifier[self] :
identifier[key] . identifier[add_user_grant] ( identifier[permission] , identifier[user_id] , identifier[headers] = identifier[headers] ) | def add_user_grant(self, permission, user_id, recursive=False, headers=None):
"""
Convenience method that provides a quick way to add a canonical user grant to a bucket.
This method retrieves the current ACL, creates a new grant based on the parameters
passed in, adds that grant to the ACL and then PUTs the new ACL back to GS.
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ|WRITE|FULL_CONTROL)
:type user_id: string
:param user_id: The canonical user id associated with the GS account you are granting
the permission to.
:type recursive: bool
:param recursive: A boolean value to controls whether the call
will apply the grant to all keys within the bucket
or not. The default value is False. By passing a
True value, the call will iterate through all keys
in the bucket and apply the same grant to each key.
CAUTION: If you have a lot of keys, this could take
a long time!
"""
if permission not in GSPermissions:
raise self.connection.provider.storage_permissions_error('Unknown Permission: %s' % permission) # depends on [control=['if'], data=['permission']]
acl = self.get_acl(headers=headers)
acl.add_user_grant(permission, user_id)
self.set_acl(acl, headers=headers)
if recursive:
for key in self:
key.add_user_grant(permission, user_id, headers=headers) # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]] |
def prt_nts(data_nts, prtfmt=None, prt=sys.stdout, nt_fields=None, **kws):
"""Print list of namedtuples into a table using prtfmt."""
prt_txt(prt, data_nts, prtfmt, nt_fields, **kws) | def function[prt_nts, parameter[data_nts, prtfmt, prt, nt_fields]]:
constant[Print list of namedtuples into a table using prtfmt.]
call[name[prt_txt], parameter[name[prt], name[data_nts], name[prtfmt], name[nt_fields]]] | keyword[def] identifier[prt_nts] ( identifier[data_nts] , identifier[prtfmt] = keyword[None] , identifier[prt] = identifier[sys] . identifier[stdout] , identifier[nt_fields] = keyword[None] ,** identifier[kws] ):
literal[string]
identifier[prt_txt] ( identifier[prt] , identifier[data_nts] , identifier[prtfmt] , identifier[nt_fields] ,** identifier[kws] ) | def prt_nts(data_nts, prtfmt=None, prt=sys.stdout, nt_fields=None, **kws):
"""Print list of namedtuples into a table using prtfmt."""
prt_txt(prt, data_nts, prtfmt, nt_fields, **kws) |
def endLoop(self, useDriverLoop):
'''
Called by the engine to stop an event loop.
'''
self._queue = []
self._driver.stop()
if useDriverLoop:
self._driver.endLoop()
else:
self._iterator = None
self.setBusy(True) | def function[endLoop, parameter[self, useDriverLoop]]:
constant[
Called by the engine to stop an event loop.
]
name[self]._queue assign[=] list[[]]
call[name[self]._driver.stop, parameter[]]
if name[useDriverLoop] begin[:]
call[name[self]._driver.endLoop, parameter[]]
call[name[self].setBusy, parameter[constant[True]]] | keyword[def] identifier[endLoop] ( identifier[self] , identifier[useDriverLoop] ):
literal[string]
identifier[self] . identifier[_queue] =[]
identifier[self] . identifier[_driver] . identifier[stop] ()
keyword[if] identifier[useDriverLoop] :
identifier[self] . identifier[_driver] . identifier[endLoop] ()
keyword[else] :
identifier[self] . identifier[_iterator] = keyword[None]
identifier[self] . identifier[setBusy] ( keyword[True] ) | def endLoop(self, useDriverLoop):
"""
Called by the engine to stop an event loop.
"""
self._queue = []
self._driver.stop()
if useDriverLoop:
self._driver.endLoop() # depends on [control=['if'], data=[]]
else:
self._iterator = None
self.setBusy(True) |
def _shadow_contents_from_chunks(contents, chunks, block_out_regexes=None):
"""Remove all contents in spellcheckable :chunks: from contents."""
shadow_contents = [list(l) for l in contents]
for chunk in chunks:
char_offset = chunk.column
line_offset = 0
for index, line in enumerate(chunk.data):
# Block out entire chunk range from shadow_contents
for character_in_line in range(0, len(line)):
shadow_line = chunk.line + line_offset
shadow_char = char_offset + character_in_line
shadow_contents[shadow_line][shadow_char] = 0
# Also block out certain regexps from this chunk
line = filter_nonspellcheckable_tokens(line,
block_out_regexes)
chunk.data[index] = line
line_offset += 1
char_offset = 0
return shadow_contents | def function[_shadow_contents_from_chunks, parameter[contents, chunks, block_out_regexes]]:
constant[Remove all contents in spellcheckable :chunks: from contents.]
variable[shadow_contents] assign[=] <ast.ListComp object at 0x7da1b26ac5b0>
for taget[name[chunk]] in starred[name[chunks]] begin[:]
variable[char_offset] assign[=] name[chunk].column
variable[line_offset] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b26ae530>, <ast.Name object at 0x7da1b26ad2d0>]]] in starred[call[name[enumerate], parameter[name[chunk].data]]] begin[:]
for taget[name[character_in_line]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[line]]]]]] begin[:]
variable[shadow_line] assign[=] binary_operation[name[chunk].line + name[line_offset]]
variable[shadow_char] assign[=] binary_operation[name[char_offset] + name[character_in_line]]
call[call[name[shadow_contents]][name[shadow_line]]][name[shadow_char]] assign[=] constant[0]
variable[line] assign[=] call[name[filter_nonspellcheckable_tokens], parameter[name[line], name[block_out_regexes]]]
call[name[chunk].data][name[index]] assign[=] name[line]
<ast.AugAssign object at 0x7da2054a57b0>
variable[char_offset] assign[=] constant[0]
return[name[shadow_contents]] | keyword[def] identifier[_shadow_contents_from_chunks] ( identifier[contents] , identifier[chunks] , identifier[block_out_regexes] = keyword[None] ):
literal[string]
identifier[shadow_contents] =[ identifier[list] ( identifier[l] ) keyword[for] identifier[l] keyword[in] identifier[contents] ]
keyword[for] identifier[chunk] keyword[in] identifier[chunks] :
identifier[char_offset] = identifier[chunk] . identifier[column]
identifier[line_offset] = literal[int]
keyword[for] identifier[index] , identifier[line] keyword[in] identifier[enumerate] ( identifier[chunk] . identifier[data] ):
keyword[for] identifier[character_in_line] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[line] )):
identifier[shadow_line] = identifier[chunk] . identifier[line] + identifier[line_offset]
identifier[shadow_char] = identifier[char_offset] + identifier[character_in_line]
identifier[shadow_contents] [ identifier[shadow_line] ][ identifier[shadow_char] ]= literal[int]
identifier[line] = identifier[filter_nonspellcheckable_tokens] ( identifier[line] ,
identifier[block_out_regexes] )
identifier[chunk] . identifier[data] [ identifier[index] ]= identifier[line]
identifier[line_offset] += literal[int]
identifier[char_offset] = literal[int]
keyword[return] identifier[shadow_contents] | def _shadow_contents_from_chunks(contents, chunks, block_out_regexes=None):
"""Remove all contents in spellcheckable :chunks: from contents."""
shadow_contents = [list(l) for l in contents]
for chunk in chunks:
char_offset = chunk.column
line_offset = 0
for (index, line) in enumerate(chunk.data):
# Block out entire chunk range from shadow_contents
for character_in_line in range(0, len(line)):
shadow_line = chunk.line + line_offset
shadow_char = char_offset + character_in_line
shadow_contents[shadow_line][shadow_char] = 0 # depends on [control=['for'], data=['character_in_line']]
# Also block out certain regexps from this chunk
line = filter_nonspellcheckable_tokens(line, block_out_regexes)
chunk.data[index] = line
line_offset += 1
char_offset = 0 # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['chunk']]
return shadow_contents |
def _getTempSharedDir(self):
"""
Gets a temporary directory in the hierarchy of directories in self.tempFilesDir.
This directory may contain multiple shared jobs/files.
:rtype : string, path to temporary directory in which to place files/directories.
"""
tempDir = self.tempFilesDir
for i in range(self.levels):
tempDir = os.path.join(tempDir, random.choice(self.validDirs))
if not os.path.exists(tempDir):
try:
os.mkdir(tempDir)
except os.error:
if not os.path.exists(tempDir): # In the case that a collision occurs and
# it is created while we wait then we ignore
raise
return tempDir | def function[_getTempSharedDir, parameter[self]]:
constant[
Gets a temporary directory in the hierarchy of directories in self.tempFilesDir.
This directory may contain multiple shared jobs/files.
:rtype : string, path to temporary directory in which to place files/directories.
]
variable[tempDir] assign[=] name[self].tempFilesDir
for taget[name[i]] in starred[call[name[range], parameter[name[self].levels]]] begin[:]
variable[tempDir] assign[=] call[name[os].path.join, parameter[name[tempDir], call[name[random].choice, parameter[name[self].validDirs]]]]
if <ast.UnaryOp object at 0x7da1b1e5c580> begin[:]
<ast.Try object at 0x7da1b1e5db40>
return[name[tempDir]] | keyword[def] identifier[_getTempSharedDir] ( identifier[self] ):
literal[string]
identifier[tempDir] = identifier[self] . identifier[tempFilesDir]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[levels] ):
identifier[tempDir] = identifier[os] . identifier[path] . identifier[join] ( identifier[tempDir] , identifier[random] . identifier[choice] ( identifier[self] . identifier[validDirs] ))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[tempDir] ):
keyword[try] :
identifier[os] . identifier[mkdir] ( identifier[tempDir] )
keyword[except] identifier[os] . identifier[error] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[tempDir] ):
keyword[raise]
keyword[return] identifier[tempDir] | def _getTempSharedDir(self):
"""
Gets a temporary directory in the hierarchy of directories in self.tempFilesDir.
This directory may contain multiple shared jobs/files.
:rtype : string, path to temporary directory in which to place files/directories.
"""
tempDir = self.tempFilesDir
for i in range(self.levels):
tempDir = os.path.join(tempDir, random.choice(self.validDirs))
if not os.path.exists(tempDir):
try:
os.mkdir(tempDir) # depends on [control=['try'], data=[]]
except os.error:
if not os.path.exists(tempDir): # In the case that a collision occurs and
# it is created while we wait then we ignore
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return tempDir |
def upload_list(self, uploader_id=None, uploader_name=None, source=None):
"""Search and return an uploads list (Requires login).
Parameters:
uploader_id (int): The id of the uploader.
uploader_name (str): The name of the uploader.
source (str): The source of the upload (exact string match).
"""
params = {
'search[uploader_id]': uploader_id,
'search[uploader_name]': uploader_name,
'search[source]': source
}
return self._get('uploads.json', params, auth=True) | def function[upload_list, parameter[self, uploader_id, uploader_name, source]]:
constant[Search and return an uploads list (Requires login).
Parameters:
uploader_id (int): The id of the uploader.
uploader_name (str): The name of the uploader.
source (str): The source of the upload (exact string match).
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0d18340>, <ast.Constant object at 0x7da1b0d1bac0>, <ast.Constant object at 0x7da1b0d180a0>], [<ast.Name object at 0x7da1b0d1b8e0>, <ast.Name object at 0x7da1b0d1a860>, <ast.Name object at 0x7da1b0d1a1a0>]]
return[call[name[self]._get, parameter[constant[uploads.json], name[params]]]] | keyword[def] identifier[upload_list] ( identifier[self] , identifier[uploader_id] = keyword[None] , identifier[uploader_name] = keyword[None] , identifier[source] = keyword[None] ):
literal[string]
identifier[params] ={
literal[string] : identifier[uploader_id] ,
literal[string] : identifier[uploader_name] ,
literal[string] : identifier[source]
}
keyword[return] identifier[self] . identifier[_get] ( literal[string] , identifier[params] , identifier[auth] = keyword[True] ) | def upload_list(self, uploader_id=None, uploader_name=None, source=None):
"""Search and return an uploads list (Requires login).
Parameters:
uploader_id (int): The id of the uploader.
uploader_name (str): The name of the uploader.
source (str): The source of the upload (exact string match).
"""
params = {'search[uploader_id]': uploader_id, 'search[uploader_name]': uploader_name, 'search[source]': source}
return self._get('uploads.json', params, auth=True) |
def NaiveBayesLearner(dataset):
"""Just count how many times each value of each input attribute
occurs, conditional on the target value. Count the different
target values too."""
targetvals = dataset.values[dataset.target]
target_dist = CountingProbDist(targetvals)
attr_dists = dict(((gv, attr), CountingProbDist(dataset.values[attr]))
for gv in targetvals
for attr in dataset.inputs)
for example in dataset.examples:
targetval = example[dataset.target]
target_dist.add(targetval)
for attr in dataset.inputs:
attr_dists[targetval, attr].add(example[attr])
def predict(example):
"""Predict the target value for example. Consider each possible value,
and pick the most likely by looking at each attribute independently."""
def class_probability(targetval):
return (target_dist[targetval]
* product(attr_dists[targetval, attr][example[attr]]
for attr in dataset.inputs))
return argmax(targetvals, class_probability)
return predict | def function[NaiveBayesLearner, parameter[dataset]]:
constant[Just count how many times each value of each input attribute
occurs, conditional on the target value. Count the different
target values too.]
variable[targetvals] assign[=] call[name[dataset].values][name[dataset].target]
variable[target_dist] assign[=] call[name[CountingProbDist], parameter[name[targetvals]]]
variable[attr_dists] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da20e9572b0>]]
for taget[name[example]] in starred[name[dataset].examples] begin[:]
variable[targetval] assign[=] call[name[example]][name[dataset].target]
call[name[target_dist].add, parameter[name[targetval]]]
for taget[name[attr]] in starred[name[dataset].inputs] begin[:]
call[call[name[attr_dists]][tuple[[<ast.Name object at 0x7da20e9542b0>, <ast.Name object at 0x7da20e955870>]]].add, parameter[call[name[example]][name[attr]]]]
def function[predict, parameter[example]]:
constant[Predict the target value for example. Consider each possible value,
and pick the most likely by looking at each attribute independently.]
def function[class_probability, parameter[targetval]]:
return[binary_operation[call[name[target_dist]][name[targetval]] * call[name[product], parameter[<ast.GeneratorExp object at 0x7da20e954dc0>]]]]
return[call[name[argmax], parameter[name[targetvals], name[class_probability]]]]
return[name[predict]] | keyword[def] identifier[NaiveBayesLearner] ( identifier[dataset] ):
literal[string]
identifier[targetvals] = identifier[dataset] . identifier[values] [ identifier[dataset] . identifier[target] ]
identifier[target_dist] = identifier[CountingProbDist] ( identifier[targetvals] )
identifier[attr_dists] = identifier[dict] ((( identifier[gv] , identifier[attr] ), identifier[CountingProbDist] ( identifier[dataset] . identifier[values] [ identifier[attr] ]))
keyword[for] identifier[gv] keyword[in] identifier[targetvals]
keyword[for] identifier[attr] keyword[in] identifier[dataset] . identifier[inputs] )
keyword[for] identifier[example] keyword[in] identifier[dataset] . identifier[examples] :
identifier[targetval] = identifier[example] [ identifier[dataset] . identifier[target] ]
identifier[target_dist] . identifier[add] ( identifier[targetval] )
keyword[for] identifier[attr] keyword[in] identifier[dataset] . identifier[inputs] :
identifier[attr_dists] [ identifier[targetval] , identifier[attr] ]. identifier[add] ( identifier[example] [ identifier[attr] ])
keyword[def] identifier[predict] ( identifier[example] ):
literal[string]
keyword[def] identifier[class_probability] ( identifier[targetval] ):
keyword[return] ( identifier[target_dist] [ identifier[targetval] ]
* identifier[product] ( identifier[attr_dists] [ identifier[targetval] , identifier[attr] ][ identifier[example] [ identifier[attr] ]]
keyword[for] identifier[attr] keyword[in] identifier[dataset] . identifier[inputs] ))
keyword[return] identifier[argmax] ( identifier[targetvals] , identifier[class_probability] )
keyword[return] identifier[predict] | def NaiveBayesLearner(dataset):
"""Just count how many times each value of each input attribute
occurs, conditional on the target value. Count the different
target values too."""
targetvals = dataset.values[dataset.target]
target_dist = CountingProbDist(targetvals)
attr_dists = dict((((gv, attr), CountingProbDist(dataset.values[attr])) for gv in targetvals for attr in dataset.inputs))
for example in dataset.examples:
targetval = example[dataset.target]
target_dist.add(targetval)
for attr in dataset.inputs:
attr_dists[targetval, attr].add(example[attr]) # depends on [control=['for'], data=['attr']] # depends on [control=['for'], data=['example']]
def predict(example):
"""Predict the target value for example. Consider each possible value,
and pick the most likely by looking at each attribute independently."""
def class_probability(targetval):
return target_dist[targetval] * product((attr_dists[targetval, attr][example[attr]] for attr in dataset.inputs))
return argmax(targetvals, class_probability)
return predict |
def clear_uservars(self, user=None):
"""Delete all variables about a user (or all users).
:param str user: The user ID to clear variables for, or else clear all
variables for all users if not provided.
"""
if user is None:
# All the users!
self._session.reset_all()
else:
# Just this one.
self._session.reset(user) | def function[clear_uservars, parameter[self, user]]:
constant[Delete all variables about a user (or all users).
:param str user: The user ID to clear variables for, or else clear all
variables for all users if not provided.
]
if compare[name[user] is constant[None]] begin[:]
call[name[self]._session.reset_all, parameter[]] | keyword[def] identifier[clear_uservars] ( identifier[self] , identifier[user] = keyword[None] ):
literal[string]
keyword[if] identifier[user] keyword[is] keyword[None] :
identifier[self] . identifier[_session] . identifier[reset_all] ()
keyword[else] :
identifier[self] . identifier[_session] . identifier[reset] ( identifier[user] ) | def clear_uservars(self, user=None):
"""Delete all variables about a user (or all users).
:param str user: The user ID to clear variables for, or else clear all
variables for all users if not provided.
"""
if user is None:
# All the users!
self._session.reset_all() # depends on [control=['if'], data=[]]
else:
# Just this one.
self._session.reset(user) |
def ensure_storage_format(root_dir):
"""Checks if the directory looks like a filetracker storage.
Exits with error if it doesn't.
"""
if not os.path.isdir(os.path.join(root_dir, 'blobs')):
print('"blobs/" directory not found')
sys.exit(1)
if not os.path.isdir(os.path.join(root_dir, 'links')):
print('"links/" directory not found')
sys.exit(1)
if not os.path.isdir(os.path.join(root_dir, 'db')):
print('"db/" directory not found')
sys.exit(1) | def function[ensure_storage_format, parameter[root_dir]]:
constant[Checks if the directory looks like a filetracker storage.
Exits with error if it doesn't.
]
if <ast.UnaryOp object at 0x7da2044c2f80> begin[:]
call[name[print], parameter[constant["blobs/" directory not found]]]
call[name[sys].exit, parameter[constant[1]]]
if <ast.UnaryOp object at 0x7da18fe91510> begin[:]
call[name[print], parameter[constant["links/" directory not found]]]
call[name[sys].exit, parameter[constant[1]]]
if <ast.UnaryOp object at 0x7da18fe91a80> begin[:]
call[name[print], parameter[constant["db/" directory not found]]]
call[name[sys].exit, parameter[constant[1]]] | keyword[def] identifier[ensure_storage_format] ( identifier[root_dir] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root_dir] , literal[string] )):
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root_dir] , literal[string] )):
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root_dir] , literal[string] )):
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] ) | def ensure_storage_format(root_dir):
"""Checks if the directory looks like a filetracker storage.
Exits with error if it doesn't.
"""
if not os.path.isdir(os.path.join(root_dir, 'blobs')):
print('"blobs/" directory not found')
sys.exit(1) # depends on [control=['if'], data=[]]
if not os.path.isdir(os.path.join(root_dir, 'links')):
print('"links/" directory not found')
sys.exit(1) # depends on [control=['if'], data=[]]
if not os.path.isdir(os.path.join(root_dir, 'db')):
print('"db/" directory not found')
sys.exit(1) # depends on [control=['if'], data=[]] |
def string_to_int(string, alphabet):
"""
Convert a string to a number, using the given alphabet.
The input is assumed to have the most significant digit first.
"""
number = 0
alpha_len = len(alphabet)
for char in string:
number = number * alpha_len + alphabet.index(char)
return number | def function[string_to_int, parameter[string, alphabet]]:
constant[
Convert a string to a number, using the given alphabet.
The input is assumed to have the most significant digit first.
]
variable[number] assign[=] constant[0]
variable[alpha_len] assign[=] call[name[len], parameter[name[alphabet]]]
for taget[name[char]] in starred[name[string]] begin[:]
variable[number] assign[=] binary_operation[binary_operation[name[number] * name[alpha_len]] + call[name[alphabet].index, parameter[name[char]]]]
return[name[number]] | keyword[def] identifier[string_to_int] ( identifier[string] , identifier[alphabet] ):
literal[string]
identifier[number] = literal[int]
identifier[alpha_len] = identifier[len] ( identifier[alphabet] )
keyword[for] identifier[char] keyword[in] identifier[string] :
identifier[number] = identifier[number] * identifier[alpha_len] + identifier[alphabet] . identifier[index] ( identifier[char] )
keyword[return] identifier[number] | def string_to_int(string, alphabet):
"""
Convert a string to a number, using the given alphabet.
The input is assumed to have the most significant digit first.
"""
number = 0
alpha_len = len(alphabet)
for char in string:
number = number * alpha_len + alphabet.index(char) # depends on [control=['for'], data=['char']]
return number |
def scale_library_ports_meta_data(state_m, gaphas_editor=True):
"""Scale the ports of library model accordingly relative to state_copy meta size.
The function assumes that the meta data of ports of the state_copy of the library was copied to
respective elements in the library and that those was not adjusted before.
"""
if state_m.meta_data_was_scaled:
return
state_m.income.set_meta_data_editor('rel_pos', state_m.state_copy.income.get_meta_data_editor()['rel_pos'])
# print("scale_library_ports_meta_data ", state_m.get_meta_data_editor()['size'], \)
# state_m.state_copy.get_meta_data_editor()['size']
factor = divide_two_vectors(state_m.get_meta_data_editor()['size'],
state_m.state_copy.get_meta_data_editor()['size'])
# print("scale_library_ports_meta_data -> resize_state_port_meta", factor)
if contains_geometric_info(factor):
resize_state_port_meta(state_m, factor, True)
state_m.meta_data_was_scaled = True
else:
logger.info("Skip resize of library ports meta data {0}".format(state_m)) | def function[scale_library_ports_meta_data, parameter[state_m, gaphas_editor]]:
constant[Scale the ports of library model accordingly relative to state_copy meta size.
The function assumes that the meta data of ports of the state_copy of the library was copied to
respective elements in the library and that those was not adjusted before.
]
if name[state_m].meta_data_was_scaled begin[:]
return[None]
call[name[state_m].income.set_meta_data_editor, parameter[constant[rel_pos], call[call[name[state_m].state_copy.income.get_meta_data_editor, parameter[]]][constant[rel_pos]]]]
variable[factor] assign[=] call[name[divide_two_vectors], parameter[call[call[name[state_m].get_meta_data_editor, parameter[]]][constant[size]], call[call[name[state_m].state_copy.get_meta_data_editor, parameter[]]][constant[size]]]]
if call[name[contains_geometric_info], parameter[name[factor]]] begin[:]
call[name[resize_state_port_meta], parameter[name[state_m], name[factor], constant[True]]]
name[state_m].meta_data_was_scaled assign[=] constant[True] | keyword[def] identifier[scale_library_ports_meta_data] ( identifier[state_m] , identifier[gaphas_editor] = keyword[True] ):
literal[string]
keyword[if] identifier[state_m] . identifier[meta_data_was_scaled] :
keyword[return]
identifier[state_m] . identifier[income] . identifier[set_meta_data_editor] ( literal[string] , identifier[state_m] . identifier[state_copy] . identifier[income] . identifier[get_meta_data_editor] ()[ literal[string] ])
identifier[factor] = identifier[divide_two_vectors] ( identifier[state_m] . identifier[get_meta_data_editor] ()[ literal[string] ],
identifier[state_m] . identifier[state_copy] . identifier[get_meta_data_editor] ()[ literal[string] ])
keyword[if] identifier[contains_geometric_info] ( identifier[factor] ):
identifier[resize_state_port_meta] ( identifier[state_m] , identifier[factor] , keyword[True] )
identifier[state_m] . identifier[meta_data_was_scaled] = keyword[True]
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[state_m] )) | def scale_library_ports_meta_data(state_m, gaphas_editor=True):
"""Scale the ports of library model accordingly relative to state_copy meta size.
The function assumes that the meta data of ports of the state_copy of the library was copied to
respective elements in the library and that those was not adjusted before.
"""
if state_m.meta_data_was_scaled:
return # depends on [control=['if'], data=[]]
state_m.income.set_meta_data_editor('rel_pos', state_m.state_copy.income.get_meta_data_editor()['rel_pos'])
# print("scale_library_ports_meta_data ", state_m.get_meta_data_editor()['size'], \)
# state_m.state_copy.get_meta_data_editor()['size']
factor = divide_two_vectors(state_m.get_meta_data_editor()['size'], state_m.state_copy.get_meta_data_editor()['size'])
# print("scale_library_ports_meta_data -> resize_state_port_meta", factor)
if contains_geometric_info(factor):
resize_state_port_meta(state_m, factor, True)
state_m.meta_data_was_scaled = True # depends on [control=['if'], data=[]]
else:
logger.info('Skip resize of library ports meta data {0}'.format(state_m)) |
def jsonise(dic):
"""Renvoie un dictionnaire dont les champs dont compatibles avec SQL
Utilise Json. Attention à None : il faut laisser None et non pas null"""
d = {}
for k, v in dic.items():
if type(v) in abstractRequetesSQL.TYPES_PERMIS:
d[k] = v
else:
try:
d[k] = json.dumps(v, ensure_ascii=False, cls=formats.JsonEncoder)
except ValueError as e:
logging.exception("Erreur d'encodage JSON !")
raise e
return d | def function[jsonise, parameter[dic]]:
constant[Renvoie un dictionnaire dont les champs dont compatibles avec SQL
Utilise Json. Attention à None : il faut laisser None et non pas null]
variable[d] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b11a2d10>, <ast.Name object at 0x7da1b11a3a30>]]] in starred[call[name[dic].items, parameter[]]] begin[:]
if compare[call[name[type], parameter[name[v]]] in name[abstractRequetesSQL].TYPES_PERMIS] begin[:]
call[name[d]][name[k]] assign[=] name[v]
return[name[d]] | keyword[def] identifier[jsonise] ( identifier[dic] ):
literal[string]
identifier[d] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[dic] . identifier[items] ():
keyword[if] identifier[type] ( identifier[v] ) keyword[in] identifier[abstractRequetesSQL] . identifier[TYPES_PERMIS] :
identifier[d] [ identifier[k] ]= identifier[v]
keyword[else] :
keyword[try] :
identifier[d] [ identifier[k] ]= identifier[json] . identifier[dumps] ( identifier[v] , identifier[ensure_ascii] = keyword[False] , identifier[cls] = identifier[formats] . identifier[JsonEncoder] )
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
identifier[logging] . identifier[exception] ( literal[string] )
keyword[raise] identifier[e]
keyword[return] identifier[d] | def jsonise(dic):
"""Renvoie un dictionnaire dont les champs dont compatibles avec SQL
Utilise Json. Attention à None : il faut laisser None et non pas null"""
d = {}
for (k, v) in dic.items():
if type(v) in abstractRequetesSQL.TYPES_PERMIS:
d[k] = v # depends on [control=['if'], data=[]]
else:
try:
d[k] = json.dumps(v, ensure_ascii=False, cls=formats.JsonEncoder) # depends on [control=['try'], data=[]]
except ValueError as e:
logging.exception("Erreur d'encodage JSON !")
raise e # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=[]]
return d |
def remove_child(self, group_lookup_attribute_value):
""" Attempts to remove a child from the AD group.
:param group_lookup_attribute_value: The value for the LDAP_GROUPS_GROUP_LOOKUP_ATTRIBUTE.
:type group_lookup_attribute_value: str
:raises: **GroupDoesNotExist** if the provided group doesn't exist in the active directory.
(inherited from _get_group_dn)
:raises: **EntryAlreadyExists** if the child already exists in this group. (subclass of ModificationFailed)
:raises: **InsufficientPermissions** if the bind user does not have permission to modify this group.
(subclass of ModificationFailed)
:raises: **ModificationFailed** if the modification could not be performed for an unforseen reason.
"""
remove_child = {'member': (MODIFY_DELETE, [self._get_group_dn(group_lookup_attribute_value)])}
self._attempt_modification("child", group_lookup_attribute_value, remove_child) | def function[remove_child, parameter[self, group_lookup_attribute_value]]:
constant[ Attempts to remove a child from the AD group.
:param group_lookup_attribute_value: The value for the LDAP_GROUPS_GROUP_LOOKUP_ATTRIBUTE.
:type group_lookup_attribute_value: str
:raises: **GroupDoesNotExist** if the provided group doesn't exist in the active directory.
(inherited from _get_group_dn)
:raises: **EntryAlreadyExists** if the child already exists in this group. (subclass of ModificationFailed)
:raises: **InsufficientPermissions** if the bind user does not have permission to modify this group.
(subclass of ModificationFailed)
:raises: **ModificationFailed** if the modification could not be performed for an unforseen reason.
]
variable[remove_child] assign[=] dictionary[[<ast.Constant object at 0x7da1b25e8fa0>], [<ast.Tuple object at 0x7da1b25eb400>]]
call[name[self]._attempt_modification, parameter[constant[child], name[group_lookup_attribute_value], name[remove_child]]] | keyword[def] identifier[remove_child] ( identifier[self] , identifier[group_lookup_attribute_value] ):
literal[string]
identifier[remove_child] ={ literal[string] :( identifier[MODIFY_DELETE] ,[ identifier[self] . identifier[_get_group_dn] ( identifier[group_lookup_attribute_value] )])}
identifier[self] . identifier[_attempt_modification] ( literal[string] , identifier[group_lookup_attribute_value] , identifier[remove_child] ) | def remove_child(self, group_lookup_attribute_value):
""" Attempts to remove a child from the AD group.
:param group_lookup_attribute_value: The value for the LDAP_GROUPS_GROUP_LOOKUP_ATTRIBUTE.
:type group_lookup_attribute_value: str
:raises: **GroupDoesNotExist** if the provided group doesn't exist in the active directory.
(inherited from _get_group_dn)
:raises: **EntryAlreadyExists** if the child already exists in this group. (subclass of ModificationFailed)
:raises: **InsufficientPermissions** if the bind user does not have permission to modify this group.
(subclass of ModificationFailed)
:raises: **ModificationFailed** if the modification could not be performed for an unforseen reason.
"""
remove_child = {'member': (MODIFY_DELETE, [self._get_group_dn(group_lookup_attribute_value)])}
self._attempt_modification('child', group_lookup_attribute_value, remove_child) |
def check_for_interest(self, client, recipient, message):
"""Determine whether this line is addressing us."""
for prefix in self.prefixes:
if message.startswith(prefix):
return True, message[len(prefix):]
# Don't require a prefix if addressed in PM.
# This comes after the prefix checks because
# if the user does include a prefix, we want
# to strip it, even in PM.
if not isinstance(recipient, Channel):
return True, message
return False, None | def function[check_for_interest, parameter[self, client, recipient, message]]:
constant[Determine whether this line is addressing us.]
for taget[name[prefix]] in starred[name[self].prefixes] begin[:]
if call[name[message].startswith, parameter[name[prefix]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da204564490>, <ast.Subscript object at 0x7da204567850>]]]
if <ast.UnaryOp object at 0x7da204565d50> begin[:]
return[tuple[[<ast.Constant object at 0x7da2045667a0>, <ast.Name object at 0x7da204566b30>]]]
return[tuple[[<ast.Constant object at 0x7da204567c40>, <ast.Constant object at 0x7da1b0a667d0>]]] | keyword[def] identifier[check_for_interest] ( identifier[self] , identifier[client] , identifier[recipient] , identifier[message] ):
literal[string]
keyword[for] identifier[prefix] keyword[in] identifier[self] . identifier[prefixes] :
keyword[if] identifier[message] . identifier[startswith] ( identifier[prefix] ):
keyword[return] keyword[True] , identifier[message] [ identifier[len] ( identifier[prefix] ):]
keyword[if] keyword[not] identifier[isinstance] ( identifier[recipient] , identifier[Channel] ):
keyword[return] keyword[True] , identifier[message]
keyword[return] keyword[False] , keyword[None] | def check_for_interest(self, client, recipient, message):
"""Determine whether this line is addressing us."""
for prefix in self.prefixes:
if message.startswith(prefix):
return (True, message[len(prefix):]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prefix']]
# Don't require a prefix if addressed in PM.
# This comes after the prefix checks because
# if the user does include a prefix, we want
# to strip it, even in PM.
if not isinstance(recipient, Channel):
return (True, message) # depends on [control=['if'], data=[]]
return (False, None) |
def backend_widget(backend):
"""Creates a backend widget.
"""
config = backend.configuration().to_dict()
props = backend.properties().to_dict()
name = widgets.HTML(value="<h4>{name}</h4>".format(name=backend.name()),
layout=widgets.Layout())
n_qubits = config['n_qubits']
qubit_count = widgets.HTML(value="<h5><b>{qubits}</b></h5>".format(qubits=n_qubits),
layout=widgets.Layout(justify_content='center'))
cmap = widgets.Output(layout=widgets.Layout(min_width='250px', max_width='250px',
max_height='250px',
min_height='250px',
justify_content='center',
align_items='center',
margin='0px 0px 0px 0px'))
with cmap:
_cmap_fig = plot_gate_map(backend,
plot_directed=False,
label_qubits=False)
if _cmap_fig is not None:
display(_cmap_fig)
# Prevents plot from showing up twice.
plt.close(_cmap_fig)
pending = generate_jobs_pending_widget()
is_oper = widgets.HTML(value="<h5></h5>",
layout=widgets.Layout(justify_content='center'))
least_busy = widgets.HTML(value="<h5></h5>",
layout=widgets.Layout(justify_content='center'))
t1_units = props['qubits'][0][0]['unit']
avg_t1 = round(sum([q[0]['value'] for q in props['qubits']])/n_qubits, 1)
t1_widget = widgets.HTML(value="<h5>{t1} {units}</h5>".format(t1=avg_t1, units=t1_units),
layout=widgets.Layout())
t2_units = props['qubits'][0][1]['unit']
avg_t2 = round(sum([q[1]['value'] for q in props['qubits']])/n_qubits, 1)
t2_widget = widgets.HTML(value="<h5>{t2} {units}</h5>".format(t2=avg_t2, units=t2_units),
layout=widgets.Layout())
out = widgets.VBox([name, cmap, qubit_count, pending,
least_busy, is_oper, t1_widget, t2_widget],
layout=widgets.Layout(display='inline-flex',
flex_flow='column',
align_items='center'))
out._is_alive = True
return out | def function[backend_widget, parameter[backend]]:
constant[Creates a backend widget.
]
variable[config] assign[=] call[call[name[backend].configuration, parameter[]].to_dict, parameter[]]
variable[props] assign[=] call[call[name[backend].properties, parameter[]].to_dict, parameter[]]
variable[name] assign[=] call[name[widgets].HTML, parameter[]]
variable[n_qubits] assign[=] call[name[config]][constant[n_qubits]]
variable[qubit_count] assign[=] call[name[widgets].HTML, parameter[]]
variable[cmap] assign[=] call[name[widgets].Output, parameter[]]
with name[cmap] begin[:]
variable[_cmap_fig] assign[=] call[name[plot_gate_map], parameter[name[backend]]]
if compare[name[_cmap_fig] is_not constant[None]] begin[:]
call[name[display], parameter[name[_cmap_fig]]]
call[name[plt].close, parameter[name[_cmap_fig]]]
variable[pending] assign[=] call[name[generate_jobs_pending_widget], parameter[]]
variable[is_oper] assign[=] call[name[widgets].HTML, parameter[]]
variable[least_busy] assign[=] call[name[widgets].HTML, parameter[]]
variable[t1_units] assign[=] call[call[call[call[name[props]][constant[qubits]]][constant[0]]][constant[0]]][constant[unit]]
variable[avg_t1] assign[=] call[name[round], parameter[binary_operation[call[name[sum], parameter[<ast.ListComp object at 0x7da1b0396890>]] / name[n_qubits]], constant[1]]]
variable[t1_widget] assign[=] call[name[widgets].HTML, parameter[]]
variable[t2_units] assign[=] call[call[call[call[name[props]][constant[qubits]]][constant[0]]][constant[1]]][constant[unit]]
variable[avg_t2] assign[=] call[name[round], parameter[binary_operation[call[name[sum], parameter[<ast.ListComp object at 0x7da1b0380c10>]] / name[n_qubits]], constant[1]]]
variable[t2_widget] assign[=] call[name[widgets].HTML, parameter[]]
variable[out] assign[=] call[name[widgets].VBox, parameter[list[[<ast.Name object at 0x7da1b0382380>, <ast.Name object at 0x7da1b03813c0>, <ast.Name object at 0x7da1b0383610>, <ast.Name object at 0x7da1b03804f0>, <ast.Name object at 0x7da1b03807f0>, <ast.Name object at 0x7da1b03815a0>, <ast.Name object at 0x7da1b0383880>, <ast.Name object at 0x7da1b0382e30>]]]]
name[out]._is_alive assign[=] constant[True]
return[name[out]] | keyword[def] identifier[backend_widget] ( identifier[backend] ):
literal[string]
identifier[config] = identifier[backend] . identifier[configuration] (). identifier[to_dict] ()
identifier[props] = identifier[backend] . identifier[properties] (). identifier[to_dict] ()
identifier[name] = identifier[widgets] . identifier[HTML] ( identifier[value] = literal[string] . identifier[format] ( identifier[name] = identifier[backend] . identifier[name] ()),
identifier[layout] = identifier[widgets] . identifier[Layout] ())
identifier[n_qubits] = identifier[config] [ literal[string] ]
identifier[qubit_count] = identifier[widgets] . identifier[HTML] ( identifier[value] = literal[string] . identifier[format] ( identifier[qubits] = identifier[n_qubits] ),
identifier[layout] = identifier[widgets] . identifier[Layout] ( identifier[justify_content] = literal[string] ))
identifier[cmap] = identifier[widgets] . identifier[Output] ( identifier[layout] = identifier[widgets] . identifier[Layout] ( identifier[min_width] = literal[string] , identifier[max_width] = literal[string] ,
identifier[max_height] = literal[string] ,
identifier[min_height] = literal[string] ,
identifier[justify_content] = literal[string] ,
identifier[align_items] = literal[string] ,
identifier[margin] = literal[string] ))
keyword[with] identifier[cmap] :
identifier[_cmap_fig] = identifier[plot_gate_map] ( identifier[backend] ,
identifier[plot_directed] = keyword[False] ,
identifier[label_qubits] = keyword[False] )
keyword[if] identifier[_cmap_fig] keyword[is] keyword[not] keyword[None] :
identifier[display] ( identifier[_cmap_fig] )
identifier[plt] . identifier[close] ( identifier[_cmap_fig] )
identifier[pending] = identifier[generate_jobs_pending_widget] ()
identifier[is_oper] = identifier[widgets] . identifier[HTML] ( identifier[value] = literal[string] ,
identifier[layout] = identifier[widgets] . identifier[Layout] ( identifier[justify_content] = literal[string] ))
identifier[least_busy] = identifier[widgets] . identifier[HTML] ( identifier[value] = literal[string] ,
identifier[layout] = identifier[widgets] . identifier[Layout] ( identifier[justify_content] = literal[string] ))
identifier[t1_units] = identifier[props] [ literal[string] ][ literal[int] ][ literal[int] ][ literal[string] ]
identifier[avg_t1] = identifier[round] ( identifier[sum] ([ identifier[q] [ literal[int] ][ literal[string] ] keyword[for] identifier[q] keyword[in] identifier[props] [ literal[string] ]])/ identifier[n_qubits] , literal[int] )
identifier[t1_widget] = identifier[widgets] . identifier[HTML] ( identifier[value] = literal[string] . identifier[format] ( identifier[t1] = identifier[avg_t1] , identifier[units] = identifier[t1_units] ),
identifier[layout] = identifier[widgets] . identifier[Layout] ())
identifier[t2_units] = identifier[props] [ literal[string] ][ literal[int] ][ literal[int] ][ literal[string] ]
identifier[avg_t2] = identifier[round] ( identifier[sum] ([ identifier[q] [ literal[int] ][ literal[string] ] keyword[for] identifier[q] keyword[in] identifier[props] [ literal[string] ]])/ identifier[n_qubits] , literal[int] )
identifier[t2_widget] = identifier[widgets] . identifier[HTML] ( identifier[value] = literal[string] . identifier[format] ( identifier[t2] = identifier[avg_t2] , identifier[units] = identifier[t2_units] ),
identifier[layout] = identifier[widgets] . identifier[Layout] ())
identifier[out] = identifier[widgets] . identifier[VBox] ([ identifier[name] , identifier[cmap] , identifier[qubit_count] , identifier[pending] ,
identifier[least_busy] , identifier[is_oper] , identifier[t1_widget] , identifier[t2_widget] ],
identifier[layout] = identifier[widgets] . identifier[Layout] ( identifier[display] = literal[string] ,
identifier[flex_flow] = literal[string] ,
identifier[align_items] = literal[string] ))
identifier[out] . identifier[_is_alive] = keyword[True]
keyword[return] identifier[out] | def backend_widget(backend):
"""Creates a backend widget.
"""
config = backend.configuration().to_dict()
props = backend.properties().to_dict()
name = widgets.HTML(value='<h4>{name}</h4>'.format(name=backend.name()), layout=widgets.Layout())
n_qubits = config['n_qubits']
qubit_count = widgets.HTML(value='<h5><b>{qubits}</b></h5>'.format(qubits=n_qubits), layout=widgets.Layout(justify_content='center'))
cmap = widgets.Output(layout=widgets.Layout(min_width='250px', max_width='250px', max_height='250px', min_height='250px', justify_content='center', align_items='center', margin='0px 0px 0px 0px'))
with cmap:
_cmap_fig = plot_gate_map(backend, plot_directed=False, label_qubits=False)
if _cmap_fig is not None:
display(_cmap_fig)
# Prevents plot from showing up twice.
plt.close(_cmap_fig) # depends on [control=['if'], data=['_cmap_fig']] # depends on [control=['with'], data=[]]
pending = generate_jobs_pending_widget()
is_oper = widgets.HTML(value='<h5></h5>', layout=widgets.Layout(justify_content='center'))
least_busy = widgets.HTML(value='<h5></h5>', layout=widgets.Layout(justify_content='center'))
t1_units = props['qubits'][0][0]['unit']
avg_t1 = round(sum([q[0]['value'] for q in props['qubits']]) / n_qubits, 1)
t1_widget = widgets.HTML(value='<h5>{t1} {units}</h5>'.format(t1=avg_t1, units=t1_units), layout=widgets.Layout())
t2_units = props['qubits'][0][1]['unit']
avg_t2 = round(sum([q[1]['value'] for q in props['qubits']]) / n_qubits, 1)
t2_widget = widgets.HTML(value='<h5>{t2} {units}</h5>'.format(t2=avg_t2, units=t2_units), layout=widgets.Layout())
out = widgets.VBox([name, cmap, qubit_count, pending, least_busy, is_oper, t1_widget, t2_widget], layout=widgets.Layout(display='inline-flex', flex_flow='column', align_items='center'))
out._is_alive = True
return out |
def isSelfClosingTag(self, name):
"""Returns true iff the given string is the name of a
self-closing tag according to this parser."""
return self.SELF_CLOSING_TAGS.has_key(name) \
or self.instanceSelfClosingTags.has_key(name) | def function[isSelfClosingTag, parameter[self, name]]:
constant[Returns true iff the given string is the name of a
self-closing tag according to this parser.]
return[<ast.BoolOp object at 0x7da1b004cfd0>] | keyword[def] identifier[isSelfClosingTag] ( identifier[self] , identifier[name] ):
literal[string]
keyword[return] identifier[self] . identifier[SELF_CLOSING_TAGS] . identifier[has_key] ( identifier[name] ) keyword[or] identifier[self] . identifier[instanceSelfClosingTags] . identifier[has_key] ( identifier[name] ) | def isSelfClosingTag(self, name):
"""Returns true iff the given string is the name of a
self-closing tag according to this parser."""
return self.SELF_CLOSING_TAGS.has_key(name) or self.instanceSelfClosingTags.has_key(name) |
def nearly_unique(arr, rel_tol=1e-4, verbose=0):
'''Heuristic method to return the uniques within some precision in a numpy array'''
results = np.array([arr[0]])
for x in arr:
if np.abs(results - x).min() > rel_tol:
results = np.append(results, x)
return results | def function[nearly_unique, parameter[arr, rel_tol, verbose]]:
constant[Heuristic method to return the uniques within some precision in a numpy array]
variable[results] assign[=] call[name[np].array, parameter[list[[<ast.Subscript object at 0x7da18dc98310>]]]]
for taget[name[x]] in starred[name[arr]] begin[:]
if compare[call[call[name[np].abs, parameter[binary_operation[name[results] - name[x]]]].min, parameter[]] greater[>] name[rel_tol]] begin[:]
variable[results] assign[=] call[name[np].append, parameter[name[results], name[x]]]
return[name[results]] | keyword[def] identifier[nearly_unique] ( identifier[arr] , identifier[rel_tol] = literal[int] , identifier[verbose] = literal[int] ):
literal[string]
identifier[results] = identifier[np] . identifier[array] ([ identifier[arr] [ literal[int] ]])
keyword[for] identifier[x] keyword[in] identifier[arr] :
keyword[if] identifier[np] . identifier[abs] ( identifier[results] - identifier[x] ). identifier[min] ()> identifier[rel_tol] :
identifier[results] = identifier[np] . identifier[append] ( identifier[results] , identifier[x] )
keyword[return] identifier[results] | def nearly_unique(arr, rel_tol=0.0001, verbose=0):
"""Heuristic method to return the uniques within some precision in a numpy array"""
results = np.array([arr[0]])
for x in arr:
if np.abs(results - x).min() > rel_tol:
results = np.append(results, x) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
return results |
def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
"""Returns a binary digest for the PBKDF2 hash algorithm of `data`
with the given `salt`. It iterates `iterations` time and produces a
key of `keylen` bytes. By default SHA-1 is used as hash function,
a different hashlib `hashfunc` can be provided.
"""
hashfunc = hashfunc or hashlib.sha1
mac = hmac.new(bytes_(data), None, hashfunc)
def _pseudorandom(x, mac=mac):
h = mac.copy()
h.update(bytes_(x))
if not PY2:
return [x for x in h.digest()]
else:
return map(ord, h.digest())
buf = []
for block in range_(1, -(-keylen // mac.digest_size) + 1):
rv = u = _pseudorandom(bytes_(salt) + _pack_int(block))
for i in range_(iterations - 1):
if not PY2:
u = _pseudorandom(bytes(u))
else:
u = _pseudorandom(''.join(map(chr, u)))
rv = starmap(xor, zip(rv, u))
buf.extend(rv)
if not PY2:
return bytes(buf)[:keylen]
else:
return ''.join(map(chr, buf))[:keylen] | def function[pbkdf2_bin, parameter[data, salt, iterations, keylen, hashfunc]]:
constant[Returns a binary digest for the PBKDF2 hash algorithm of `data`
with the given `salt`. It iterates `iterations` time and produces a
key of `keylen` bytes. By default SHA-1 is used as hash function,
a different hashlib `hashfunc` can be provided.
]
variable[hashfunc] assign[=] <ast.BoolOp object at 0x7da1b095f670>
variable[mac] assign[=] call[name[hmac].new, parameter[call[name[bytes_], parameter[name[data]]], constant[None], name[hashfunc]]]
def function[_pseudorandom, parameter[x, mac]]:
variable[h] assign[=] call[name[mac].copy, parameter[]]
call[name[h].update, parameter[call[name[bytes_], parameter[name[x]]]]]
if <ast.UnaryOp object at 0x7da1b095d7e0> begin[:]
return[<ast.ListComp object at 0x7da1b095da50>]
variable[buf] assign[=] list[[]]
for taget[name[block]] in starred[call[name[range_], parameter[constant[1], binary_operation[<ast.UnaryOp object at 0x7da1b09eaad0> + constant[1]]]]] begin[:]
variable[rv] assign[=] call[name[_pseudorandom], parameter[binary_operation[call[name[bytes_], parameter[name[salt]]] + call[name[_pack_int], parameter[name[block]]]]]]
for taget[name[i]] in starred[call[name[range_], parameter[binary_operation[name[iterations] - constant[1]]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b09eaf50> begin[:]
variable[u] assign[=] call[name[_pseudorandom], parameter[call[name[bytes], parameter[name[u]]]]]
variable[rv] assign[=] call[name[starmap], parameter[name[xor], call[name[zip], parameter[name[rv], name[u]]]]]
call[name[buf].extend, parameter[name[rv]]]
if <ast.UnaryOp object at 0x7da1b095d210> begin[:]
return[call[call[name[bytes], parameter[name[buf]]]][<ast.Slice object at 0x7da1b0948a90>]] | keyword[def] identifier[pbkdf2_bin] ( identifier[data] , identifier[salt] , identifier[iterations] = literal[int] , identifier[keylen] = literal[int] , identifier[hashfunc] = keyword[None] ):
literal[string]
identifier[hashfunc] = identifier[hashfunc] keyword[or] identifier[hashlib] . identifier[sha1]
identifier[mac] = identifier[hmac] . identifier[new] ( identifier[bytes_] ( identifier[data] ), keyword[None] , identifier[hashfunc] )
keyword[def] identifier[_pseudorandom] ( identifier[x] , identifier[mac] = identifier[mac] ):
identifier[h] = identifier[mac] . identifier[copy] ()
identifier[h] . identifier[update] ( identifier[bytes_] ( identifier[x] ))
keyword[if] keyword[not] identifier[PY2] :
keyword[return] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[h] . identifier[digest] ()]
keyword[else] :
keyword[return] identifier[map] ( identifier[ord] , identifier[h] . identifier[digest] ())
identifier[buf] =[]
keyword[for] identifier[block] keyword[in] identifier[range_] ( literal[int] ,-(- identifier[keylen] // identifier[mac] . identifier[digest_size] )+ literal[int] ):
identifier[rv] = identifier[u] = identifier[_pseudorandom] ( identifier[bytes_] ( identifier[salt] )+ identifier[_pack_int] ( identifier[block] ))
keyword[for] identifier[i] keyword[in] identifier[range_] ( identifier[iterations] - literal[int] ):
keyword[if] keyword[not] identifier[PY2] :
identifier[u] = identifier[_pseudorandom] ( identifier[bytes] ( identifier[u] ))
keyword[else] :
identifier[u] = identifier[_pseudorandom] ( literal[string] . identifier[join] ( identifier[map] ( identifier[chr] , identifier[u] )))
identifier[rv] = identifier[starmap] ( identifier[xor] , identifier[zip] ( identifier[rv] , identifier[u] ))
identifier[buf] . identifier[extend] ( identifier[rv] )
keyword[if] keyword[not] identifier[PY2] :
keyword[return] identifier[bytes] ( identifier[buf] )[: identifier[keylen] ]
keyword[else] :
keyword[return] literal[string] . identifier[join] ( identifier[map] ( identifier[chr] , identifier[buf] ))[: identifier[keylen] ] | def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
"""Returns a binary digest for the PBKDF2 hash algorithm of `data`
with the given `salt`. It iterates `iterations` time and produces a
key of `keylen` bytes. By default SHA-1 is used as hash function,
a different hashlib `hashfunc` can be provided.
"""
hashfunc = hashfunc or hashlib.sha1
mac = hmac.new(bytes_(data), None, hashfunc)
def _pseudorandom(x, mac=mac):
h = mac.copy()
h.update(bytes_(x))
if not PY2:
return [x for x in h.digest()] # depends on [control=['if'], data=[]]
else:
return map(ord, h.digest())
buf = []
for block in range_(1, -(-keylen // mac.digest_size) + 1):
rv = u = _pseudorandom(bytes_(salt) + _pack_int(block))
for i in range_(iterations - 1):
if not PY2:
u = _pseudorandom(bytes(u)) # depends on [control=['if'], data=[]]
else:
u = _pseudorandom(''.join(map(chr, u)))
rv = starmap(xor, zip(rv, u)) # depends on [control=['for'], data=[]]
buf.extend(rv) # depends on [control=['for'], data=['block']]
if not PY2:
return bytes(buf)[:keylen] # depends on [control=['if'], data=[]]
else:
return ''.join(map(chr, buf))[:keylen] |
def transform(self, X):
"""Encode categorical columns into sparse matrix with one-hot-encoding.
Args:
X (pandas.DataFrame): categorical columns to encode
Returns:
X_new (scipy.sparse.coo_matrix): sparse matrix encoding categorical
variables into dummy variables
"""
for i, col in enumerate(X.columns):
X_col = self._transform_col(X[col], i)
if X_col is not None:
if i == 0:
X_new = X_col
else:
X_new = sparse.hstack((X_new, X_col))
logger.debug('{} --> {} features'.format(
col, self.label_encoder.label_maxes[i])
)
return X_new | def function[transform, parameter[self, X]]:
constant[Encode categorical columns into sparse matrix with one-hot-encoding.
Args:
X (pandas.DataFrame): categorical columns to encode
Returns:
X_new (scipy.sparse.coo_matrix): sparse matrix encoding categorical
variables into dummy variables
]
for taget[tuple[[<ast.Name object at 0x7da2054a7eb0>, <ast.Name object at 0x7da2054a6e00>]]] in starred[call[name[enumerate], parameter[name[X].columns]]] begin[:]
variable[X_col] assign[=] call[name[self]._transform_col, parameter[call[name[X]][name[col]], name[i]]]
if compare[name[X_col] is_not constant[None]] begin[:]
if compare[name[i] equal[==] constant[0]] begin[:]
variable[X_new] assign[=] name[X_col]
call[name[logger].debug, parameter[call[constant[{} --> {} features].format, parameter[name[col], call[name[self].label_encoder.label_maxes][name[i]]]]]]
return[name[X_new]] | keyword[def] identifier[transform] ( identifier[self] , identifier[X] ):
literal[string]
keyword[for] identifier[i] , identifier[col] keyword[in] identifier[enumerate] ( identifier[X] . identifier[columns] ):
identifier[X_col] = identifier[self] . identifier[_transform_col] ( identifier[X] [ identifier[col] ], identifier[i] )
keyword[if] identifier[X_col] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[i] == literal[int] :
identifier[X_new] = identifier[X_col]
keyword[else] :
identifier[X_new] = identifier[sparse] . identifier[hstack] (( identifier[X_new] , identifier[X_col] ))
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[col] , identifier[self] . identifier[label_encoder] . identifier[label_maxes] [ identifier[i] ])
)
keyword[return] identifier[X_new] | def transform(self, X):
"""Encode categorical columns into sparse matrix with one-hot-encoding.
Args:
X (pandas.DataFrame): categorical columns to encode
Returns:
X_new (scipy.sparse.coo_matrix): sparse matrix encoding categorical
variables into dummy variables
"""
for (i, col) in enumerate(X.columns):
X_col = self._transform_col(X[col], i)
if X_col is not None:
if i == 0:
X_new = X_col # depends on [control=['if'], data=[]]
else:
X_new = sparse.hstack((X_new, X_col)) # depends on [control=['if'], data=['X_col']]
logger.debug('{} --> {} features'.format(col, self.label_encoder.label_maxes[i])) # depends on [control=['for'], data=[]]
return X_new |
def logical_chassis_fwdl_sanity_output_cluster_output_fwdl_msg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
logical_chassis_fwdl_sanity = ET.Element("logical_chassis_fwdl_sanity")
config = logical_chassis_fwdl_sanity
output = ET.SubElement(logical_chassis_fwdl_sanity, "output")
cluster_output = ET.SubElement(output, "cluster-output")
fwdl_msg = ET.SubElement(cluster_output, "fwdl-msg")
fwdl_msg.text = kwargs.pop('fwdl_msg')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[logical_chassis_fwdl_sanity_output_cluster_output_fwdl_msg, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[logical_chassis_fwdl_sanity] assign[=] call[name[ET].Element, parameter[constant[logical_chassis_fwdl_sanity]]]
variable[config] assign[=] name[logical_chassis_fwdl_sanity]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[logical_chassis_fwdl_sanity], constant[output]]]
variable[cluster_output] assign[=] call[name[ET].SubElement, parameter[name[output], constant[cluster-output]]]
variable[fwdl_msg] assign[=] call[name[ET].SubElement, parameter[name[cluster_output], constant[fwdl-msg]]]
name[fwdl_msg].text assign[=] call[name[kwargs].pop, parameter[constant[fwdl_msg]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[logical_chassis_fwdl_sanity_output_cluster_output_fwdl_msg] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[logical_chassis_fwdl_sanity] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[logical_chassis_fwdl_sanity]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[logical_chassis_fwdl_sanity] , literal[string] )
identifier[cluster_output] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[fwdl_msg] = identifier[ET] . identifier[SubElement] ( identifier[cluster_output] , literal[string] )
identifier[fwdl_msg] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def logical_chassis_fwdl_sanity_output_cluster_output_fwdl_msg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
logical_chassis_fwdl_sanity = ET.Element('logical_chassis_fwdl_sanity')
config = logical_chassis_fwdl_sanity
output = ET.SubElement(logical_chassis_fwdl_sanity, 'output')
cluster_output = ET.SubElement(output, 'cluster-output')
fwdl_msg = ET.SubElement(cluster_output, 'fwdl-msg')
fwdl_msg.text = kwargs.pop('fwdl_msg')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def getAnalysisCategories(self):
"""Return all available analysis categories
"""
bsc = api.get_tool("bika_setup_catalog")
cats = []
for st in bsc(portal_type="AnalysisCategory",
is_active=True,
sort_on="sortable_title"):
cats.append((st.UID, st.Title))
return DisplayList(cats) | def function[getAnalysisCategories, parameter[self]]:
constant[Return all available analysis categories
]
variable[bsc] assign[=] call[name[api].get_tool, parameter[constant[bika_setup_catalog]]]
variable[cats] assign[=] list[[]]
for taget[name[st]] in starred[call[name[bsc], parameter[]]] begin[:]
call[name[cats].append, parameter[tuple[[<ast.Attribute object at 0x7da1b231d5d0>, <ast.Attribute object at 0x7da1b231da20>]]]]
return[call[name[DisplayList], parameter[name[cats]]]] | keyword[def] identifier[getAnalysisCategories] ( identifier[self] ):
literal[string]
identifier[bsc] = identifier[api] . identifier[get_tool] ( literal[string] )
identifier[cats] =[]
keyword[for] identifier[st] keyword[in] identifier[bsc] ( identifier[portal_type] = literal[string] ,
identifier[is_active] = keyword[True] ,
identifier[sort_on] = literal[string] ):
identifier[cats] . identifier[append] (( identifier[st] . identifier[UID] , identifier[st] . identifier[Title] ))
keyword[return] identifier[DisplayList] ( identifier[cats] ) | def getAnalysisCategories(self):
"""Return all available analysis categories
"""
bsc = api.get_tool('bika_setup_catalog')
cats = []
for st in bsc(portal_type='AnalysisCategory', is_active=True, sort_on='sortable_title'):
cats.append((st.UID, st.Title)) # depends on [control=['for'], data=['st']]
return DisplayList(cats) |
def form_user_term_matrix(user_twitter_list_keywords_gen, id_to_node, lemma_set=None, keyword_to_topic_manual=None):
"""
Forms a user-term matrix.
Input: - user_twitter_list_keywords_gen: A python generator that yields a user Twitter id and a bag-of-words.
- id_to_node: A Twitter id to node map as a python dictionary.
- lemma_set: For the labelling, we use only lemmas in this set. Default: None
Outputs: - user_term_matrix: A user-to-term matrix in scipy sparse matrix format.
- annotated_nodes: A numpy array containing graph nodes.
- label_to_topic: A python dictionary that maps a numerical label to a string topic/keyword.
- node_to_lemma_tokeywordbag: A python dictionary that maps nodes to lemma-to-keyword bags.
"""
# Prepare for iteration.
term_to_attribute = dict()
user_term_matrix_row = list()
user_term_matrix_col = list()
user_term_matrix_data = list()
append_user_term_matrix_row = user_term_matrix_row.append
append_user_term_matrix_col = user_term_matrix_col.append
append_user_term_matrix_data = user_term_matrix_data.append
annotated_nodes = list()
append_node = annotated_nodes.append
node_to_lemma_tokeywordbag = dict()
invalid_terms = list()
counter = 0
if keyword_to_topic_manual is not None:
manual_keyword_list = list(keyword_to_topic_manual.keys())
for user_twitter_id, user_annotation in user_twitter_list_keywords_gen:
counter += 1
# print(counter)
bag_of_words = user_annotation["bag_of_lemmas"]
lemma_to_keywordbag = user_annotation["lemma_to_keywordbag"]
if lemma_set is not None:
bag_of_words = {lemma: multiplicity for lemma, multiplicity in bag_of_words.items() if lemma in lemma_set}
lemma_to_keywordbag = {lemma: keywordbag for lemma, keywordbag in lemma_to_keywordbag.items() if lemma in lemma_set}
node = id_to_node[user_twitter_id]
append_node(node)
node_to_lemma_tokeywordbag[node] = lemma_to_keywordbag
for term, multiplicity in bag_of_words.items():
if term == "news":
continue
if keyword_to_topic_manual is not None:
keyword_bag = lemma_to_keywordbag[term]
term = max(keyword_bag.keys(), key=(lambda key: keyword_bag[key]))
found_list_of_words = simple_word_query(term, manual_keyword_list, edit_distance=1)
if len(found_list_of_words) > 0:
term = found_list_of_words[0]
try:
term = keyword_to_topic_manual[term]
except KeyError:
print(term)
vocabulary_size = len(term_to_attribute)
attribute = term_to_attribute.setdefault(term, vocabulary_size)
append_user_term_matrix_row(node)
append_user_term_matrix_col(attribute)
append_user_term_matrix_data(multiplicity)
annotated_nodes = np.array(list(set(annotated_nodes)), dtype=np.int64)
user_term_matrix_row = np.array(user_term_matrix_row, dtype=np.int64)
user_term_matrix_col = np.array(user_term_matrix_col, dtype=np.int64)
user_term_matrix_data = np.array(user_term_matrix_data, dtype=np.float64)
user_term_matrix = sparse.coo_matrix((user_term_matrix_data,
(user_term_matrix_row, user_term_matrix_col)),
shape=(len(id_to_node), len(term_to_attribute)))
label_to_topic = dict(zip(term_to_attribute.values(), term_to_attribute.keys()))
# print(user_term_matrix.shape)
# print(len(label_to_topic))
# print(invalid_terms)
return user_term_matrix, annotated_nodes, label_to_topic, node_to_lemma_tokeywordbag | def function[form_user_term_matrix, parameter[user_twitter_list_keywords_gen, id_to_node, lemma_set, keyword_to_topic_manual]]:
constant[
Forms a user-term matrix.
Input: - user_twitter_list_keywords_gen: A python generator that yields a user Twitter id and a bag-of-words.
- id_to_node: A Twitter id to node map as a python dictionary.
- lemma_set: For the labelling, we use only lemmas in this set. Default: None
Outputs: - user_term_matrix: A user-to-term matrix in scipy sparse matrix format.
- annotated_nodes: A numpy array containing graph nodes.
- label_to_topic: A python dictionary that maps a numerical label to a string topic/keyword.
- node_to_lemma_tokeywordbag: A python dictionary that maps nodes to lemma-to-keyword bags.
]
variable[term_to_attribute] assign[=] call[name[dict], parameter[]]
variable[user_term_matrix_row] assign[=] call[name[list], parameter[]]
variable[user_term_matrix_col] assign[=] call[name[list], parameter[]]
variable[user_term_matrix_data] assign[=] call[name[list], parameter[]]
variable[append_user_term_matrix_row] assign[=] name[user_term_matrix_row].append
variable[append_user_term_matrix_col] assign[=] name[user_term_matrix_col].append
variable[append_user_term_matrix_data] assign[=] name[user_term_matrix_data].append
variable[annotated_nodes] assign[=] call[name[list], parameter[]]
variable[append_node] assign[=] name[annotated_nodes].append
variable[node_to_lemma_tokeywordbag] assign[=] call[name[dict], parameter[]]
variable[invalid_terms] assign[=] call[name[list], parameter[]]
variable[counter] assign[=] constant[0]
if compare[name[keyword_to_topic_manual] is_not constant[None]] begin[:]
variable[manual_keyword_list] assign[=] call[name[list], parameter[call[name[keyword_to_topic_manual].keys, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da1b28ac790>, <ast.Name object at 0x7da1b28af7f0>]]] in starred[name[user_twitter_list_keywords_gen]] begin[:]
<ast.AugAssign object at 0x7da1b28ac8e0>
variable[bag_of_words] assign[=] call[name[user_annotation]][constant[bag_of_lemmas]]
variable[lemma_to_keywordbag] assign[=] call[name[user_annotation]][constant[lemma_to_keywordbag]]
if compare[name[lemma_set] is_not constant[None]] begin[:]
variable[bag_of_words] assign[=] <ast.DictComp object at 0x7da1b27e0820>
variable[lemma_to_keywordbag] assign[=] <ast.DictComp object at 0x7da1b27e1150>
variable[node] assign[=] call[name[id_to_node]][name[user_twitter_id]]
call[name[append_node], parameter[name[node]]]
call[name[node_to_lemma_tokeywordbag]][name[node]] assign[=] name[lemma_to_keywordbag]
for taget[tuple[[<ast.Name object at 0x7da1b28ac490>, <ast.Name object at 0x7da1b28ad7e0>]]] in starred[call[name[bag_of_words].items, parameter[]]] begin[:]
if compare[name[term] equal[==] constant[news]] begin[:]
continue
if compare[name[keyword_to_topic_manual] is_not constant[None]] begin[:]
variable[keyword_bag] assign[=] call[name[lemma_to_keywordbag]][name[term]]
variable[term] assign[=] call[name[max], parameter[call[name[keyword_bag].keys, parameter[]]]]
variable[found_list_of_words] assign[=] call[name[simple_word_query], parameter[name[term], name[manual_keyword_list]]]
if compare[call[name[len], parameter[name[found_list_of_words]]] greater[>] constant[0]] begin[:]
variable[term] assign[=] call[name[found_list_of_words]][constant[0]]
<ast.Try object at 0x7da1b28aea40>
variable[vocabulary_size] assign[=] call[name[len], parameter[name[term_to_attribute]]]
variable[attribute] assign[=] call[name[term_to_attribute].setdefault, parameter[name[term], name[vocabulary_size]]]
call[name[append_user_term_matrix_row], parameter[name[node]]]
call[name[append_user_term_matrix_col], parameter[name[attribute]]]
call[name[append_user_term_matrix_data], parameter[name[multiplicity]]]
variable[annotated_nodes] assign[=] call[name[np].array, parameter[call[name[list], parameter[call[name[set], parameter[name[annotated_nodes]]]]]]]
variable[user_term_matrix_row] assign[=] call[name[np].array, parameter[name[user_term_matrix_row]]]
variable[user_term_matrix_col] assign[=] call[name[np].array, parameter[name[user_term_matrix_col]]]
variable[user_term_matrix_data] assign[=] call[name[np].array, parameter[name[user_term_matrix_data]]]
variable[user_term_matrix] assign[=] call[name[sparse].coo_matrix, parameter[tuple[[<ast.Name object at 0x7da1b28ace80>, <ast.Tuple object at 0x7da1b28f2740>]]]]
variable[label_to_topic] assign[=] call[name[dict], parameter[call[name[zip], parameter[call[name[term_to_attribute].values, parameter[]], call[name[term_to_attribute].keys, parameter[]]]]]]
return[tuple[[<ast.Name object at 0x7da1b28f2110>, <ast.Name object at 0x7da1b28f0700>, <ast.Name object at 0x7da1b28f14b0>, <ast.Name object at 0x7da1b28f2980>]]] | keyword[def] identifier[form_user_term_matrix] ( identifier[user_twitter_list_keywords_gen] , identifier[id_to_node] , identifier[lemma_set] = keyword[None] , identifier[keyword_to_topic_manual] = keyword[None] ):
literal[string]
identifier[term_to_attribute] = identifier[dict] ()
identifier[user_term_matrix_row] = identifier[list] ()
identifier[user_term_matrix_col] = identifier[list] ()
identifier[user_term_matrix_data] = identifier[list] ()
identifier[append_user_term_matrix_row] = identifier[user_term_matrix_row] . identifier[append]
identifier[append_user_term_matrix_col] = identifier[user_term_matrix_col] . identifier[append]
identifier[append_user_term_matrix_data] = identifier[user_term_matrix_data] . identifier[append]
identifier[annotated_nodes] = identifier[list] ()
identifier[append_node] = identifier[annotated_nodes] . identifier[append]
identifier[node_to_lemma_tokeywordbag] = identifier[dict] ()
identifier[invalid_terms] = identifier[list] ()
identifier[counter] = literal[int]
keyword[if] identifier[keyword_to_topic_manual] keyword[is] keyword[not] keyword[None] :
identifier[manual_keyword_list] = identifier[list] ( identifier[keyword_to_topic_manual] . identifier[keys] ())
keyword[for] identifier[user_twitter_id] , identifier[user_annotation] keyword[in] identifier[user_twitter_list_keywords_gen] :
identifier[counter] += literal[int]
identifier[bag_of_words] = identifier[user_annotation] [ literal[string] ]
identifier[lemma_to_keywordbag] = identifier[user_annotation] [ literal[string] ]
keyword[if] identifier[lemma_set] keyword[is] keyword[not] keyword[None] :
identifier[bag_of_words] ={ identifier[lemma] : identifier[multiplicity] keyword[for] identifier[lemma] , identifier[multiplicity] keyword[in] identifier[bag_of_words] . identifier[items] () keyword[if] identifier[lemma] keyword[in] identifier[lemma_set] }
identifier[lemma_to_keywordbag] ={ identifier[lemma] : identifier[keywordbag] keyword[for] identifier[lemma] , identifier[keywordbag] keyword[in] identifier[lemma_to_keywordbag] . identifier[items] () keyword[if] identifier[lemma] keyword[in] identifier[lemma_set] }
identifier[node] = identifier[id_to_node] [ identifier[user_twitter_id] ]
identifier[append_node] ( identifier[node] )
identifier[node_to_lemma_tokeywordbag] [ identifier[node] ]= identifier[lemma_to_keywordbag]
keyword[for] identifier[term] , identifier[multiplicity] keyword[in] identifier[bag_of_words] . identifier[items] ():
keyword[if] identifier[term] == literal[string] :
keyword[continue]
keyword[if] identifier[keyword_to_topic_manual] keyword[is] keyword[not] keyword[None] :
identifier[keyword_bag] = identifier[lemma_to_keywordbag] [ identifier[term] ]
identifier[term] = identifier[max] ( identifier[keyword_bag] . identifier[keys] (), identifier[key] =( keyword[lambda] identifier[key] : identifier[keyword_bag] [ identifier[key] ]))
identifier[found_list_of_words] = identifier[simple_word_query] ( identifier[term] , identifier[manual_keyword_list] , identifier[edit_distance] = literal[int] )
keyword[if] identifier[len] ( identifier[found_list_of_words] )> literal[int] :
identifier[term] = identifier[found_list_of_words] [ literal[int] ]
keyword[try] :
identifier[term] = identifier[keyword_to_topic_manual] [ identifier[term] ]
keyword[except] identifier[KeyError] :
identifier[print] ( identifier[term] )
identifier[vocabulary_size] = identifier[len] ( identifier[term_to_attribute] )
identifier[attribute] = identifier[term_to_attribute] . identifier[setdefault] ( identifier[term] , identifier[vocabulary_size] )
identifier[append_user_term_matrix_row] ( identifier[node] )
identifier[append_user_term_matrix_col] ( identifier[attribute] )
identifier[append_user_term_matrix_data] ( identifier[multiplicity] )
identifier[annotated_nodes] = identifier[np] . identifier[array] ( identifier[list] ( identifier[set] ( identifier[annotated_nodes] )), identifier[dtype] = identifier[np] . identifier[int64] )
identifier[user_term_matrix_row] = identifier[np] . identifier[array] ( identifier[user_term_matrix_row] , identifier[dtype] = identifier[np] . identifier[int64] )
identifier[user_term_matrix_col] = identifier[np] . identifier[array] ( identifier[user_term_matrix_col] , identifier[dtype] = identifier[np] . identifier[int64] )
identifier[user_term_matrix_data] = identifier[np] . identifier[array] ( identifier[user_term_matrix_data] , identifier[dtype] = identifier[np] . identifier[float64] )
identifier[user_term_matrix] = identifier[sparse] . identifier[coo_matrix] (( identifier[user_term_matrix_data] ,
( identifier[user_term_matrix_row] , identifier[user_term_matrix_col] )),
identifier[shape] =( identifier[len] ( identifier[id_to_node] ), identifier[len] ( identifier[term_to_attribute] )))
identifier[label_to_topic] = identifier[dict] ( identifier[zip] ( identifier[term_to_attribute] . identifier[values] (), identifier[term_to_attribute] . identifier[keys] ()))
keyword[return] identifier[user_term_matrix] , identifier[annotated_nodes] , identifier[label_to_topic] , identifier[node_to_lemma_tokeywordbag] | def form_user_term_matrix(user_twitter_list_keywords_gen, id_to_node, lemma_set=None, keyword_to_topic_manual=None):
"""
Forms a user-term matrix.
Input: - user_twitter_list_keywords_gen: A python generator that yields a user Twitter id and a bag-of-words.
- id_to_node: A Twitter id to node map as a python dictionary.
- lemma_set: For the labelling, we use only lemmas in this set. Default: None
Outputs: - user_term_matrix: A user-to-term matrix in scipy sparse matrix format.
- annotated_nodes: A numpy array containing graph nodes.
- label_to_topic: A python dictionary that maps a numerical label to a string topic/keyword.
- node_to_lemma_tokeywordbag: A python dictionary that maps nodes to lemma-to-keyword bags.
"""
# Prepare for iteration.
term_to_attribute = dict()
user_term_matrix_row = list()
user_term_matrix_col = list()
user_term_matrix_data = list()
append_user_term_matrix_row = user_term_matrix_row.append
append_user_term_matrix_col = user_term_matrix_col.append
append_user_term_matrix_data = user_term_matrix_data.append
annotated_nodes = list()
append_node = annotated_nodes.append
node_to_lemma_tokeywordbag = dict()
invalid_terms = list()
counter = 0
if keyword_to_topic_manual is not None:
manual_keyword_list = list(keyword_to_topic_manual.keys()) # depends on [control=['if'], data=['keyword_to_topic_manual']]
for (user_twitter_id, user_annotation) in user_twitter_list_keywords_gen:
counter += 1
# print(counter)
bag_of_words = user_annotation['bag_of_lemmas']
lemma_to_keywordbag = user_annotation['lemma_to_keywordbag']
if lemma_set is not None:
bag_of_words = {lemma: multiplicity for (lemma, multiplicity) in bag_of_words.items() if lemma in lemma_set}
lemma_to_keywordbag = {lemma: keywordbag for (lemma, keywordbag) in lemma_to_keywordbag.items() if lemma in lemma_set} # depends on [control=['if'], data=['lemma_set']]
node = id_to_node[user_twitter_id]
append_node(node)
node_to_lemma_tokeywordbag[node] = lemma_to_keywordbag
for (term, multiplicity) in bag_of_words.items():
if term == 'news':
continue # depends on [control=['if'], data=[]]
if keyword_to_topic_manual is not None:
keyword_bag = lemma_to_keywordbag[term]
term = max(keyword_bag.keys(), key=lambda key: keyword_bag[key])
found_list_of_words = simple_word_query(term, manual_keyword_list, edit_distance=1)
if len(found_list_of_words) > 0:
term = found_list_of_words[0] # depends on [control=['if'], data=[]]
try:
term = keyword_to_topic_manual[term] # depends on [control=['try'], data=[]]
except KeyError:
print(term) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['keyword_to_topic_manual']]
vocabulary_size = len(term_to_attribute)
attribute = term_to_attribute.setdefault(term, vocabulary_size)
append_user_term_matrix_row(node)
append_user_term_matrix_col(attribute)
append_user_term_matrix_data(multiplicity) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
annotated_nodes = np.array(list(set(annotated_nodes)), dtype=np.int64)
user_term_matrix_row = np.array(user_term_matrix_row, dtype=np.int64)
user_term_matrix_col = np.array(user_term_matrix_col, dtype=np.int64)
user_term_matrix_data = np.array(user_term_matrix_data, dtype=np.float64)
user_term_matrix = sparse.coo_matrix((user_term_matrix_data, (user_term_matrix_row, user_term_matrix_col)), shape=(len(id_to_node), len(term_to_attribute)))
label_to_topic = dict(zip(term_to_attribute.values(), term_to_attribute.keys()))
# print(user_term_matrix.shape)
# print(len(label_to_topic))
# print(invalid_terms)
return (user_term_matrix, annotated_nodes, label_to_topic, node_to_lemma_tokeywordbag) |
def _set_ether_stats_entry(self, v, load=False):
"""
Setter method for ether_stats_entry, mapped from YANG variable /interface/fortygigabitethernet/rmon/collection/ether_stats_entry (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ether_stats_entry is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ether_stats_entry() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("ether_stats_index",ether_stats_entry.ether_stats_entry, yang_name="ether-stats-entry", rest_name="stats", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ether-stats-index', extensions={u'tailf-common': {u'info': u'RMON ether statistics collection', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'alt-name': u'stats', u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'rmon_stats'}}), is_container='list', yang_name="ether-stats-entry", rest_name="stats", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'RMON ether statistics collection', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'alt-name': u'stats', u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'rmon_stats'}}, namespace='urn:brocade.com:mgmt:brocade-rmon', defining_module='brocade-rmon', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ether_stats_entry must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("ether_stats_index",ether_stats_entry.ether_stats_entry, yang_name="ether-stats-entry", rest_name="stats", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ether-stats-index', extensions={u'tailf-common': {u'info': u'RMON ether statistics collection', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'alt-name': u'stats', u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'rmon_stats'}}), is_container='list', yang_name="ether-stats-entry", rest_name="stats", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'RMON ether statistics collection', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'alt-name': u'stats', u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'rmon_stats'}}, namespace='urn:brocade.com:mgmt:brocade-rmon', defining_module='brocade-rmon', yang_type='list', is_config=True)""",
})
self.__ether_stats_entry = t
if hasattr(self, '_set'):
self._set() | def function[_set_ether_stats_entry, parameter[self, v, load]]:
constant[
Setter method for ether_stats_entry, mapped from YANG variable /interface/fortygigabitethernet/rmon/collection/ether_stats_entry (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ether_stats_entry is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ether_stats_entry() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da1b2589d50>
name[self].__ether_stats_entry assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_ether_stats_entry] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[ether_stats_entry] . identifier[ether_stats_entry] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__ether_stats_entry] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_ether_stats_entry(self, v, load=False):
"""
Setter method for ether_stats_entry, mapped from YANG variable /interface/fortygigabitethernet/rmon/collection/ether_stats_entry (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ether_stats_entry is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ether_stats_entry() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('ether_stats_index', ether_stats_entry.ether_stats_entry, yang_name='ether-stats-entry', rest_name='stats', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ether-stats-index', extensions={u'tailf-common': {u'info': u'RMON ether statistics collection', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'alt-name': u'stats', u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'rmon_stats'}}), is_container='list', yang_name='ether-stats-entry', rest_name='stats', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'RMON ether statistics collection', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'alt-name': u'stats', u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'rmon_stats'}}, namespace='urn:brocade.com:mgmt:brocade-rmon', defining_module='brocade-rmon', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'ether_stats_entry must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("ether_stats_index",ether_stats_entry.ether_stats_entry, yang_name="ether-stats-entry", rest_name="stats", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'ether-stats-index\', extensions={u\'tailf-common\': {u\'info\': u\'RMON ether statistics collection\', u\'cli-no-key-completion\': None, u\'cli-suppress-mode\': None, u\'cli-suppress-list-no\': None, u\'cli-full-no\': None, u\'alt-name\': u\'stats\', u\'cli-compact-syntax\': None, u\'cli-suppress-key-abbreviation\': None, u\'callpoint\': u\'rmon_stats\'}}), is_container=\'list\', yang_name="ether-stats-entry", rest_name="stats", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'RMON ether statistics collection\', u\'cli-no-key-completion\': None, u\'cli-suppress-mode\': None, u\'cli-suppress-list-no\': None, u\'cli-full-no\': None, u\'alt-name\': u\'stats\', u\'cli-compact-syntax\': None, u\'cli-suppress-key-abbreviation\': None, u\'callpoint\': u\'rmon_stats\'}}, namespace=\'urn:brocade.com:mgmt:brocade-rmon\', defining_module=\'brocade-rmon\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__ether_stats_entry = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'session_id') and self.session_id is not None:
_dict['session_id'] = self.session_id
return _dict | def function[_to_dict, parameter[self]]:
constant[Return a json dictionary representing this model.]
variable[_dict] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da1b1b47d30> begin[:]
call[name[_dict]][constant[session_id]] assign[=] name[self].session_id
return[name[_dict]] | keyword[def] identifier[_to_dict] ( identifier[self] ):
literal[string]
identifier[_dict] ={}
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[session_id] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]= identifier[self] . identifier[session_id]
keyword[return] identifier[_dict] | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'session_id') and self.session_id is not None:
_dict['session_id'] = self.session_id # depends on [control=['if'], data=[]]
return _dict |
def compare_table_cols(a, b):
"""
Return False if the two tables a and b have the same columns
(ignoring order) according to LIGO LW name conventions, return True
otherwise.
"""
return cmp(sorted((col.Name, col.Type) for col in a.getElementsByTagName(ligolw.Column.tagName)), sorted((col.Name, col.Type) for col in b.getElementsByTagName(ligolw.Column.tagName))) | def function[compare_table_cols, parameter[a, b]]:
constant[
Return False if the two tables a and b have the same columns
(ignoring order) according to LIGO LW name conventions, return True
otherwise.
]
return[call[name[cmp], parameter[call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da1b0b54df0>]], call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da1b0b54a30>]]]]] | keyword[def] identifier[compare_table_cols] ( identifier[a] , identifier[b] ):
literal[string]
keyword[return] identifier[cmp] ( identifier[sorted] (( identifier[col] . identifier[Name] , identifier[col] . identifier[Type] ) keyword[for] identifier[col] keyword[in] identifier[a] . identifier[getElementsByTagName] ( identifier[ligolw] . identifier[Column] . identifier[tagName] )), identifier[sorted] (( identifier[col] . identifier[Name] , identifier[col] . identifier[Type] ) keyword[for] identifier[col] keyword[in] identifier[b] . identifier[getElementsByTagName] ( identifier[ligolw] . identifier[Column] . identifier[tagName] ))) | def compare_table_cols(a, b):
"""
Return False if the two tables a and b have the same columns
(ignoring order) according to LIGO LW name conventions, return True
otherwise.
"""
return cmp(sorted(((col.Name, col.Type) for col in a.getElementsByTagName(ligolw.Column.tagName))), sorted(((col.Name, col.Type) for col in b.getElementsByTagName(ligolw.Column.tagName)))) |
def l_objective(model,objective=None):
"""
A replacement for pyomo's Objective that quickly builds linear
objectives.
Instead of
model.objective = Objective(expr=sum(vars[i]*coeffs[i] for i in index)+constant)
call instead
l_objective(model,objective)
where objective is an LExpression.
Variables may be repeated with different coefficients, which pyomo
will sum up.
Parameters
----------
model : pyomo.environ.ConcreteModel
objective : LExpression
"""
if objective is None:
objective = LExpression()
#initialise with a dummy
model.objective = Objective(expr = 0.)
model.objective._expr = _build_sum_expression(objective.variables, constant=objective.constant) | def function[l_objective, parameter[model, objective]]:
constant[
A replacement for pyomo's Objective that quickly builds linear
objectives.
Instead of
model.objective = Objective(expr=sum(vars[i]*coeffs[i] for i in index)+constant)
call instead
l_objective(model,objective)
where objective is an LExpression.
Variables may be repeated with different coefficients, which pyomo
will sum up.
Parameters
----------
model : pyomo.environ.ConcreteModel
objective : LExpression
]
if compare[name[objective] is constant[None]] begin[:]
variable[objective] assign[=] call[name[LExpression], parameter[]]
name[model].objective assign[=] call[name[Objective], parameter[]]
name[model].objective._expr assign[=] call[name[_build_sum_expression], parameter[name[objective].variables]] | keyword[def] identifier[l_objective] ( identifier[model] , identifier[objective] = keyword[None] ):
literal[string]
keyword[if] identifier[objective] keyword[is] keyword[None] :
identifier[objective] = identifier[LExpression] ()
identifier[model] . identifier[objective] = identifier[Objective] ( identifier[expr] = literal[int] )
identifier[model] . identifier[objective] . identifier[_expr] = identifier[_build_sum_expression] ( identifier[objective] . identifier[variables] , identifier[constant] = identifier[objective] . identifier[constant] ) | def l_objective(model, objective=None):
"""
A replacement for pyomo's Objective that quickly builds linear
objectives.
Instead of
model.objective = Objective(expr=sum(vars[i]*coeffs[i] for i in index)+constant)
call instead
l_objective(model,objective)
where objective is an LExpression.
Variables may be repeated with different coefficients, which pyomo
will sum up.
Parameters
----------
model : pyomo.environ.ConcreteModel
objective : LExpression
"""
if objective is None:
objective = LExpression() # depends on [control=['if'], data=['objective']]
#initialise with a dummy
model.objective = Objective(expr=0.0)
model.objective._expr = _build_sum_expression(objective.variables, constant=objective.constant) |
def update():
"""Update to the latest pages."""
repo_directory = get_config()['repo_directory']
os.chdir(repo_directory)
click.echo("Check for updates...")
local = subprocess.check_output('git rev-parse master'.split()).strip()
remote = subprocess.check_output(
'git ls-remote https://github.com/tldr-pages/tldr/ HEAD'.split()
).split()[0]
if local != remote:
click.echo("Updating...")
subprocess.check_call('git checkout master'.split())
subprocess.check_call('git pull --rebase'.split())
build_index()
click.echo("Update to the latest and rebuild the index.")
else:
click.echo("No need for updates.") | def function[update, parameter[]]:
constant[Update to the latest pages.]
variable[repo_directory] assign[=] call[call[name[get_config], parameter[]]][constant[repo_directory]]
call[name[os].chdir, parameter[name[repo_directory]]]
call[name[click].echo, parameter[constant[Check for updates...]]]
variable[local] assign[=] call[call[name[subprocess].check_output, parameter[call[constant[git rev-parse master].split, parameter[]]]].strip, parameter[]]
variable[remote] assign[=] call[call[call[name[subprocess].check_output, parameter[call[constant[git ls-remote https://github.com/tldr-pages/tldr/ HEAD].split, parameter[]]]].split, parameter[]]][constant[0]]
if compare[name[local] not_equal[!=] name[remote]] begin[:]
call[name[click].echo, parameter[constant[Updating...]]]
call[name[subprocess].check_call, parameter[call[constant[git checkout master].split, parameter[]]]]
call[name[subprocess].check_call, parameter[call[constant[git pull --rebase].split, parameter[]]]]
call[name[build_index], parameter[]]
call[name[click].echo, parameter[constant[Update to the latest and rebuild the index.]]] | keyword[def] identifier[update] ():
literal[string]
identifier[repo_directory] = identifier[get_config] ()[ literal[string] ]
identifier[os] . identifier[chdir] ( identifier[repo_directory] )
identifier[click] . identifier[echo] ( literal[string] )
identifier[local] = identifier[subprocess] . identifier[check_output] ( literal[string] . identifier[split] ()). identifier[strip] ()
identifier[remote] = identifier[subprocess] . identifier[check_output] (
literal[string] . identifier[split] ()
). identifier[split] ()[ literal[int] ]
keyword[if] identifier[local] != identifier[remote] :
identifier[click] . identifier[echo] ( literal[string] )
identifier[subprocess] . identifier[check_call] ( literal[string] . identifier[split] ())
identifier[subprocess] . identifier[check_call] ( literal[string] . identifier[split] ())
identifier[build_index] ()
identifier[click] . identifier[echo] ( literal[string] )
keyword[else] :
identifier[click] . identifier[echo] ( literal[string] ) | def update():
"""Update to the latest pages."""
repo_directory = get_config()['repo_directory']
os.chdir(repo_directory)
click.echo('Check for updates...')
local = subprocess.check_output('git rev-parse master'.split()).strip()
remote = subprocess.check_output('git ls-remote https://github.com/tldr-pages/tldr/ HEAD'.split()).split()[0]
if local != remote:
click.echo('Updating...')
subprocess.check_call('git checkout master'.split())
subprocess.check_call('git pull --rebase'.split())
build_index()
click.echo('Update to the latest and rebuild the index.') # depends on [control=['if'], data=[]]
else:
click.echo('No need for updates.') |
def OLD_printDebug(s, style=None):
"""
util for printing in colors to sys.stderr stream
"""
if style == "comment":
s = Style.DIM + s + Style.RESET_ALL
elif style == "important":
s = Style.BRIGHT + s + Style.RESET_ALL
elif style == "normal":
s = Style.RESET_ALL + s + Style.RESET_ALL
elif style == "red":
s = Fore.RED + s + Style.RESET_ALL
elif style == "green":
s = Fore.GREEN + s + Style.RESET_ALL
try:
print(s, file=sys.stderr)
except:
pass | def function[OLD_printDebug, parameter[s, style]]:
constant[
util for printing in colors to sys.stderr stream
]
if compare[name[style] equal[==] constant[comment]] begin[:]
variable[s] assign[=] binary_operation[binary_operation[name[Style].DIM + name[s]] + name[Style].RESET_ALL]
<ast.Try object at 0x7da1b1005e10> | keyword[def] identifier[OLD_printDebug] ( identifier[s] , identifier[style] = keyword[None] ):
literal[string]
keyword[if] identifier[style] == literal[string] :
identifier[s] = identifier[Style] . identifier[DIM] + identifier[s] + identifier[Style] . identifier[RESET_ALL]
keyword[elif] identifier[style] == literal[string] :
identifier[s] = identifier[Style] . identifier[BRIGHT] + identifier[s] + identifier[Style] . identifier[RESET_ALL]
keyword[elif] identifier[style] == literal[string] :
identifier[s] = identifier[Style] . identifier[RESET_ALL] + identifier[s] + identifier[Style] . identifier[RESET_ALL]
keyword[elif] identifier[style] == literal[string] :
identifier[s] = identifier[Fore] . identifier[RED] + identifier[s] + identifier[Style] . identifier[RESET_ALL]
keyword[elif] identifier[style] == literal[string] :
identifier[s] = identifier[Fore] . identifier[GREEN] + identifier[s] + identifier[Style] . identifier[RESET_ALL]
keyword[try] :
identifier[print] ( identifier[s] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[except] :
keyword[pass] | def OLD_printDebug(s, style=None):
"""
util for printing in colors to sys.stderr stream
"""
if style == 'comment':
s = Style.DIM + s + Style.RESET_ALL # depends on [control=['if'], data=[]]
elif style == 'important':
s = Style.BRIGHT + s + Style.RESET_ALL # depends on [control=['if'], data=[]]
elif style == 'normal':
s = Style.RESET_ALL + s + Style.RESET_ALL # depends on [control=['if'], data=[]]
elif style == 'red':
s = Fore.RED + s + Style.RESET_ALL # depends on [control=['if'], data=[]]
elif style == 'green':
s = Fore.GREEN + s + Style.RESET_ALL # depends on [control=['if'], data=[]]
try:
print(s, file=sys.stderr) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] |
def _extalg(xarr, alpha=100, axis=None):
'''Given an array xarr of values, smoothly return the max/min'''
return (np.sum(xarr * np.exp(alpha*xarr), axis=axis, keepdims=True)/
np.sum(np.exp(alpha*xarr), axis=axis, keepdims=True)) | def function[_extalg, parameter[xarr, alpha, axis]]:
constant[Given an array xarr of values, smoothly return the max/min]
return[binary_operation[call[name[np].sum, parameter[binary_operation[name[xarr] * call[name[np].exp, parameter[binary_operation[name[alpha] * name[xarr]]]]]]] / call[name[np].sum, parameter[call[name[np].exp, parameter[binary_operation[name[alpha] * name[xarr]]]]]]]] | keyword[def] identifier[_extalg] ( identifier[xarr] , identifier[alpha] = literal[int] , identifier[axis] = keyword[None] ):
literal[string]
keyword[return] ( identifier[np] . identifier[sum] ( identifier[xarr] * identifier[np] . identifier[exp] ( identifier[alpha] * identifier[xarr] ), identifier[axis] = identifier[axis] , identifier[keepdims] = keyword[True] )/
identifier[np] . identifier[sum] ( identifier[np] . identifier[exp] ( identifier[alpha] * identifier[xarr] ), identifier[axis] = identifier[axis] , identifier[keepdims] = keyword[True] )) | def _extalg(xarr, alpha=100, axis=None):
"""Given an array xarr of values, smoothly return the max/min"""
return np.sum(xarr * np.exp(alpha * xarr), axis=axis, keepdims=True) / np.sum(np.exp(alpha * xarr), axis=axis, keepdims=True) |
def visit_Call(self, node):
"""Propagate 'debug' wrapper into inner function calls if needed.
Args:
node (ast.AST): node statement to surround.
"""
if self.depth == 0:
return node
if self.ignore_exceptions is None:
ignore_exceptions = ast.Name("None", ast.Load())
else:
ignore_exceptions = ast.List(self.ignore_exceptions, ast.Load())
catch_exception_type = self.catch_exception \
if self.catch_exception else "None"
catch_exception = ast.Name(catch_exception_type, ast.Load())
depth = ast.Num(self.depth - 1 if self.depth > 0 else -1)
debug_node_name = ast.Name("debug", ast.Load())
call_extra_parameters = [] if IS_PYTHON_3 else [None, None]
node.func = ast.Call(debug_node_name,
[node.func, ignore_exceptions,
catch_exception, depth],
[], *call_extra_parameters)
return node | def function[visit_Call, parameter[self, node]]:
constant[Propagate 'debug' wrapper into inner function calls if needed.
Args:
node (ast.AST): node statement to surround.
]
if compare[name[self].depth equal[==] constant[0]] begin[:]
return[name[node]]
if compare[name[self].ignore_exceptions is constant[None]] begin[:]
variable[ignore_exceptions] assign[=] call[name[ast].Name, parameter[constant[None], call[name[ast].Load, parameter[]]]]
variable[catch_exception_type] assign[=] <ast.IfExp object at 0x7da18eb57f10>
variable[catch_exception] assign[=] call[name[ast].Name, parameter[name[catch_exception_type], call[name[ast].Load, parameter[]]]]
variable[depth] assign[=] call[name[ast].Num, parameter[<ast.IfExp object at 0x7da18eb555a0>]]
variable[debug_node_name] assign[=] call[name[ast].Name, parameter[constant[debug], call[name[ast].Load, parameter[]]]]
variable[call_extra_parameters] assign[=] <ast.IfExp object at 0x7da18c4cd720>
name[node].func assign[=] call[name[ast].Call, parameter[name[debug_node_name], list[[<ast.Attribute object at 0x7da18c4cf2e0>, <ast.Name object at 0x7da18c4cf130>, <ast.Name object at 0x7da18c4cc880>, <ast.Name object at 0x7da18c4cc490>]], list[[]], <ast.Starred object at 0x7da18c4cc280>]]
return[name[node]] | keyword[def] identifier[visit_Call] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] identifier[self] . identifier[depth] == literal[int] :
keyword[return] identifier[node]
keyword[if] identifier[self] . identifier[ignore_exceptions] keyword[is] keyword[None] :
identifier[ignore_exceptions] = identifier[ast] . identifier[Name] ( literal[string] , identifier[ast] . identifier[Load] ())
keyword[else] :
identifier[ignore_exceptions] = identifier[ast] . identifier[List] ( identifier[self] . identifier[ignore_exceptions] , identifier[ast] . identifier[Load] ())
identifier[catch_exception_type] = identifier[self] . identifier[catch_exception] keyword[if] identifier[self] . identifier[catch_exception] keyword[else] literal[string]
identifier[catch_exception] = identifier[ast] . identifier[Name] ( identifier[catch_exception_type] , identifier[ast] . identifier[Load] ())
identifier[depth] = identifier[ast] . identifier[Num] ( identifier[self] . identifier[depth] - literal[int] keyword[if] identifier[self] . identifier[depth] > literal[int] keyword[else] - literal[int] )
identifier[debug_node_name] = identifier[ast] . identifier[Name] ( literal[string] , identifier[ast] . identifier[Load] ())
identifier[call_extra_parameters] =[] keyword[if] identifier[IS_PYTHON_3] keyword[else] [ keyword[None] , keyword[None] ]
identifier[node] . identifier[func] = identifier[ast] . identifier[Call] ( identifier[debug_node_name] ,
[ identifier[node] . identifier[func] , identifier[ignore_exceptions] ,
identifier[catch_exception] , identifier[depth] ],
[],* identifier[call_extra_parameters] )
keyword[return] identifier[node] | def visit_Call(self, node):
"""Propagate 'debug' wrapper into inner function calls if needed.
Args:
node (ast.AST): node statement to surround.
"""
if self.depth == 0:
return node # depends on [control=['if'], data=[]]
if self.ignore_exceptions is None:
ignore_exceptions = ast.Name('None', ast.Load()) # depends on [control=['if'], data=[]]
else:
ignore_exceptions = ast.List(self.ignore_exceptions, ast.Load())
catch_exception_type = self.catch_exception if self.catch_exception else 'None'
catch_exception = ast.Name(catch_exception_type, ast.Load())
depth = ast.Num(self.depth - 1 if self.depth > 0 else -1)
debug_node_name = ast.Name('debug', ast.Load())
call_extra_parameters = [] if IS_PYTHON_3 else [None, None]
node.func = ast.Call(debug_node_name, [node.func, ignore_exceptions, catch_exception, depth], [], *call_extra_parameters)
return node |
def hookable(cls):
"""
Initialise hookery in a class that declares hooks by decorating it with this decorator.
This replaces the class with another one which has the same name, but also inherits Hookable
which has HookableMeta set as metaclass so that sub-classes of cls will have hook descriptors
initialised properly.
When you say:
@hookable
class My:
before = Hook()
then @hookable changes My.before to be a HookDescriptor which is then
changed into Hook if anyone accesses it.
There is no need to decorate sub-classes of cls with @hookable.
"""
assert isinstance(cls, type)
# For classes that won't have descriptors initialised by metaclass, need to do it here.
hook_definitions = []
if not issubclass(cls, Hookable):
for k, v in list(cls.__dict__.items()):
if isinstance(v, (ClassHook, InstanceHook)):
delattr(cls, k)
if v.name is None:
v.name = k
hook_definitions.append((k, v))
hookable_cls = type(cls.__name__, (cls, Hookable), {})
for k, v in hook_definitions:
setattr(hookable_cls, k, HookDescriptor(defining_hook=v, defining_class=hookable_cls))
return hookable_cls | def function[hookable, parameter[cls]]:
constant[
Initialise hookery in a class that declares hooks by decorating it with this decorator.
This replaces the class with another one which has the same name, but also inherits Hookable
which has HookableMeta set as metaclass so that sub-classes of cls will have hook descriptors
initialised properly.
When you say:
@hookable
class My:
before = Hook()
then @hookable changes My.before to be a HookDescriptor which is then
changed into Hook if anyone accesses it.
There is no need to decorate sub-classes of cls with @hookable.
]
assert[call[name[isinstance], parameter[name[cls], name[type]]]]
variable[hook_definitions] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da20e955630> begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b16a9690>, <ast.Name object at 0x7da1b16aa740>]]] in starred[call[name[list], parameter[call[name[cls].__dict__.items, parameter[]]]]] begin[:]
if call[name[isinstance], parameter[name[v], tuple[[<ast.Name object at 0x7da1b1668df0>, <ast.Name object at 0x7da1b1669690>]]]] begin[:]
call[name[delattr], parameter[name[cls], name[k]]]
if compare[name[v].name is constant[None]] begin[:]
name[v].name assign[=] name[k]
call[name[hook_definitions].append, parameter[tuple[[<ast.Name object at 0x7da1b1669720>, <ast.Name object at 0x7da1b16693c0>]]]]
variable[hookable_cls] assign[=] call[name[type], parameter[name[cls].__name__, tuple[[<ast.Name object at 0x7da1b16684c0>, <ast.Name object at 0x7da1b1668190>]], dictionary[[], []]]]
for taget[tuple[[<ast.Name object at 0x7da1b1669090>, <ast.Name object at 0x7da1b1669990>]]] in starred[name[hook_definitions]] begin[:]
call[name[setattr], parameter[name[hookable_cls], name[k], call[name[HookDescriptor], parameter[]]]]
return[name[hookable_cls]] | keyword[def] identifier[hookable] ( identifier[cls] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[cls] , identifier[type] )
identifier[hook_definitions] =[]
keyword[if] keyword[not] identifier[issubclass] ( identifier[cls] , identifier[Hookable] ):
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[list] ( identifier[cls] . identifier[__dict__] . identifier[items] ()):
keyword[if] identifier[isinstance] ( identifier[v] ,( identifier[ClassHook] , identifier[InstanceHook] )):
identifier[delattr] ( identifier[cls] , identifier[k] )
keyword[if] identifier[v] . identifier[name] keyword[is] keyword[None] :
identifier[v] . identifier[name] = identifier[k]
identifier[hook_definitions] . identifier[append] (( identifier[k] , identifier[v] ))
identifier[hookable_cls] = identifier[type] ( identifier[cls] . identifier[__name__] ,( identifier[cls] , identifier[Hookable] ),{})
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[hook_definitions] :
identifier[setattr] ( identifier[hookable_cls] , identifier[k] , identifier[HookDescriptor] ( identifier[defining_hook] = identifier[v] , identifier[defining_class] = identifier[hookable_cls] ))
keyword[return] identifier[hookable_cls] | def hookable(cls):
"""
Initialise hookery in a class that declares hooks by decorating it with this decorator.
This replaces the class with another one which has the same name, but also inherits Hookable
which has HookableMeta set as metaclass so that sub-classes of cls will have hook descriptors
initialised properly.
When you say:
@hookable
class My:
before = Hook()
then @hookable changes My.before to be a HookDescriptor which is then
changed into Hook if anyone accesses it.
There is no need to decorate sub-classes of cls with @hookable.
"""
assert isinstance(cls, type)
# For classes that won't have descriptors initialised by metaclass, need to do it here.
hook_definitions = []
if not issubclass(cls, Hookable):
for (k, v) in list(cls.__dict__.items()):
if isinstance(v, (ClassHook, InstanceHook)):
delattr(cls, k)
if v.name is None:
v.name = k # depends on [control=['if'], data=[]]
hook_definitions.append((k, v)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
hookable_cls = type(cls.__name__, (cls, Hookable), {})
for (k, v) in hook_definitions:
setattr(hookable_cls, k, HookDescriptor(defining_hook=v, defining_class=hookable_cls)) # depends on [control=['for'], data=[]]
return hookable_cls |
def validate_amendment(obj, retain_deprecated=True, **kwargs):
"""Takes an `obj` that is an amendment object.
`retain_deprecated` if False, then `obj` may be modified to replace
deprecated constructs with new syntax. If it is True, the `obj` will
not be modified.
Returns the pair:
errors, adaptor
`errors` is a simple list of error messages
`adaptor` will be an instance of amendments.validation.adaptor.AmendmentValidationAdaptor
it holds a reference to `obj` and the bookkeepping data necessary to attach
the log message to `obj` if
"""
# Gather and report errors in a simple list
errors = []
n = create_validation_adaptor(obj, errors, **kwargs)
return errors, n | def function[validate_amendment, parameter[obj, retain_deprecated]]:
constant[Takes an `obj` that is an amendment object.
`retain_deprecated` if False, then `obj` may be modified to replace
deprecated constructs with new syntax. If it is True, the `obj` will
not be modified.
Returns the pair:
errors, adaptor
`errors` is a simple list of error messages
`adaptor` will be an instance of amendments.validation.adaptor.AmendmentValidationAdaptor
it holds a reference to `obj` and the bookkeepping data necessary to attach
the log message to `obj` if
]
variable[errors] assign[=] list[[]]
variable[n] assign[=] call[name[create_validation_adaptor], parameter[name[obj], name[errors]]]
return[tuple[[<ast.Name object at 0x7da2047eb880>, <ast.Name object at 0x7da2047e95d0>]]] | keyword[def] identifier[validate_amendment] ( identifier[obj] , identifier[retain_deprecated] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[errors] =[]
identifier[n] = identifier[create_validation_adaptor] ( identifier[obj] , identifier[errors] ,** identifier[kwargs] )
keyword[return] identifier[errors] , identifier[n] | def validate_amendment(obj, retain_deprecated=True, **kwargs):
"""Takes an `obj` that is an amendment object.
`retain_deprecated` if False, then `obj` may be modified to replace
deprecated constructs with new syntax. If it is True, the `obj` will
not be modified.
Returns the pair:
errors, adaptor
`errors` is a simple list of error messages
`adaptor` will be an instance of amendments.validation.adaptor.AmendmentValidationAdaptor
it holds a reference to `obj` and the bookkeepping data necessary to attach
the log message to `obj` if
"""
# Gather and report errors in a simple list
errors = []
n = create_validation_adaptor(obj, errors, **kwargs)
return (errors, n) |
async def has_started(self):
"""
Whether the handler has completed all start up processes such as
establishing the connection, session, link and authentication, and
is not ready to process messages.
**This function is now deprecated and will be removed in v2.0+.**
:rtype: bool
"""
# pylint: disable=protected-access
timeout = False
auth_in_progress = False
if self._handler._connection.cbs:
timeout, auth_in_progress = await self._handler._auth.handle_token_async()
if timeout:
raise EventHubError("Authorization timeout.")
if auth_in_progress:
return False
if not await self._handler._client_ready_async():
return False
return True | <ast.AsyncFunctionDef object at 0x7da18bccbaf0> | keyword[async] keyword[def] identifier[has_started] ( identifier[self] ):
literal[string]
identifier[timeout] = keyword[False]
identifier[auth_in_progress] = keyword[False]
keyword[if] identifier[self] . identifier[_handler] . identifier[_connection] . identifier[cbs] :
identifier[timeout] , identifier[auth_in_progress] = keyword[await] identifier[self] . identifier[_handler] . identifier[_auth] . identifier[handle_token_async] ()
keyword[if] identifier[timeout] :
keyword[raise] identifier[EventHubError] ( literal[string] )
keyword[if] identifier[auth_in_progress] :
keyword[return] keyword[False]
keyword[if] keyword[not] keyword[await] identifier[self] . identifier[_handler] . identifier[_client_ready_async] ():
keyword[return] keyword[False]
keyword[return] keyword[True] | async def has_started(self):
"""
Whether the handler has completed all start up processes such as
establishing the connection, session, link and authentication, and
is not ready to process messages.
**This function is now deprecated and will be removed in v2.0+.**
:rtype: bool
"""
# pylint: disable=protected-access
timeout = False
auth_in_progress = False
if self._handler._connection.cbs:
(timeout, auth_in_progress) = await self._handler._auth.handle_token_async() # depends on [control=['if'], data=[]]
if timeout:
raise EventHubError('Authorization timeout.') # depends on [control=['if'], data=[]]
if auth_in_progress:
return False # depends on [control=['if'], data=[]]
if not await self._handler._client_ready_async():
return False # depends on [control=['if'], data=[]]
return True |
def remap(im, coords):
"""
Remap an RGB image using the given target coordinate array.
If available, OpenCV is used (faster), otherwise SciPy.
:type im: ndarray of shape (h,w,3)
:param im: RGB image to be remapped
:type coords: ndarray of shape (h,w,2)
:param coords: target coordinates in x,y order for each pixel
:return: remapped RGB image
:rtype: ndarray of shape (h,w,3)
"""
if cv2:
return remapOpenCv(im, coords)
else:
return remapScipy(im, coords) | def function[remap, parameter[im, coords]]:
constant[
Remap an RGB image using the given target coordinate array.
If available, OpenCV is used (faster), otherwise SciPy.
:type im: ndarray of shape (h,w,3)
:param im: RGB image to be remapped
:type coords: ndarray of shape (h,w,2)
:param coords: target coordinates in x,y order for each pixel
:return: remapped RGB image
:rtype: ndarray of shape (h,w,3)
]
if name[cv2] begin[:]
return[call[name[remapOpenCv], parameter[name[im], name[coords]]]] | keyword[def] identifier[remap] ( identifier[im] , identifier[coords] ):
literal[string]
keyword[if] identifier[cv2] :
keyword[return] identifier[remapOpenCv] ( identifier[im] , identifier[coords] )
keyword[else] :
keyword[return] identifier[remapScipy] ( identifier[im] , identifier[coords] ) | def remap(im, coords):
"""
Remap an RGB image using the given target coordinate array.
If available, OpenCV is used (faster), otherwise SciPy.
:type im: ndarray of shape (h,w,3)
:param im: RGB image to be remapped
:type coords: ndarray of shape (h,w,2)
:param coords: target coordinates in x,y order for each pixel
:return: remapped RGB image
:rtype: ndarray of shape (h,w,3)
"""
if cv2:
return remapOpenCv(im, coords) # depends on [control=['if'], data=[]]
else:
return remapScipy(im, coords) |
def delete(filepath):
"""
Delete the given file, directory or link.
It Should support undelete later on.
Args:
filepath (str): Absolute full path to a file. e.g. /path/to/file
"""
# Some files have ACLs, let's remove them recursively
remove_acl(filepath)
# Some files have immutable attributes, let's remove them recursively
remove_immutable_attribute(filepath)
# Finally remove the files and folders
if os.path.isfile(filepath) or os.path.islink(filepath):
os.remove(filepath)
elif os.path.isdir(filepath):
shutil.rmtree(filepath) | def function[delete, parameter[filepath]]:
constant[
Delete the given file, directory or link.
It Should support undelete later on.
Args:
filepath (str): Absolute full path to a file. e.g. /path/to/file
]
call[name[remove_acl], parameter[name[filepath]]]
call[name[remove_immutable_attribute], parameter[name[filepath]]]
if <ast.BoolOp object at 0x7da1b1b3eda0> begin[:]
call[name[os].remove, parameter[name[filepath]]] | keyword[def] identifier[delete] ( identifier[filepath] ):
literal[string]
identifier[remove_acl] ( identifier[filepath] )
identifier[remove_immutable_attribute] ( identifier[filepath] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filepath] ) keyword[or] identifier[os] . identifier[path] . identifier[islink] ( identifier[filepath] ):
identifier[os] . identifier[remove] ( identifier[filepath] )
keyword[elif] identifier[os] . identifier[path] . identifier[isdir] ( identifier[filepath] ):
identifier[shutil] . identifier[rmtree] ( identifier[filepath] ) | def delete(filepath):
"""
Delete the given file, directory or link.
It Should support undelete later on.
Args:
filepath (str): Absolute full path to a file. e.g. /path/to/file
"""
# Some files have ACLs, let's remove them recursively
remove_acl(filepath)
# Some files have immutable attributes, let's remove them recursively
remove_immutable_attribute(filepath)
# Finally remove the files and folders
if os.path.isfile(filepath) or os.path.islink(filepath):
os.remove(filepath) # depends on [control=['if'], data=[]]
elif os.path.isdir(filepath):
shutil.rmtree(filepath) # depends on [control=['if'], data=[]] |
def get_message(self, set_slave_ok, is_mongos, use_cmd=False):
"""Get a query message, possibly setting the slaveOk bit."""
if set_slave_ok:
# Set the slaveOk bit.
flags = self.flags | 4
else:
flags = self.flags
ns = _UJOIN % (self.db, self.coll)
spec = self.spec
if use_cmd:
ns = _UJOIN % (self.db, "$cmd")
spec = self.as_command()[0]
ntoreturn = -1 # All DB commands return 1 document
else:
# OP_QUERY treats ntoreturn of -1 and 1 the same, return
# one document and close the cursor. We have to use 2 for
# batch size if 1 is specified.
ntoreturn = self.batch_size == 1 and 2 or self.batch_size
if self.limit:
if ntoreturn:
ntoreturn = min(self.limit, ntoreturn)
else:
ntoreturn = self.limit
if is_mongos:
spec = _maybe_add_read_preference(spec,
self.read_preference)
return query(flags, ns, self.ntoskip, ntoreturn,
spec, self.fields, self.codec_options) | def function[get_message, parameter[self, set_slave_ok, is_mongos, use_cmd]]:
constant[Get a query message, possibly setting the slaveOk bit.]
if name[set_slave_ok] begin[:]
variable[flags] assign[=] binary_operation[name[self].flags <ast.BitOr object at 0x7da2590d6aa0> constant[4]]
variable[ns] assign[=] binary_operation[name[_UJOIN] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20e954c10>, <ast.Attribute object at 0x7da20e955ba0>]]]
variable[spec] assign[=] name[self].spec
if name[use_cmd] begin[:]
variable[ns] assign[=] binary_operation[name[_UJOIN] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20e9578e0>, <ast.Constant object at 0x7da20e956620>]]]
variable[spec] assign[=] call[call[name[self].as_command, parameter[]]][constant[0]]
variable[ntoreturn] assign[=] <ast.UnaryOp object at 0x7da20e955510>
if name[is_mongos] begin[:]
variable[spec] assign[=] call[name[_maybe_add_read_preference], parameter[name[spec], name[self].read_preference]]
return[call[name[query], parameter[name[flags], name[ns], name[self].ntoskip, name[ntoreturn], name[spec], name[self].fields, name[self].codec_options]]] | keyword[def] identifier[get_message] ( identifier[self] , identifier[set_slave_ok] , identifier[is_mongos] , identifier[use_cmd] = keyword[False] ):
literal[string]
keyword[if] identifier[set_slave_ok] :
identifier[flags] = identifier[self] . identifier[flags] | literal[int]
keyword[else] :
identifier[flags] = identifier[self] . identifier[flags]
identifier[ns] = identifier[_UJOIN] %( identifier[self] . identifier[db] , identifier[self] . identifier[coll] )
identifier[spec] = identifier[self] . identifier[spec]
keyword[if] identifier[use_cmd] :
identifier[ns] = identifier[_UJOIN] %( identifier[self] . identifier[db] , literal[string] )
identifier[spec] = identifier[self] . identifier[as_command] ()[ literal[int] ]
identifier[ntoreturn] =- literal[int]
keyword[else] :
identifier[ntoreturn] = identifier[self] . identifier[batch_size] == literal[int] keyword[and] literal[int] keyword[or] identifier[self] . identifier[batch_size]
keyword[if] identifier[self] . identifier[limit] :
keyword[if] identifier[ntoreturn] :
identifier[ntoreturn] = identifier[min] ( identifier[self] . identifier[limit] , identifier[ntoreturn] )
keyword[else] :
identifier[ntoreturn] = identifier[self] . identifier[limit]
keyword[if] identifier[is_mongos] :
identifier[spec] = identifier[_maybe_add_read_preference] ( identifier[spec] ,
identifier[self] . identifier[read_preference] )
keyword[return] identifier[query] ( identifier[flags] , identifier[ns] , identifier[self] . identifier[ntoskip] , identifier[ntoreturn] ,
identifier[spec] , identifier[self] . identifier[fields] , identifier[self] . identifier[codec_options] ) | def get_message(self, set_slave_ok, is_mongos, use_cmd=False):
"""Get a query message, possibly setting the slaveOk bit."""
if set_slave_ok:
# Set the slaveOk bit.
flags = self.flags | 4 # depends on [control=['if'], data=[]]
else:
flags = self.flags
ns = _UJOIN % (self.db, self.coll)
spec = self.spec
if use_cmd:
ns = _UJOIN % (self.db, '$cmd')
spec = self.as_command()[0]
ntoreturn = -1 # All DB commands return 1 document # depends on [control=['if'], data=[]]
else:
# OP_QUERY treats ntoreturn of -1 and 1 the same, return
# one document and close the cursor. We have to use 2 for
# batch size if 1 is specified.
ntoreturn = self.batch_size == 1 and 2 or self.batch_size
if self.limit:
if ntoreturn:
ntoreturn = min(self.limit, ntoreturn) # depends on [control=['if'], data=[]]
else:
ntoreturn = self.limit # depends on [control=['if'], data=[]]
if is_mongos:
spec = _maybe_add_read_preference(spec, self.read_preference) # depends on [control=['if'], data=[]]
return query(flags, ns, self.ntoskip, ntoreturn, spec, self.fields, self.codec_options) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.