code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def com_google_fonts_check_post_table_version(ttFont, is_ttf):
"""Font has correct post table version (2 for TTF, 3 for OTF)?"""
formatType = ttFont['post'].formatType
if is_ttf:
expected = 2
else:
expected = 3
if formatType != expected:
yield FAIL, ("Post table should be version {} instead of {}."
" More info at https://github.com/google/fonts/"
"issues/215").format(expected, formatType)
else:
yield PASS, f"Font has post table version {expected}." | def function[com_google_fonts_check_post_table_version, parameter[ttFont, is_ttf]]:
constant[Font has correct post table version (2 for TTF, 3 for OTF)?]
variable[formatType] assign[=] call[name[ttFont]][constant[post]].formatType
if name[is_ttf] begin[:]
variable[expected] assign[=] constant[2]
if compare[name[formatType] not_equal[!=] name[expected]] begin[:]
<ast.Yield object at 0x7da1b121b6a0> | keyword[def] identifier[com_google_fonts_check_post_table_version] ( identifier[ttFont] , identifier[is_ttf] ):
literal[string]
identifier[formatType] = identifier[ttFont] [ literal[string] ]. identifier[formatType]
keyword[if] identifier[is_ttf] :
identifier[expected] = literal[int]
keyword[else] :
identifier[expected] = literal[int]
keyword[if] identifier[formatType] != identifier[expected] :
keyword[yield] identifier[FAIL] ,( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[expected] , identifier[formatType] )
keyword[else] :
keyword[yield] identifier[PASS] , literal[string] | def com_google_fonts_check_post_table_version(ttFont, is_ttf):
"""Font has correct post table version (2 for TTF, 3 for OTF)?"""
formatType = ttFont['post'].formatType
if is_ttf:
expected = 2 # depends on [control=['if'], data=[]]
else:
expected = 3
if formatType != expected:
yield (FAIL, 'Post table should be version {} instead of {}. More info at https://github.com/google/fonts/issues/215'.format(expected, formatType)) # depends on [control=['if'], data=['formatType', 'expected']]
else:
yield (PASS, f'Font has post table version {expected}.') |
def users(self):
"""Returns the list of users in the database"""
result = self.db.read("", {"q": "ls"})
if result is None or result.json() is None:
return []
users = []
for u in result.json():
usr = self(u["name"])
usr.metadata = u
users.append(usr)
return users | def function[users, parameter[self]]:
constant[Returns the list of users in the database]
variable[result] assign[=] call[name[self].db.read, parameter[constant[], dictionary[[<ast.Constant object at 0x7da18f811d20>], [<ast.Constant object at 0x7da18f812ef0>]]]]
if <ast.BoolOp object at 0x7da18f811210> begin[:]
return[list[[]]]
variable[users] assign[=] list[[]]
for taget[name[u]] in starred[call[name[result].json, parameter[]]] begin[:]
variable[usr] assign[=] call[name[self], parameter[call[name[u]][constant[name]]]]
name[usr].metadata assign[=] name[u]
call[name[users].append, parameter[name[usr]]]
return[name[users]] | keyword[def] identifier[users] ( identifier[self] ):
literal[string]
identifier[result] = identifier[self] . identifier[db] . identifier[read] ( literal[string] ,{ literal[string] : literal[string] })
keyword[if] identifier[result] keyword[is] keyword[None] keyword[or] identifier[result] . identifier[json] () keyword[is] keyword[None] :
keyword[return] []
identifier[users] =[]
keyword[for] identifier[u] keyword[in] identifier[result] . identifier[json] ():
identifier[usr] = identifier[self] ( identifier[u] [ literal[string] ])
identifier[usr] . identifier[metadata] = identifier[u]
identifier[users] . identifier[append] ( identifier[usr] )
keyword[return] identifier[users] | def users(self):
"""Returns the list of users in the database"""
result = self.db.read('', {'q': 'ls'})
if result is None or result.json() is None:
return [] # depends on [control=['if'], data=[]]
users = []
for u in result.json():
usr = self(u['name'])
usr.metadata = u
users.append(usr) # depends on [control=['for'], data=['u']]
return users |
def parse_def(self, text):
"""Parse the function definition text."""
self.__init__()
if not is_start_of_function(text):
return
self.func_indent = get_indent(text)
text = text.strip()
text = text.replace('\r\n', '')
text = text.replace('\n', '')
return_type_re = re.search(r'->[ ]*([a-zA-Z0-9_,()\[\] ]*):$', text)
if return_type_re:
self.return_type_annotated = return_type_re.group(1)
text_end = text.rfind(return_type_re.group(0))
else:
self.return_type_annotated = None
text_end = len(text)
pos_args_start = text.find('(') + 1
pos_args_end = text.rfind(')', pos_args_start, text_end)
self.args_text = text[pos_args_start:pos_args_end]
args_list = self.split_args_text_to_list(self.args_text)
if args_list is not None:
self.has_info = True
self.split_arg_to_name_type_value(args_list) | def function[parse_def, parameter[self, text]]:
constant[Parse the function definition text.]
call[name[self].__init__, parameter[]]
if <ast.UnaryOp object at 0x7da1b26ae7d0> begin[:]
return[None]
name[self].func_indent assign[=] call[name[get_indent], parameter[name[text]]]
variable[text] assign[=] call[name[text].strip, parameter[]]
variable[text] assign[=] call[name[text].replace, parameter[constant[
], constant[]]]
variable[text] assign[=] call[name[text].replace, parameter[constant[
], constant[]]]
variable[return_type_re] assign[=] call[name[re].search, parameter[constant[->[ ]*([a-zA-Z0-9_,()\[\] ]*):$], name[text]]]
if name[return_type_re] begin[:]
name[self].return_type_annotated assign[=] call[name[return_type_re].group, parameter[constant[1]]]
variable[text_end] assign[=] call[name[text].rfind, parameter[call[name[return_type_re].group, parameter[constant[0]]]]]
variable[pos_args_start] assign[=] binary_operation[call[name[text].find, parameter[constant[(]]] + constant[1]]
variable[pos_args_end] assign[=] call[name[text].rfind, parameter[constant[)], name[pos_args_start], name[text_end]]]
name[self].args_text assign[=] call[name[text]][<ast.Slice object at 0x7da18fe93c70>]
variable[args_list] assign[=] call[name[self].split_args_text_to_list, parameter[name[self].args_text]]
if compare[name[args_list] is_not constant[None]] begin[:]
name[self].has_info assign[=] constant[True]
call[name[self].split_arg_to_name_type_value, parameter[name[args_list]]] | keyword[def] identifier[parse_def] ( identifier[self] , identifier[text] ):
literal[string]
identifier[self] . identifier[__init__] ()
keyword[if] keyword[not] identifier[is_start_of_function] ( identifier[text] ):
keyword[return]
identifier[self] . identifier[func_indent] = identifier[get_indent] ( identifier[text] )
identifier[text] = identifier[text] . identifier[strip] ()
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
identifier[return_type_re] = identifier[re] . identifier[search] ( literal[string] , identifier[text] )
keyword[if] identifier[return_type_re] :
identifier[self] . identifier[return_type_annotated] = identifier[return_type_re] . identifier[group] ( literal[int] )
identifier[text_end] = identifier[text] . identifier[rfind] ( identifier[return_type_re] . identifier[group] ( literal[int] ))
keyword[else] :
identifier[self] . identifier[return_type_annotated] = keyword[None]
identifier[text_end] = identifier[len] ( identifier[text] )
identifier[pos_args_start] = identifier[text] . identifier[find] ( literal[string] )+ literal[int]
identifier[pos_args_end] = identifier[text] . identifier[rfind] ( literal[string] , identifier[pos_args_start] , identifier[text_end] )
identifier[self] . identifier[args_text] = identifier[text] [ identifier[pos_args_start] : identifier[pos_args_end] ]
identifier[args_list] = identifier[self] . identifier[split_args_text_to_list] ( identifier[self] . identifier[args_text] )
keyword[if] identifier[args_list] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[has_info] = keyword[True]
identifier[self] . identifier[split_arg_to_name_type_value] ( identifier[args_list] ) | def parse_def(self, text):
"""Parse the function definition text."""
self.__init__()
if not is_start_of_function(text):
return # depends on [control=['if'], data=[]]
self.func_indent = get_indent(text)
text = text.strip()
text = text.replace('\r\n', '')
text = text.replace('\n', '')
return_type_re = re.search('->[ ]*([a-zA-Z0-9_,()\\[\\] ]*):$', text)
if return_type_re:
self.return_type_annotated = return_type_re.group(1)
text_end = text.rfind(return_type_re.group(0)) # depends on [control=['if'], data=[]]
else:
self.return_type_annotated = None
text_end = len(text)
pos_args_start = text.find('(') + 1
pos_args_end = text.rfind(')', pos_args_start, text_end)
self.args_text = text[pos_args_start:pos_args_end]
args_list = self.split_args_text_to_list(self.args_text)
if args_list is not None:
self.has_info = True
self.split_arg_to_name_type_value(args_list) # depends on [control=['if'], data=['args_list']] |
def wikidata_search(query, lang="zh", output_lang="en", searchtype="item", max_result=1):
"""
wikification: search wikipedia pages for the given query
https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities
result format
{
searchinfo: - {
search: "birthday"
},
search: - [
- {
repository: "",
id: "P3150",
concepturi: "http://www.wikidata.org/entity/P3150",
url: "//www.wikidata.org/wiki/Property:P3150",
title: "Property:P3150",
pageid: 28754653,
datatype: "wikibase-item",
label: "birthday",
description: "item for day and month on which the subject was born. Used when full "date of birth" (P569) isn't known.",
match: - {
type: "label",
language: "en",
text: "birthday"
}
}
"""
query = any2unicode(query)
params = {
"action":"wbsearchentities",
"search": query,
"format":"json",
"language":lang,
"uselang":output_lang,
"type":searchtype
}
urlBase = "https://www.wikidata.org/w/api.php?"
url = urlBase + urllib.urlencode(any2utf8(params))
#logging.info(url)
r = requests.get(url)
results = json.loads(r.content).get("search",[])
#logging.info(items)
property_list = [
{"name":"name", "alternateName":["label"]},
{"name":"url", "alternateName":["concepturi"]},
{"name":"identifier", "alternateName":["id"]},
{"name":"description"},
]
items = []
ret = {"query": query, "itemList":items}
for result in results[0:max_result]:
#logging.info(result)
item = json_dict_copy(result, property_list)
items.append(item)
return ret | def function[wikidata_search, parameter[query, lang, output_lang, searchtype, max_result]]:
constant[
wikification: search wikipedia pages for the given query
https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities
result format
{
searchinfo: - {
search: "birthday"
},
search: - [
- {
repository: "",
id: "P3150",
concepturi: "http://www.wikidata.org/entity/P3150",
url: "//www.wikidata.org/wiki/Property:P3150",
title: "Property:P3150",
pageid: 28754653,
datatype: "wikibase-item",
label: "birthday",
description: "item for day and month on which the subject was born. Used when full "date of birth" (P569) isn't known.",
match: - {
type: "label",
language: "en",
text: "birthday"
}
}
]
variable[query] assign[=] call[name[any2unicode], parameter[name[query]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b2372c20>, <ast.Constant object at 0x7da1b2372350>, <ast.Constant object at 0x7da1b23713f0>, <ast.Constant object at 0x7da1b23728f0>, <ast.Constant object at 0x7da1b23712a0>, <ast.Constant object at 0x7da1b23736d0>], [<ast.Constant object at 0x7da1b2370fa0>, <ast.Name object at 0x7da1b2371de0>, <ast.Constant object at 0x7da1b2373790>, <ast.Name object at 0x7da1b2372e90>, <ast.Name object at 0x7da1b2371930>, <ast.Name object at 0x7da1b2371150>]]
variable[urlBase] assign[=] constant[https://www.wikidata.org/w/api.php?]
variable[url] assign[=] binary_operation[name[urlBase] + call[name[urllib].urlencode, parameter[call[name[any2utf8], parameter[name[params]]]]]]
variable[r] assign[=] call[name[requests].get, parameter[name[url]]]
variable[results] assign[=] call[call[name[json].loads, parameter[name[r].content]].get, parameter[constant[search], list[[]]]]
variable[property_list] assign[=] list[[<ast.Dict object at 0x7da1b2373dc0>, <ast.Dict object at 0x7da1b2371300>, <ast.Dict object at 0x7da1b23730a0>, <ast.Dict object at 0x7da1b2370850>]]
variable[items] assign[=] list[[]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2371e10>, <ast.Constant object at 0x7da1b2370700>], [<ast.Name object at 0x7da1b2372b90>, <ast.Name object at 0x7da1b2373cd0>]]
for taget[name[result]] in starred[call[name[results]][<ast.Slice object at 0x7da1b2373880>]] begin[:]
variable[item] assign[=] call[name[json_dict_copy], parameter[name[result], name[property_list]]]
call[name[items].append, parameter[name[item]]]
return[name[ret]] | keyword[def] identifier[wikidata_search] ( identifier[query] , identifier[lang] = literal[string] , identifier[output_lang] = literal[string] , identifier[searchtype] = literal[string] , identifier[max_result] = literal[int] ):
literal[string]
identifier[query] = identifier[any2unicode] ( identifier[query] )
identifier[params] ={
literal[string] : literal[string] ,
literal[string] : identifier[query] ,
literal[string] : literal[string] ,
literal[string] : identifier[lang] ,
literal[string] : identifier[output_lang] ,
literal[string] : identifier[searchtype]
}
identifier[urlBase] = literal[string]
identifier[url] = identifier[urlBase] + identifier[urllib] . identifier[urlencode] ( identifier[any2utf8] ( identifier[params] ))
identifier[r] = identifier[requests] . identifier[get] ( identifier[url] )
identifier[results] = identifier[json] . identifier[loads] ( identifier[r] . identifier[content] ). identifier[get] ( literal[string] ,[])
identifier[property_list] =[
{ literal[string] : literal[string] , literal[string] :[ literal[string] ]},
{ literal[string] : literal[string] , literal[string] :[ literal[string] ]},
{ literal[string] : literal[string] , literal[string] :[ literal[string] ]},
{ literal[string] : literal[string] },
]
identifier[items] =[]
identifier[ret] ={ literal[string] : identifier[query] , literal[string] : identifier[items] }
keyword[for] identifier[result] keyword[in] identifier[results] [ literal[int] : identifier[max_result] ]:
identifier[item] = identifier[json_dict_copy] ( identifier[result] , identifier[property_list] )
identifier[items] . identifier[append] ( identifier[item] )
keyword[return] identifier[ret] | def wikidata_search(query, lang='zh', output_lang='en', searchtype='item', max_result=1):
"""
wikification: search wikipedia pages for the given query
https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities
result format
{
searchinfo: - {
search: "birthday"
},
search: - [
- {
repository: "",
id: "P3150",
concepturi: "http://www.wikidata.org/entity/P3150",
url: "//www.wikidata.org/wiki/Property:P3150",
title: "Property:P3150",
pageid: 28754653,
datatype: "wikibase-item",
label: "birthday",
description: "item for day and month on which the subject was born. Used when full "date of birth" (P569) isn't known.",
match: - {
type: "label",
language: "en",
text: "birthday"
}
}
"""
query = any2unicode(query)
params = {'action': 'wbsearchentities', 'search': query, 'format': 'json', 'language': lang, 'uselang': output_lang, 'type': searchtype}
urlBase = 'https://www.wikidata.org/w/api.php?'
url = urlBase + urllib.urlencode(any2utf8(params))
#logging.info(url)
r = requests.get(url)
results = json.loads(r.content).get('search', [])
#logging.info(items)
property_list = [{'name': 'name', 'alternateName': ['label']}, {'name': 'url', 'alternateName': ['concepturi']}, {'name': 'identifier', 'alternateName': ['id']}, {'name': 'description'}]
items = []
ret = {'query': query, 'itemList': items}
for result in results[0:max_result]:
#logging.info(result)
item = json_dict_copy(result, property_list)
items.append(item) # depends on [control=['for'], data=['result']]
return ret |
def count(self, table):
"""
Use the current Query object to count the number of entries in `table`
that satisfy `queries`.
Parameters
----------
table : NumPy structured array, astropy Table, etc.
Returns
-------
count : int
"""
if self._operator is None and self._operands is None:
return self._get_table_len(table)
return np.count_nonzero(self.mask(table)) | def function[count, parameter[self, table]]:
constant[
Use the current Query object to count the number of entries in `table`
that satisfy `queries`.
Parameters
----------
table : NumPy structured array, astropy Table, etc.
Returns
-------
count : int
]
if <ast.BoolOp object at 0x7da2041d99c0> begin[:]
return[call[name[self]._get_table_len, parameter[name[table]]]]
return[call[name[np].count_nonzero, parameter[call[name[self].mask, parameter[name[table]]]]]] | keyword[def] identifier[count] ( identifier[self] , identifier[table] ):
literal[string]
keyword[if] identifier[self] . identifier[_operator] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[_operands] keyword[is] keyword[None] :
keyword[return] identifier[self] . identifier[_get_table_len] ( identifier[table] )
keyword[return] identifier[np] . identifier[count_nonzero] ( identifier[self] . identifier[mask] ( identifier[table] )) | def count(self, table):
"""
Use the current Query object to count the number of entries in `table`
that satisfy `queries`.
Parameters
----------
table : NumPy structured array, astropy Table, etc.
Returns
-------
count : int
"""
if self._operator is None and self._operands is None:
return self._get_table_len(table) # depends on [control=['if'], data=[]]
return np.count_nonzero(self.mask(table)) |
def cas(self, key, value, cas, time=0, compress_level=-1):
"""
Set a value for a key on server if its CAS value matches cas.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param cas: The CAS value previously obtained from a call to get*.
:type cas: int
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
"""
server = self._get_server(key)
return server.cas(key, value, cas, time, compress_level) | def function[cas, parameter[self, key, value, cas, time, compress_level]]:
constant[
Set a value for a key on server if its CAS value matches cas.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param cas: The CAS value previously obtained from a call to get*.
:type cas: int
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
]
variable[server] assign[=] call[name[self]._get_server, parameter[name[key]]]
return[call[name[server].cas, parameter[name[key], name[value], name[cas], name[time], name[compress_level]]]] | keyword[def] identifier[cas] ( identifier[self] , identifier[key] , identifier[value] , identifier[cas] , identifier[time] = literal[int] , identifier[compress_level] =- literal[int] ):
literal[string]
identifier[server] = identifier[self] . identifier[_get_server] ( identifier[key] )
keyword[return] identifier[server] . identifier[cas] ( identifier[key] , identifier[value] , identifier[cas] , identifier[time] , identifier[compress_level] ) | def cas(self, key, value, cas, time=0, compress_level=-1):
"""
Set a value for a key on server if its CAS value matches cas.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param cas: The CAS value previously obtained from a call to get*.
:type cas: int
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
"""
server = self._get_server(key)
return server.cas(key, value, cas, time, compress_level) |
def split_header(line):
""" Split a header line into field name and field value.
Note that custom fields may contain colons inside the curly braces,
so we need a special test for them.
:param line: A message line to be split.
:returns: (Field name, field value) tuple.
"""
match = re.match(r'^(CF\.\{.*?}): (.*)$', line)
if match:
return (match.group(1), match.group(2))
return line.split(': ', 1) | def function[split_header, parameter[line]]:
constant[ Split a header line into field name and field value.
Note that custom fields may contain colons inside the curly braces,
so we need a special test for them.
:param line: A message line to be split.
:returns: (Field name, field value) tuple.
]
variable[match] assign[=] call[name[re].match, parameter[constant[^(CF\.\{.*?}): (.*)$], name[line]]]
if name[match] begin[:]
return[tuple[[<ast.Call object at 0x7da18fe938e0>, <ast.Call object at 0x7da18fe91bd0>]]]
return[call[name[line].split, parameter[constant[: ], constant[1]]]] | keyword[def] identifier[split_header] ( identifier[line] ):
literal[string]
identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[line] )
keyword[if] identifier[match] :
keyword[return] ( identifier[match] . identifier[group] ( literal[int] ), identifier[match] . identifier[group] ( literal[int] ))
keyword[return] identifier[line] . identifier[split] ( literal[string] , literal[int] ) | def split_header(line):
""" Split a header line into field name and field value.
Note that custom fields may contain colons inside the curly braces,
so we need a special test for them.
:param line: A message line to be split.
:returns: (Field name, field value) tuple.
"""
match = re.match('^(CF\\.\\{.*?}): (.*)$', line)
if match:
return (match.group(1), match.group(2)) # depends on [control=['if'], data=[]]
return line.split(': ', 1) |
def get(self, id, lazy=False, **kwargs):
"""Retrieve a single object.
Args:
id (int or str): ID of the object to retrieve
lazy (bool): If True, don't request the server, but create a
shallow object giving access to the managers. This is
useful if you want to avoid useless calls to the API.
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
object: The generated RESTObject.
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the server cannot perform the request
"""
if not isinstance(id, int):
id = id.replace('/', '%2F')
path = '%s/%s' % (self.path, id)
if lazy is True:
return self._obj_cls(self, {self._obj_cls._id_attr: id})
server_data = self.gitlab.http_get(path, **kwargs)
return self._obj_cls(self, server_data) | def function[get, parameter[self, id, lazy]]:
constant[Retrieve a single object.
Args:
id (int or str): ID of the object to retrieve
lazy (bool): If True, don't request the server, but create a
shallow object giving access to the managers. This is
useful if you want to avoid useless calls to the API.
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
object: The generated RESTObject.
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the server cannot perform the request
]
if <ast.UnaryOp object at 0x7da20c6c59f0> begin[:]
variable[id] assign[=] call[name[id].replace, parameter[constant[/], constant[%2F]]]
variable[path] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6c49d0>, <ast.Name object at 0x7da20c6c4790>]]]
if compare[name[lazy] is constant[True]] begin[:]
return[call[name[self]._obj_cls, parameter[name[self], dictionary[[<ast.Attribute object at 0x7da20c6c6f50>], [<ast.Name object at 0x7da20c6c7bb0>]]]]]
variable[server_data] assign[=] call[name[self].gitlab.http_get, parameter[name[path]]]
return[call[name[self]._obj_cls, parameter[name[self], name[server_data]]]] | keyword[def] identifier[get] ( identifier[self] , identifier[id] , identifier[lazy] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[id] , identifier[int] ):
identifier[id] = identifier[id] . identifier[replace] ( literal[string] , literal[string] )
identifier[path] = literal[string] %( identifier[self] . identifier[path] , identifier[id] )
keyword[if] identifier[lazy] keyword[is] keyword[True] :
keyword[return] identifier[self] . identifier[_obj_cls] ( identifier[self] ,{ identifier[self] . identifier[_obj_cls] . identifier[_id_attr] : identifier[id] })
identifier[server_data] = identifier[self] . identifier[gitlab] . identifier[http_get] ( identifier[path] ,** identifier[kwargs] )
keyword[return] identifier[self] . identifier[_obj_cls] ( identifier[self] , identifier[server_data] ) | def get(self, id, lazy=False, **kwargs):
"""Retrieve a single object.
Args:
id (int or str): ID of the object to retrieve
lazy (bool): If True, don't request the server, but create a
shallow object giving access to the managers. This is
useful if you want to avoid useless calls to the API.
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
object: The generated RESTObject.
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the server cannot perform the request
"""
if not isinstance(id, int):
id = id.replace('/', '%2F') # depends on [control=['if'], data=[]]
path = '%s/%s' % (self.path, id)
if lazy is True:
return self._obj_cls(self, {self._obj_cls._id_attr: id}) # depends on [control=['if'], data=[]]
server_data = self.gitlab.http_get(path, **kwargs)
return self._obj_cls(self, server_data) |
def configure_custom(self, config):
"""Configure an object with a user-supplied factory."""
c = config.pop('()')
if not hasattr(c, '__call__') and \
hasattr(types, 'ClassType') and isinstance(c, types.ClassType):
c = self.resolve(c)
props = config.pop('.', None)
# Check for valid identifiers
kwargs = dict((k, config[k]) for k in config if valid_ident(k))
result = c(**kwargs)
if props:
for name, value in props.items():
setattr(result, name, value)
return result | def function[configure_custom, parameter[self, config]]:
constant[Configure an object with a user-supplied factory.]
variable[c] assign[=] call[name[config].pop, parameter[constant[()]]]
if <ast.BoolOp object at 0x7da207f03850> begin[:]
variable[c] assign[=] call[name[self].resolve, parameter[name[c]]]
variable[props] assign[=] call[name[config].pop, parameter[constant[.], constant[None]]]
variable[kwargs] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da207f03a00>]]
variable[result] assign[=] call[name[c], parameter[]]
if name[props] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c6a9990>, <ast.Name object at 0x7da20c6a86a0>]]] in starred[call[name[props].items, parameter[]]] begin[:]
call[name[setattr], parameter[name[result], name[name], name[value]]]
return[name[result]] | keyword[def] identifier[configure_custom] ( identifier[self] , identifier[config] ):
literal[string]
identifier[c] = identifier[config] . identifier[pop] ( literal[string] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[c] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[types] , literal[string] ) keyword[and] identifier[isinstance] ( identifier[c] , identifier[types] . identifier[ClassType] ):
identifier[c] = identifier[self] . identifier[resolve] ( identifier[c] )
identifier[props] = identifier[config] . identifier[pop] ( literal[string] , keyword[None] )
identifier[kwargs] = identifier[dict] (( identifier[k] , identifier[config] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[config] keyword[if] identifier[valid_ident] ( identifier[k] ))
identifier[result] = identifier[c] (** identifier[kwargs] )
keyword[if] identifier[props] :
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[props] . identifier[items] ():
identifier[setattr] ( identifier[result] , identifier[name] , identifier[value] )
keyword[return] identifier[result] | def configure_custom(self, config):
"""Configure an object with a user-supplied factory."""
c = config.pop('()')
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and isinstance(c, types.ClassType):
c = self.resolve(c) # depends on [control=['if'], data=[]]
props = config.pop('.', None)
# Check for valid identifiers
kwargs = dict(((k, config[k]) for k in config if valid_ident(k)))
result = c(**kwargs)
if props:
for (name, value) in props.items():
setattr(result, name, value) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return result |
def serve_doc(app, url):
"""
Serve API documentation extracted from request handler docstrings
Parameters:
* app: Grole application object
* url: URL to serve at
"""
@app.route(url, doc=False)
def index(env, req):
ret = ''
for d in env['doc']:
ret += 'URL: {url}, supported methods: {methods}{doc}\n'.format(**d)
return ret | def function[serve_doc, parameter[app, url]]:
constant[
Serve API documentation extracted from request handler docstrings
Parameters:
* app: Grole application object
* url: URL to serve at
]
def function[index, parameter[env, req]]:
variable[ret] assign[=] constant[]
for taget[name[d]] in starred[call[name[env]][constant[doc]]] begin[:]
<ast.AugAssign object at 0x7da1b1c65690>
return[name[ret]] | keyword[def] identifier[serve_doc] ( identifier[app] , identifier[url] ):
literal[string]
@ identifier[app] . identifier[route] ( identifier[url] , identifier[doc] = keyword[False] )
keyword[def] identifier[index] ( identifier[env] , identifier[req] ):
identifier[ret] = literal[string]
keyword[for] identifier[d] keyword[in] identifier[env] [ literal[string] ]:
identifier[ret] += literal[string] . identifier[format] (** identifier[d] )
keyword[return] identifier[ret] | def serve_doc(app, url):
"""
Serve API documentation extracted from request handler docstrings
Parameters:
* app: Grole application object
* url: URL to serve at
"""
@app.route(url, doc=False)
def index(env, req):
ret = ''
for d in env['doc']:
ret += 'URL: {url}, supported methods: {methods}{doc}\n'.format(**d) # depends on [control=['for'], data=['d']]
return ret |
def get_default_dashboard(self):
"""Returns the default :class:`~horizon.Dashboard` instance.
If ``"default_dashboard"`` is specified in ``HORIZON_CONFIG``
then that dashboard will be returned. If not, the first dashboard
returned by :func:`~horizon.get_dashboards` will be returned.
"""
if self.default_dashboard:
return self._registered(self.default_dashboard)
elif self._registry:
return self.get_dashboards()[0]
else:
raise NotRegistered("No dashboard modules have been registered.") | def function[get_default_dashboard, parameter[self]]:
constant[Returns the default :class:`~horizon.Dashboard` instance.
If ``"default_dashboard"`` is specified in ``HORIZON_CONFIG``
then that dashboard will be returned. If not, the first dashboard
returned by :func:`~horizon.get_dashboards` will be returned.
]
if name[self].default_dashboard begin[:]
return[call[name[self]._registered, parameter[name[self].default_dashboard]]] | keyword[def] identifier[get_default_dashboard] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[default_dashboard] :
keyword[return] identifier[self] . identifier[_registered] ( identifier[self] . identifier[default_dashboard] )
keyword[elif] identifier[self] . identifier[_registry] :
keyword[return] identifier[self] . identifier[get_dashboards] ()[ literal[int] ]
keyword[else] :
keyword[raise] identifier[NotRegistered] ( literal[string] ) | def get_default_dashboard(self):
"""Returns the default :class:`~horizon.Dashboard` instance.
If ``"default_dashboard"`` is specified in ``HORIZON_CONFIG``
then that dashboard will be returned. If not, the first dashboard
returned by :func:`~horizon.get_dashboards` will be returned.
"""
if self.default_dashboard:
return self._registered(self.default_dashboard) # depends on [control=['if'], data=[]]
elif self._registry:
return self.get_dashboards()[0] # depends on [control=['if'], data=[]]
else:
raise NotRegistered('No dashboard modules have been registered.') |
def github_repo(github_repo, plugin_name):
    """
    Returns a GitRepo from a github repository after either cloning or fetching
    (depending on whether it exists)

    @param github_repo: the github repository path, e.g. 'drupal/drupal/'
    @param plugin_name: the current plugin's name (for namespace purposes).
    """
    normalized = _github_normalize(github_repo)
    # Build the full clone URL from the github base prefix.
    repo = GitRepo('%s%s' % (GH, normalized), plugin_name)
    repo.init()
    return repo
constant[
Returns a GitRepo from a github repository after either cloning or fetching
(depending on whether it exists)
@param github_repo: the github repository path, e.g. 'drupal/drupal/'
@param plugin_name: the current plugin's name (for namespace purposes).
]
variable[github_repo] assign[=] call[name[_github_normalize], parameter[name[github_repo]]]
variable[repo_url] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1da2920>, <ast.Name object at 0x7da1b1da0280>]]]
variable[gr] assign[=] call[name[GitRepo], parameter[name[repo_url], name[plugin_name]]]
call[name[gr].init, parameter[]]
return[name[gr]] | keyword[def] identifier[github_repo] ( identifier[github_repo] , identifier[plugin_name] ):
literal[string]
identifier[github_repo] = identifier[_github_normalize] ( identifier[github_repo] )
identifier[repo_url] = literal[string] %( identifier[GH] , identifier[github_repo] )
identifier[gr] = identifier[GitRepo] ( identifier[repo_url] , identifier[plugin_name] )
identifier[gr] . identifier[init] ()
keyword[return] identifier[gr] | def github_repo(github_repo, plugin_name):
"""
Returns a GitRepo from a github repository after either cloning or fetching
(depending on whether it exists)
@param github_repo: the github repository path, e.g. 'drupal/drupal/'
@param plugin_name: the current plugin's name (for namespace purposes).
"""
github_repo = _github_normalize(github_repo)
repo_url = '%s%s' % (GH, github_repo)
gr = GitRepo(repo_url, plugin_name)
gr.init()
return gr |
def render_template_string(source, **context):
    """Renders a template from the given template source string
    with the given context.

    :param source: the sourcecode of the template to be
                   rendered
    :param context: the variables that should be available in the
                    context of the template.
    """
    app = _app_ctx_stack.top.app
    # Let the application inject its standard template globals first.
    app.update_template_context(context)
    template = app.jinja_env.from_string(source)
    return _render(template, context, app)
constant[Renders a template from the given template source string
with the given context.
:param source: the sourcecode of the template to be
rendered
:param context: the variables that should be available in the
context of the template.
]
variable[ctx] assign[=] name[_app_ctx_stack].top
call[name[ctx].app.update_template_context, parameter[name[context]]]
return[call[name[_render], parameter[call[name[ctx].app.jinja_env.from_string, parameter[name[source]]], name[context], name[ctx].app]]] | keyword[def] identifier[render_template_string] ( identifier[source] ,** identifier[context] ):
literal[string]
identifier[ctx] = identifier[_app_ctx_stack] . identifier[top]
identifier[ctx] . identifier[app] . identifier[update_template_context] ( identifier[context] )
keyword[return] identifier[_render] ( identifier[ctx] . identifier[app] . identifier[jinja_env] . identifier[from_string] ( identifier[source] ),
identifier[context] , identifier[ctx] . identifier[app] ) | def render_template_string(source, **context):
"""Renders a template from the given template source string
with the given context.
:param source: the sourcecode of the template to be
rendered
:param context: the variables that should be available in the
context of the template.
"""
ctx = _app_ctx_stack.top
ctx.app.update_template_context(context)
return _render(ctx.app.jinja_env.from_string(source), context, ctx.app) |
def get_variable_value_for_variation(self, variable, variation):
    """ Get the variable value for the given variation.

    Args:
        variable: The Variable for which we are getting the value.
        variation: The Variation for which we are getting the variable value.

    Returns:
        The variable value or None if any of the inputs are invalid.
    """
    # Both inputs are required; bail out early otherwise.
    if not variable or not variation:
        return None

    if variation.id not in self.variation_variable_usage_map:
        self.logger.error('Variation with ID "%s" is not in the datafile.' % variation.id)
        return None

    # Look up how (and whether) this variation overrides the variable.
    usages = self.variation_variable_usage_map[variation.id]
    usage = usages.get(variable.id) if usages else None

    if usage:
        value = usage.value
        self.logger.info('Value for variable "%s" for variation "%s" is "%s".' % (
            variable.key,
            variation.key,
            value
        ))
    else:
        # The variation does not override it; fall back to the default.
        value = variable.defaultValue
        self.logger.info('Variable "%s" is not used in variation "%s". Assigning default value "%s".' % (
            variable.key,
            variation.key,
            value
        ))
    return value
constant[ Get the variable value for the given variation.
Args:
variable: The Variable for which we are getting the value.
variation: The Variation for which we are getting the variable value.
Returns:
The variable value or None if any of the inputs are invalid.
]
if <ast.BoolOp object at 0x7da1b11aafb0> begin[:]
return[constant[None]]
if compare[name[variation].id <ast.NotIn object at 0x7da2590d7190> name[self].variation_variable_usage_map] begin[:]
call[name[self].logger.error, parameter[binary_operation[constant[Variation with ID "%s" is not in the datafile.] <ast.Mod object at 0x7da2590d6920> name[variation].id]]]
return[constant[None]]
variable[variable_usages] assign[=] call[name[self].variation_variable_usage_map][name[variation].id]
variable[variable_usage] assign[=] constant[None]
if name[variable_usages] begin[:]
variable[variable_usage] assign[=] call[name[variable_usages].get, parameter[name[variable].id]]
if name[variable_usage] begin[:]
variable[variable_value] assign[=] name[variable_usage].value
call[name[self].logger.info, parameter[binary_operation[constant[Value for variable "%s" for variation "%s" is "%s".] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b11aada0>, <ast.Attribute object at 0x7da1b11aaf50>, <ast.Name object at 0x7da1b11ab280>]]]]]
return[name[variable_value]] | keyword[def] identifier[get_variable_value_for_variation] ( identifier[self] , identifier[variable] , identifier[variation] ):
literal[string]
keyword[if] keyword[not] identifier[variable] keyword[or] keyword[not] identifier[variation] :
keyword[return] keyword[None]
keyword[if] identifier[variation] . identifier[id] keyword[not] keyword[in] identifier[self] . identifier[variation_variable_usage_map] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] % identifier[variation] . identifier[id] )
keyword[return] keyword[None]
identifier[variable_usages] = identifier[self] . identifier[variation_variable_usage_map] [ identifier[variation] . identifier[id] ]
identifier[variable_usage] = keyword[None]
keyword[if] identifier[variable_usages] :
identifier[variable_usage] = identifier[variable_usages] . identifier[get] ( identifier[variable] . identifier[id] )
keyword[if] identifier[variable_usage] :
identifier[variable_value] = identifier[variable_usage] . identifier[value]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] %(
identifier[variable] . identifier[key] ,
identifier[variation] . identifier[key] ,
identifier[variable_value]
))
keyword[else] :
identifier[variable_value] = identifier[variable] . identifier[defaultValue]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] %(
identifier[variable] . identifier[key] ,
identifier[variation] . identifier[key] ,
identifier[variable_value]
))
keyword[return] identifier[variable_value] | def get_variable_value_for_variation(self, variable, variation):
""" Get the variable value for the given variation.
Args:
variable: The Variable for which we are getting the value.
variation: The Variation for which we are getting the variable value.
Returns:
The variable value or None if any of the inputs are invalid.
"""
if not variable or not variation:
return None # depends on [control=['if'], data=[]]
if variation.id not in self.variation_variable_usage_map:
self.logger.error('Variation with ID "%s" is not in the datafile.' % variation.id)
return None # depends on [control=['if'], data=[]]
# Get all variable usages for the given variation
variable_usages = self.variation_variable_usage_map[variation.id]
# Find usage in given variation
variable_usage = None
if variable_usages:
variable_usage = variable_usages.get(variable.id) # depends on [control=['if'], data=[]]
if variable_usage:
variable_value = variable_usage.value
self.logger.info('Value for variable "%s" for variation "%s" is "%s".' % (variable.key, variation.key, variable_value)) # depends on [control=['if'], data=[]]
else:
variable_value = variable.defaultValue
self.logger.info('Variable "%s" is not used in variation "%s". Assigning default value "%s".' % (variable.key, variation.key, variable_value))
return variable_value |
def allowDeletion(store, tableClass, comparisonFactory):
    """
    Returns a C{bool} indicating whether deletion of an item or items of a
    particular item type should be allowed to proceed.

    @param tableClass: An L{Item} subclass.

    @param comparison: A one-argument callable taking an attribute and
        returning an L{iaxiom.IComparison} describing the items to
        collect.

    @return: A C{bool} indicating whether deletion should be allowed.
    """
    # Attributes registered against this type, plus the global (None) ones.
    blockers = _disallows.get(tableClass, []) + _disallows.get(None, [])
    for blocker in blockers:
        # A single matching item is enough to veto the deletion.
        for _item in store.query(blocker.type, comparisonFactory(blocker),
                                 limit=1):
            return False
    return True
constant[
Returns a C{bool} indicating whether deletion of an item or items of a
particular item type should be allowed to proceed.
@param tableClass: An L{Item} subclass.
@param comparison: A one-argument callable taking an attribute and
returning an L{iaxiom.IComparison} describing the items to
collect.
@return: A C{bool} indicating whether deletion should be allowed.
]
for taget[name[cascadingAttr]] in starred[binary_operation[call[name[_disallows].get, parameter[name[tableClass], list[[]]]] + call[name[_disallows].get, parameter[constant[None], list[[]]]]]] begin[:]
for taget[name[cascadedItem]] in starred[call[name[store].query, parameter[name[cascadingAttr].type, call[name[comparisonFactory], parameter[name[cascadingAttr]]]]]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[allowDeletion] ( identifier[store] , identifier[tableClass] , identifier[comparisonFactory] ):
literal[string]
keyword[for] identifier[cascadingAttr] keyword[in] ( identifier[_disallows] . identifier[get] ( identifier[tableClass] ,[])+
identifier[_disallows] . identifier[get] ( keyword[None] ,[])):
keyword[for] identifier[cascadedItem] keyword[in] identifier[store] . identifier[query] ( identifier[cascadingAttr] . identifier[type] ,
identifier[comparisonFactory] ( identifier[cascadingAttr] ),
identifier[limit] = literal[int] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def allowDeletion(store, tableClass, comparisonFactory):
"""
Returns a C{bool} indicating whether deletion of an item or items of a
particular item type should be allowed to proceed.
@param tableClass: An L{Item} subclass.
@param comparison: A one-argument callable taking an attribute and
returning an L{iaxiom.IComparison} describing the items to
collect.
@return: A C{bool} indicating whether deletion should be allowed.
"""
for cascadingAttr in _disallows.get(tableClass, []) + _disallows.get(None, []):
for cascadedItem in store.query(cascadingAttr.type, comparisonFactory(cascadingAttr), limit=1):
return False # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['cascadingAttr']]
return True |
def read_band_blocks(self, blocksize=CHUNK_SIZE):
    """Read the band in native blocks and wrap the result in a DataArray."""
    # For sentinel 1 data, the blocks are 1 line, and dask seems to choke
    # on that.
    band = self.filehandle
    shape = band.shape
    graph_name = 'read_band-' + tokenize(blocksize, band)
    graph = dict()
    if len(band.block_shapes) != 1:
        raise NotImplementedError('Bands with multiple shapes not supported.')
    chunks = band.block_shapes[0]

    def do_read(the_band, the_window, the_lock):
        # Serialize access: the underlying file handle is shared between
        # tasks and is not assumed thread-safe.
        with the_lock:
            return the_band.read(1, None, window=the_window)

    # One graph task per native block window.
    for block_idx, window in band.block_windows(1):
        graph[(graph_name, ) + block_idx] = (do_read, band, window,
                                             self.read_lock)

    arr = da.Array(graph, graph_name, shape=list(shape),
                   chunks=chunks,
                   dtype=band.dtypes[0])
    return DataArray(arr, dims=('y', 'x'))
constant[Read the band in native blocks.]
variable[band] assign[=] name[self].filehandle
variable[shape] assign[=] name[band].shape
variable[token] assign[=] call[name[tokenize], parameter[name[blocksize], name[band]]]
variable[name] assign[=] binary_operation[constant[read_band-] + name[token]]
variable[dskx] assign[=] call[name[dict], parameter[]]
if compare[call[name[len], parameter[name[band].block_shapes]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b22fa3b0>
def function[do_read, parameter[the_band, the_window, the_lock]]:
with name[the_lock] begin[:]
return[call[name[the_band].read, parameter[constant[1], constant[None]]]]
for taget[tuple[[<ast.Name object at 0x7da1b22faa70>, <ast.Name object at 0x7da1b22fa1d0>]]] in starred[call[name[band].block_windows, parameter[constant[1]]]] begin[:]
call[name[dskx]][binary_operation[tuple[[<ast.Name object at 0x7da1b22fab00>]] + name[ji]]] assign[=] tuple[[<ast.Name object at 0x7da1b22fa410>, <ast.Name object at 0x7da1b22f83d0>, <ast.Name object at 0x7da1b22f8130>, <ast.Attribute object at 0x7da1b22f81c0>]]
variable[res] assign[=] call[name[da].Array, parameter[name[dskx], name[name]]]
return[call[name[DataArray], parameter[name[res]]]] | keyword[def] identifier[read_band_blocks] ( identifier[self] , identifier[blocksize] = identifier[CHUNK_SIZE] ):
literal[string]
identifier[band] = identifier[self] . identifier[filehandle]
identifier[shape] = identifier[band] . identifier[shape]
identifier[token] = identifier[tokenize] ( identifier[blocksize] , identifier[band] )
identifier[name] = literal[string] + identifier[token]
identifier[dskx] = identifier[dict] ()
keyword[if] identifier[len] ( identifier[band] . identifier[block_shapes] )!= literal[int] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[else] :
identifier[chunks] = identifier[band] . identifier[block_shapes] [ literal[int] ]
keyword[def] identifier[do_read] ( identifier[the_band] , identifier[the_window] , identifier[the_lock] ):
keyword[with] identifier[the_lock] :
keyword[return] identifier[the_band] . identifier[read] ( literal[int] , keyword[None] , identifier[window] = identifier[the_window] )
keyword[for] identifier[ji] , identifier[window] keyword[in] identifier[band] . identifier[block_windows] ( literal[int] ):
identifier[dskx] [( identifier[name] ,)+ identifier[ji] ]=( identifier[do_read] , identifier[band] , identifier[window] , identifier[self] . identifier[read_lock] )
identifier[res] = identifier[da] . identifier[Array] ( identifier[dskx] , identifier[name] , identifier[shape] = identifier[list] ( identifier[shape] ),
identifier[chunks] = identifier[chunks] ,
identifier[dtype] = identifier[band] . identifier[dtypes] [ literal[int] ])
keyword[return] identifier[DataArray] ( identifier[res] , identifier[dims] =( literal[string] , literal[string] )) | def read_band_blocks(self, blocksize=CHUNK_SIZE):
"""Read the band in native blocks."""
# For sentinel 1 data, the block are 1 line, and dask seems to choke on that.
band = self.filehandle
shape = band.shape
token = tokenize(blocksize, band)
name = 'read_band-' + token
dskx = dict()
if len(band.block_shapes) != 1:
raise NotImplementedError('Bands with multiple shapes not supported.') # depends on [control=['if'], data=[]]
else:
chunks = band.block_shapes[0]
def do_read(the_band, the_window, the_lock):
with the_lock:
return the_band.read(1, None, window=the_window) # depends on [control=['with'], data=[]]
for (ji, window) in band.block_windows(1):
dskx[(name,) + ji] = (do_read, band, window, self.read_lock) # depends on [control=['for'], data=[]]
res = da.Array(dskx, name, shape=list(shape), chunks=chunks, dtype=band.dtypes[0])
return DataArray(res, dims=('y', 'x')) |
def shift_to_the_left(array, dist, pad=True, trim=True):
    """Shift array to the left.

    :param array: An iterable object.
    :type array: iterable object

    :param dist: how far you want to shift
    :type dist: int

    :param pad: pad array[-1] to the right.
    :type pad: boolean (default True)

    :param trim: trim the first ``#dist`` items.
    :type trim: boolean (default True)

    :raises ValueError: if ``dist`` is negative.

    Usage::

        >>> array = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        >>> shift_to_the_left(array, 1, pad=True, trim=True)
        [1, 2, 3, 4, 5, 6, 7, 8, 9, 9]

        >>> shift_to_the_left(array, 1, pad=True, trim=False)
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9]

        >>> shift_to_the_left(array, 1, pad=False, trim=True)
        [1, 2, 3, 4, 5, 6, 7, 8, 9]

        >>> shift_to_the_left(array, 1, pad=False, trim=False)
        Warning, with pad=False and trim=False, no change applied.
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    """
    if dist < 0:
        raise ValueError("Shift distance has to greater or equal than 0.")
    # NOTE(review): padding reads ``array[-1]``, so an empty ``array`` with
    # pad=True raises IndexError -- callers must pass a non-empty sequence.
    if pad:
        if trim:
            # Drop the first ``dist`` items, repeat the last item at the end.
            new_array = array[dist:] + [array[-1]] * dist
        else:
            # Keep everything, only repeat the last item at the end.
            new_array = array + [array[-1]] * dist
    else:
        if trim:
            # Drop the first ``dist`` items, no padding.
            new_array = array[dist:]
        else:
            print("Warning, with pad=False and trim=False, no change applied.")
            new_array = list(array)
    return new_array
constant[Shift array to the left.
:param array: An iterable object.
:type array: iterable object
:param dist: how far you want to shift
:type disk: int
:param pad: pad array[-1] to the right.
:type pad: boolean (default True)
:param trim: trim the first ``#dist`` items.
:type trim: boolean (default True)
Usage::
>>> array = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> shift_to_the_left(self.iterable_list, 1, pad=True, trim=True)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 9]
>>> shift_to_the_left(self.iterable_list, 1, pad=True, trim=False)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9]
>>> shift_to_the_left(self.iterable_list, 1, pad=False, trim=True)
[1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> shift_to_the_left(self.iterable_list, 1, pad=True, trim=True)
Warning, with pad=False and trim=False, no change applied.
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
]
if compare[name[dist] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da18bc73640>
if name[pad] begin[:]
if name[trim] begin[:]
variable[new_array] assign[=] binary_operation[call[name[array]][<ast.Slice object at 0x7da18eb57280>] + binary_operation[list[[<ast.Subscript object at 0x7da18bc71ae0>]] * name[dist]]]
return[name[new_array]] | keyword[def] identifier[shift_to_the_left] ( identifier[array] , identifier[dist] , identifier[pad] = keyword[True] , identifier[trim] = keyword[True] ):
literal[string]
keyword[if] identifier[dist] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[pad] :
keyword[if] identifier[trim] :
identifier[new_array] = identifier[array] [ identifier[dist] :]+[ identifier[array] [- literal[int] ]]* identifier[dist]
keyword[else] :
identifier[new_array] = identifier[array] +[ identifier[array] [- literal[int] ]]* identifier[dist]
keyword[else] :
keyword[if] identifier[trim] :
identifier[new_array] = identifier[array] [ identifier[dist] :]
keyword[else] :
identifier[print] ( literal[string] )
identifier[new_array] = identifier[list] ( identifier[array] )
keyword[return] identifier[new_array] | def shift_to_the_left(array, dist, pad=True, trim=True):
"""Shift array to the left.
:param array: An iterable object.
:type array: iterable object
:param dist: how far you want to shift
:type disk: int
:param pad: pad array[-1] to the right.
:type pad: boolean (default True)
:param trim: trim the first ``#dist`` items.
:type trim: boolean (default True)
Usage::
>>> array = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> shift_to_the_left(self.iterable_list, 1, pad=True, trim=True)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 9]
>>> shift_to_the_left(self.iterable_list, 1, pad=True, trim=False)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9]
>>> shift_to_the_left(self.iterable_list, 1, pad=False, trim=True)
[1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> shift_to_the_left(self.iterable_list, 1, pad=True, trim=True)
Warning, with pad=False and trim=False, no change applied.
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
"""
if dist < 0:
raise ValueError('Shift distance has to greater or equal than 0.') # depends on [control=['if'], data=[]]
if pad:
if trim:
new_array = array[dist:] + [array[-1]] * dist # depends on [control=['if'], data=[]]
else:
new_array = array + [array[-1]] * dist # depends on [control=['if'], data=[]]
elif trim:
new_array = array[dist:] # depends on [control=['if'], data=[]]
else:
print('Warning, with pad=False and trim=False, no change applied.')
new_array = list(array)
return new_array |
def filter_spec(spec, paths, wildcard='*', separator='/'):
    """
    Remove keys from a spec file.

    For example, with the following path: domains/*/disks/*/metadata
    all the metadata dicts from all domains disks will be removed.

    Args:
        spec (dict): spec to remove keys from
        paths (list): list of paths to the keys that should be removed
        wildcard (str): wildcard character
        separator (str): path separator

    Returns:
        None

    Raises:
        utils.LagoUserException: If a malformed path was detected
    """
    # ``collections.Mapping`` was removed in Python 3.10 and ``itervalues``
    # only exists on Python 2; import the ABC in a way that works on both.
    try:
        from collections.abc import Mapping
    except ImportError:  # Python 2
        from collections import Mapping

    def remove_key(path, spec):
        # Recursively walk ``path`` (a list of components) into ``spec``
        # and drop the final key.  An empty path is a no-op.
        if len(path) == 0:
            return
        elif len(path) == 1:
            key = path.pop()
            if not isinstance(spec, Mapping):
                raise LagoUserException(
                    'You have tried to remove the following key - "{key}".\n'
                    'Keys can not be removed from type {spec_type}\n'
                    'Please verify that path - "{{path}}" is valid'.format(
                        key=key, spec_type=type(spec)
                    )
                )
            if key == wildcard:
                # A trailing wildcard empties the whole mapping.
                spec.clear()
            else:
                # Missing leaf keys are silently ignored.
                spec.pop(key, None)
        else:
            current = path[0]
            if current == wildcard:
                # Fan out over every element of a list or every value of a
                # mapping.  ``iter(spec.values())`` yields the same items as
                # the old Python-2-only ``spec.itervalues()``.
                if isinstance(spec, list):
                    iterator = iter(spec)
                elif isinstance(spec, Mapping):
                    iterator = iter(spec.values())
                else:
                    raise LagoUserException(
                        'Glob char {char} should refer only to dict or list, '
                        'not to {spec_type}\n'
                        'Please fix path - "{{path}}"'.format(
                            char=wildcard, spec_type=type(spec)
                        )
                    )
                for i in iterator:
                    # Pass a fresh copy of the remaining path: remove_key
                    # mutates it (pop) at the last level.
                    remove_key(path[1:], i)
            else:
                try:
                    remove_key(path[1:], spec[current])
                except KeyError:
                    raise LagoUserException(
                        'Malformed path "{{path}}", key "{key}" '
                        'does not exist'.format(key=current)
                    )
                except TypeError:
                    raise LagoUserException(
                        'Malformed path "{{path}}", can not get '
                        'by key from type {spec_type}'.format(
                            spec_type=type(spec)
                        )
                    )

    for path in paths:
        try:
            remove_key(path.split(separator), spec)
        except LagoUserException as e:
            # NOTE(review): ``message`` is not a standard attribute on
            # Python 3 exceptions; this relies on LagoUserException
            # providing it -- confirm against its definition.
            e.message = e.message.format(path=path)
            raise
constant[
Remove keys from a spec file.
For example, with the following path: domains/*/disks/*/metadata
all the metadata dicts from all domains disks will be removed.
Args:
spec (dict): spec to remove keys from
paths (list): list of paths to the keys that should be removed
wildcard (str): wildcard character
separator (str): path separator
Returns:
None
Raises:
utils.LagoUserException: If a malformed path was detected
]
def function[remove_key, parameter[path, spec]]:
if compare[call[name[len], parameter[name[path]]] equal[==] constant[0]] begin[:]
return[None]
for taget[name[path]] in starred[name[paths]] begin[:]
<ast.Try object at 0x7da2041d8310> | keyword[def] identifier[filter_spec] ( identifier[spec] , identifier[paths] , identifier[wildcard] = literal[string] , identifier[separator] = literal[string] ):
literal[string]
keyword[def] identifier[remove_key] ( identifier[path] , identifier[spec] ):
keyword[if] identifier[len] ( identifier[path] )== literal[int] :
keyword[return]
keyword[elif] identifier[len] ( identifier[path] )== literal[int] :
identifier[key] = identifier[path] . identifier[pop] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[spec] , identifier[collections] . identifier[Mapping] ):
keyword[raise] identifier[LagoUserException] (
literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[key] = identifier[key] , identifier[spec_type] = identifier[type] ( identifier[spec] )
)
)
keyword[if] identifier[key] == identifier[wildcard] :
identifier[spec] . identifier[clear] ()
keyword[else] :
identifier[spec] . identifier[pop] ( identifier[key] , keyword[None] )
keyword[else] :
identifier[current] = identifier[path] [ literal[int] ]
keyword[if] identifier[current] == identifier[wildcard] :
keyword[if] identifier[isinstance] ( identifier[spec] , identifier[list] ):
identifier[iterator] = identifier[iter] ( identifier[spec] )
keyword[elif] identifier[isinstance] ( identifier[spec] , identifier[collections] . identifier[Mapping] ):
identifier[iterator] = identifier[spec] . identifier[itervalues] ()
keyword[else] :
keyword[raise] identifier[LagoUserException] (
literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[char] = identifier[wildcard] , identifier[spec_type] = identifier[type] ( identifier[spec] )
)
)
keyword[for] identifier[i] keyword[in] identifier[iterator] :
identifier[remove_key] ( identifier[path] [ literal[int] :], identifier[i] )
keyword[else] :
keyword[try] :
identifier[remove_key] ( identifier[path] [ literal[int] :], identifier[spec] [ identifier[current] ])
keyword[except] identifier[KeyError] :
keyword[raise] identifier[LagoUserException] (
literal[string]
literal[string] . identifier[format] ( identifier[key] = identifier[current] )
)
keyword[except] identifier[TypeError] :
keyword[raise] identifier[LagoUserException] (
literal[string]
literal[string] . identifier[format] (
identifier[spec_type] = identifier[type] ( identifier[spec] )
)
)
keyword[for] identifier[path] keyword[in] identifier[paths] :
keyword[try] :
identifier[remove_key] ( identifier[path] . identifier[split] ( identifier[separator] ), identifier[spec] )
keyword[except] identifier[LagoUserException] keyword[as] identifier[e] :
identifier[e] . identifier[message] = identifier[e] . identifier[message] . identifier[format] ( identifier[path] = identifier[path] )
keyword[raise] | def filter_spec(spec, paths, wildcard='*', separator='/'):
"""
Remove keys from a spec file.
For example, with the following path: domains/*/disks/*/metadata
all the metadata dicts from all domains disks will be removed.
Args:
spec (dict): spec to remove keys from
paths (list): list of paths to the keys that should be removed
wildcard (str): wildcard character
separator (str): path separator
Returns:
None
Raises:
utils.LagoUserException: If a malformed path was detected
"""
def remove_key(path, spec):
if len(path) == 0:
return # depends on [control=['if'], data=[]]
elif len(path) == 1:
key = path.pop()
if not isinstance(spec, collections.Mapping):
raise LagoUserException('You have tried to remove the following key - "{key}".\nKeys can not be removed from type {spec_type}\nPlease verify that path - "{{path}}" is valid'.format(key=key, spec_type=type(spec))) # depends on [control=['if'], data=[]]
if key == wildcard:
spec.clear() # depends on [control=['if'], data=[]]
else:
spec.pop(key, None) # depends on [control=['if'], data=[]]
else:
current = path[0]
if current == wildcard:
if isinstance(spec, list):
iterator = iter(spec) # depends on [control=['if'], data=[]]
elif isinstance(spec, collections.Mapping):
iterator = spec.itervalues() # depends on [control=['if'], data=[]]
else:
raise LagoUserException('Glob char {char} should refer only to dict or list, not to {spec_type}\nPlease fix path - "{{path}}"'.format(char=wildcard, spec_type=type(spec)))
for i in iterator:
remove_key(path[1:], i) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['wildcard']]
else:
try:
remove_key(path[1:], spec[current]) # depends on [control=['try'], data=[]]
except KeyError:
raise LagoUserException('Malformed path "{{path}}", key "{key}" does not exist'.format(key=current)) # depends on [control=['except'], data=[]]
except TypeError:
raise LagoUserException('Malformed path "{{path}}", can not get by key from type {spec_type}'.format(spec_type=type(spec))) # depends on [control=['except'], data=[]]
for path in paths:
try:
remove_key(path.split(separator), spec) # depends on [control=['try'], data=[]]
except LagoUserException as e:
e.message = e.message.format(path=path)
raise # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['path']] |
def start(self):
    """
    Start trapping WINCH signals and resizing the PTY.

    This method saves the previous WINCH handler so it can be restored on
    `stop()`.
    """
    def _on_winch(signum, frame):
        # Only react to the window-change signal; ignore anything else.
        if signum == signal.SIGWINCH:
            self.pty.resize()

    # signal.signal returns the handler that was previously installed.
    self.original_handler = signal.signal(signal.SIGWINCH, _on_winch)
constant[
Start trapping WINCH signals and resizing the PTY.
This method saves the previous WINCH handler so it can be restored on
`stop()`.
]
def function[handle, parameter[signum, frame]]:
if compare[name[signum] equal[==] name[signal].SIGWINCH] begin[:]
call[name[self].pty.resize, parameter[]]
name[self].original_handler assign[=] call[name[signal].signal, parameter[name[signal].SIGWINCH, name[handle]]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[def] identifier[handle] ( identifier[signum] , identifier[frame] ):
keyword[if] identifier[signum] == identifier[signal] . identifier[SIGWINCH] :
identifier[self] . identifier[pty] . identifier[resize] ()
identifier[self] . identifier[original_handler] = identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGWINCH] , identifier[handle] ) | def start(self):
"""
Start trapping WINCH signals and resizing the PTY.
This method saves the previous WINCH handler so it can be restored on
`stop()`.
"""
def handle(signum, frame):
if signum == signal.SIGWINCH:
self.pty.resize() # depends on [control=['if'], data=[]]
self.original_handler = signal.signal(signal.SIGWINCH, handle) |
def IntegerMin(left: vertex_constructor_param_types, right: vertex_constructor_param_types, label: Optional[str]=None) -> Vertex:
    """
    Finds the minimum between two vertices

    :param left: one of the vertices to find the minimum of
    :param right: one of the vertices to find the minimum of
    """
    # Resolve the JVM-side vertex constructor, then coerce both operands
    # to integer vertices before wrapping the result.
    vertex_ctor = context.jvm_view().IntegerMinVertex
    lhs = cast_to_integer_vertex(left)
    rhs = cast_to_integer_vertex(right)
    return Integer(vertex_ctor, label, lhs, rhs)
constant[
Finds the minimum between two vertices
:param left: one of the vertices to find the minimum of
:param right: one of the vertices to find the minimum of
]
return[call[name[Integer], parameter[call[name[context].jvm_view, parameter[]].IntegerMinVertex, name[label], call[name[cast_to_integer_vertex], parameter[name[left]]], call[name[cast_to_integer_vertex], parameter[name[right]]]]]] | keyword[def] identifier[IntegerMin] ( identifier[left] : identifier[vertex_constructor_param_types] , identifier[right] : identifier[vertex_constructor_param_types] , identifier[label] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[Vertex] :
literal[string]
keyword[return] identifier[Integer] ( identifier[context] . identifier[jvm_view] (). identifier[IntegerMinVertex] , identifier[label] , identifier[cast_to_integer_vertex] ( identifier[left] ), identifier[cast_to_integer_vertex] ( identifier[right] )) | def IntegerMin(left: vertex_constructor_param_types, right: vertex_constructor_param_types, label: Optional[str]=None) -> Vertex:
"""
Finds the minimum between two vertices
:param left: one of the vertices to find the minimum of
:param right: one of the vertices to find the minimum of
"""
return Integer(context.jvm_view().IntegerMinVertex, label, cast_to_integer_vertex(left), cast_to_integer_vertex(right)) |
def _set_buttons(self, chat, bot):
    """
    Helper method that builds ``_buttons``/``_buttons_flat`` for the
    given chat and bot, when the message carries a keyboard markup.
    """
    markup = self.reply_markup
    if isinstance(markup, (
            types.ReplyInlineMarkup, types.ReplyKeyboardMarkup)):
        button_rows = []
        for row in markup.rows:
            button_rows.append([
                MessageButton(self._client, btn, chat, bot, self.id)
                for btn in row.buttons
            ])
        self._buttons = button_rows
        # Flattened view of the same buttons, row by row.
        self._buttons_flat = [btn for row in button_rows for btn in row]
constant[
Helper methods to set the buttons given the input sender and chat.
]
if call[name[isinstance], parameter[name[self].reply_markup, tuple[[<ast.Attribute object at 0x7da1b1f96950>, <ast.Attribute object at 0x7da1b1f97490>]]]] begin[:]
name[self]._buttons assign[=] <ast.ListComp object at 0x7da1b1f97d30>
name[self]._buttons_flat assign[=] <ast.ListComp object at 0x7da1b1f94340> | keyword[def] identifier[_set_buttons] ( identifier[self] , identifier[chat] , identifier[bot] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[reply_markup] ,(
identifier[types] . identifier[ReplyInlineMarkup] , identifier[types] . identifier[ReplyKeyboardMarkup] )):
identifier[self] . identifier[_buttons] =[[
identifier[MessageButton] ( identifier[self] . identifier[_client] , identifier[button] , identifier[chat] , identifier[bot] , identifier[self] . identifier[id] )
keyword[for] identifier[button] keyword[in] identifier[row] . identifier[buttons]
] keyword[for] identifier[row] keyword[in] identifier[self] . identifier[reply_markup] . identifier[rows] ]
identifier[self] . identifier[_buttons_flat] =[ identifier[x] keyword[for] identifier[row] keyword[in] identifier[self] . identifier[_buttons] keyword[for] identifier[x] keyword[in] identifier[row] ] | def _set_buttons(self, chat, bot):
"""
Helper methods to set the buttons given the input sender and chat.
"""
if isinstance(self.reply_markup, (types.ReplyInlineMarkup, types.ReplyKeyboardMarkup)):
self._buttons = [[MessageButton(self._client, button, chat, bot, self.id) for button in row.buttons] for row in self.reply_markup.rows]
self._buttons_flat = [x for row in self._buttons for x in row] # depends on [control=['if'], data=[]] |
def is_valid_sid_for_chain(pid, sid):
    """Return True if ``sid`` can be assigned to the single object ``pid`` or to the
    chain to which ``pid`` belongs.

    - If the chain does not have a SID, the new SID must be previously unused.
    - If the chain already has a SID, the new SID must match the existing SID.

    All known PIDs are associated with a chain.

    Preconditions:
    - ``pid`` is verified to exist. E.g., with
      d1_gmn.app.views.asserts.is_existing_object().
    - ``sid`` is None or verified to be a SID
    """
    # A SID that is not in use anywhere can always be assigned.
    if _is_unused_did(sid):
        return True
    # Otherwise it must match the SID already held by pid's chain, if any.
    chain_sid = d1_gmn.app.revision.get_sid_by_pid(pid)
    return chain_sid is not None and chain_sid == sid
return existing_sid == sid | def function[is_valid_sid_for_chain, parameter[pid, sid]]:
constant[Return True if ``sid`` can be assigned to the single object ``pid`` or to the
chain to which ``pid`` belongs.
- If the chain does not have a SID, the new SID must be previously unused.
- If the chain already has a SID, the new SID must match the existing SID.
All known PIDs are associated with a chain.
Preconditions:
- ``pid`` is verified to exist. E.g., with
d1_gmn.app.views.asserts.is_existing_object().
- ``sid`` is None or verified to be a SID
]
if call[name[_is_unused_did], parameter[name[sid]]] begin[:]
return[constant[True]]
variable[existing_sid] assign[=] call[name[d1_gmn].app.revision.get_sid_by_pid, parameter[name[pid]]]
if compare[name[existing_sid] is constant[None]] begin[:]
return[constant[False]]
return[compare[name[existing_sid] equal[==] name[sid]]] | keyword[def] identifier[is_valid_sid_for_chain] ( identifier[pid] , identifier[sid] ):
literal[string]
keyword[if] identifier[_is_unused_did] ( identifier[sid] ):
keyword[return] keyword[True]
identifier[existing_sid] = identifier[d1_gmn] . identifier[app] . identifier[revision] . identifier[get_sid_by_pid] ( identifier[pid] )
keyword[if] identifier[existing_sid] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[return] identifier[existing_sid] == identifier[sid] | def is_valid_sid_for_chain(pid, sid):
"""Return True if ``sid`` can be assigned to the single object ``pid`` or to the
chain to which ``pid`` belongs.
- If the chain does not have a SID, the new SID must be previously unused.
- If the chain already has a SID, the new SID must match the existing SID.
All known PIDs are associated with a chain.
Preconditions:
- ``pid`` is verified to exist. E.g., with
d1_gmn.app.views.asserts.is_existing_object().
- ``sid`` is None or verified to be a SID
"""
if _is_unused_did(sid):
return True # depends on [control=['if'], data=[]]
existing_sid = d1_gmn.app.revision.get_sid_by_pid(pid)
if existing_sid is None:
return False # depends on [control=['if'], data=[]]
return existing_sid == sid |
def parse_known_args(self, args=None, namespace=None):
    """Parse args method to handle input-file dependent arguments.

    Performs a two-pass parse: the first pass relaxes required options and
    suppresses input-file errors so that ``opts.input_file`` gets populated;
    the second pass re-parses strictly into the namespace from pass one.
    Afterwards, ``opts.parameters`` is filled in from the input files when
    the user did not supply it, and each file's sampler-specific parser is
    given a chance to consume the leftover options.

    Returns a tuple ``(opts, unknown)`` where ``unknown`` is the list of
    options not understood by any of the input files.
    """
    # run parse args once to make sure the name space is populated with
    # the input files. We'll turn off raising NoInputFileErrors on this
    # pass
    self.no_input_file_err = True
    self._unset_required()
    opts, extra_opts = super(ResultsArgumentParser, self).parse_known_args(
        args, namespace)
    # now do it again, with strict input-file checking and required flags
    # restored; note the first-pass ``opts`` is passed as the namespace
    self.no_input_file_err = False
    self._reset_required()
    opts, extra_opts = super(ResultsArgumentParser, self).parse_known_args(
        args, opts)
    # populate the parameters option if it wasn't specified
    if opts.parameters is None:
        parameters = get_common_parameters(opts.input_file,
                                           collection='variable_params')
        # now call parse parameters action to populate the namespace
        self.actions['parameters'](self, opts, parameters)
    # parse the sampler-specific options and check for any unknowns
    unknown = []
    for fn in opts.input_file:
        fp = loadfile(fn, 'r')
        sampler_parser, _ = fp.extra_args_parser(skip_args=self.skip_args)
        if sampler_parser is not None:
            opts, still_unknown = sampler_parser.parse_known_args(
                extra_opts, namespace=opts)
            unknown.append(set(still_unknown))
    # the intersection of the unknowns are options not understood by
    # any of the files
    if len(unknown) > 0:
        unknown = set.intersection(*unknown)
    return opts, list(unknown)
constant[Parse args method to handle input-file dependent arguments.]
name[self].no_input_file_err assign[=] constant[True]
call[name[self]._unset_required, parameter[]]
<ast.Tuple object at 0x7da18dc05b40> assign[=] call[call[name[super], parameter[name[ResultsArgumentParser], name[self]]].parse_known_args, parameter[name[args], name[namespace]]]
name[self].no_input_file_err assign[=] constant[False]
call[name[self]._reset_required, parameter[]]
<ast.Tuple object at 0x7da18dc079a0> assign[=] call[call[name[super], parameter[name[ResultsArgumentParser], name[self]]].parse_known_args, parameter[name[args], name[opts]]]
if compare[name[opts].parameters is constant[None]] begin[:]
variable[parameters] assign[=] call[name[get_common_parameters], parameter[name[opts].input_file]]
call[call[name[self].actions][constant[parameters]], parameter[name[self], name[opts], name[parameters]]]
variable[unknown] assign[=] list[[]]
for taget[name[fn]] in starred[name[opts].input_file] begin[:]
variable[fp] assign[=] call[name[loadfile], parameter[name[fn], constant[r]]]
<ast.Tuple object at 0x7da18dc06b60> assign[=] call[name[fp].extra_args_parser, parameter[]]
if compare[name[sampler_parser] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da18dc07430> assign[=] call[name[sampler_parser].parse_known_args, parameter[name[extra_opts]]]
call[name[unknown].append, parameter[call[name[set], parameter[name[still_unknown]]]]]
if compare[call[name[len], parameter[name[unknown]]] greater[>] constant[0]] begin[:]
variable[unknown] assign[=] call[name[set].intersection, parameter[<ast.Starred object at 0x7da2054a43d0>]]
return[tuple[[<ast.Name object at 0x7da2054a4b80>, <ast.Call object at 0x7da2054a5090>]]] | keyword[def] identifier[parse_known_args] ( identifier[self] , identifier[args] = keyword[None] , identifier[namespace] = keyword[None] ):
literal[string]
identifier[self] . identifier[no_input_file_err] = keyword[True]
identifier[self] . identifier[_unset_required] ()
identifier[opts] , identifier[extra_opts] = identifier[super] ( identifier[ResultsArgumentParser] , identifier[self] ). identifier[parse_known_args] (
identifier[args] , identifier[namespace] )
identifier[self] . identifier[no_input_file_err] = keyword[False]
identifier[self] . identifier[_reset_required] ()
identifier[opts] , identifier[extra_opts] = identifier[super] ( identifier[ResultsArgumentParser] , identifier[self] ). identifier[parse_known_args] (
identifier[args] , identifier[opts] )
keyword[if] identifier[opts] . identifier[parameters] keyword[is] keyword[None] :
identifier[parameters] = identifier[get_common_parameters] ( identifier[opts] . identifier[input_file] ,
identifier[collection] = literal[string] )
identifier[self] . identifier[actions] [ literal[string] ]( identifier[self] , identifier[opts] , identifier[parameters] )
identifier[unknown] =[]
keyword[for] identifier[fn] keyword[in] identifier[opts] . identifier[input_file] :
identifier[fp] = identifier[loadfile] ( identifier[fn] , literal[string] )
identifier[sampler_parser] , identifier[_] = identifier[fp] . identifier[extra_args_parser] ( identifier[skip_args] = identifier[self] . identifier[skip_args] )
keyword[if] identifier[sampler_parser] keyword[is] keyword[not] keyword[None] :
identifier[opts] , identifier[still_unknown] = identifier[sampler_parser] . identifier[parse_known_args] (
identifier[extra_opts] , identifier[namespace] = identifier[opts] )
identifier[unknown] . identifier[append] ( identifier[set] ( identifier[still_unknown] ))
keyword[if] identifier[len] ( identifier[unknown] )> literal[int] :
identifier[unknown] = identifier[set] . identifier[intersection] (* identifier[unknown] )
keyword[return] identifier[opts] , identifier[list] ( identifier[unknown] ) | def parse_known_args(self, args=None, namespace=None):
"""Parse args method to handle input-file dependent arguments."""
# run parse args once to make sure the name space is populated with
# the input files. We'll turn off raising NoInputFileErrors on this
# pass
self.no_input_file_err = True
self._unset_required()
(opts, extra_opts) = super(ResultsArgumentParser, self).parse_known_args(args, namespace)
# now do it again
self.no_input_file_err = False
self._reset_required()
(opts, extra_opts) = super(ResultsArgumentParser, self).parse_known_args(args, opts)
# populate the parameters option if it wasn't specified
if opts.parameters is None:
parameters = get_common_parameters(opts.input_file, collection='variable_params')
# now call parse parameters action to populate the namespace
self.actions['parameters'](self, opts, parameters) # depends on [control=['if'], data=[]]
# parse the sampler-specific options and check for any unknowns
unknown = []
for fn in opts.input_file:
fp = loadfile(fn, 'r')
(sampler_parser, _) = fp.extra_args_parser(skip_args=self.skip_args)
if sampler_parser is not None:
(opts, still_unknown) = sampler_parser.parse_known_args(extra_opts, namespace=opts)
unknown.append(set(still_unknown)) # depends on [control=['if'], data=['sampler_parser']] # depends on [control=['for'], data=['fn']]
# the intersection of the unknowns are options not understood by
# any of the files
if len(unknown) > 0:
unknown = set.intersection(*unknown) # depends on [control=['if'], data=[]]
return (opts, list(unknown)) |
def echo_html_list_str(self, catid, infos):
    '''
    Render the HTML-format string for the info list of the given category.
    '''
    # Placeholder widget values; the image is a fixed spacer.
    widget_kwd = {
        'imgname': 'fixed/zhanwei.png',
        'zhiding': '',
        'tuiguang': '',
    }
    template = 'autogen/infolist/infolist_{0}.html'.format(catid)
    self.render(template,
                userinfo=self.userinfo,
                kwd=widget_kwd,
                html2text=html2text,
                post_infos=infos,
                widget_info=widget_kwd)
constant[
生成 list 后的 HTML 格式的字符串
]
variable[zhiding_str] assign[=] constant[]
variable[tuiguang_str] assign[=] constant[]
variable[imgname] assign[=] constant[fixed/zhanwei.png]
variable[kwd] assign[=] dictionary[[<ast.Constant object at 0x7da1b0669060>, <ast.Constant object at 0x7da1b0668520>, <ast.Constant object at 0x7da1b06685b0>], [<ast.Name object at 0x7da1b0668490>, <ast.Name object at 0x7da1b06684c0>, <ast.Name object at 0x7da1b06684f0>]]
call[name[self].render, parameter[call[constant[autogen/infolist/infolist_{0}.html].format, parameter[name[catid]]]]] | keyword[def] identifier[echo_html_list_str] ( identifier[self] , identifier[catid] , identifier[infos] ):
literal[string]
identifier[zhiding_str] = literal[string]
identifier[tuiguang_str] = literal[string]
identifier[imgname] = literal[string]
identifier[kwd] ={
literal[string] : identifier[imgname] ,
literal[string] : identifier[zhiding_str] ,
literal[string] : identifier[tuiguang_str] ,
}
identifier[self] . identifier[render] ( literal[string] . identifier[format] ( identifier[catid] ),
identifier[userinfo] = identifier[self] . identifier[userinfo] ,
identifier[kwd] = identifier[kwd] ,
identifier[html2text] = identifier[html2text] ,
identifier[post_infos] = identifier[infos] ,
identifier[widget_info] = identifier[kwd] ) | def echo_html_list_str(self, catid, infos):
"""
生成 list 后的 HTML 格式的字符串
"""
zhiding_str = ''
tuiguang_str = ''
imgname = 'fixed/zhanwei.png'
kwd = {'imgname': imgname, 'zhiding': zhiding_str, 'tuiguang': tuiguang_str}
self.render('autogen/infolist/infolist_{0}.html'.format(catid), userinfo=self.userinfo, kwd=kwd, html2text=html2text, post_infos=infos, widget_info=kwd) |
def xml(self, url, method='get', params=None, data=None):
    """
    Perform a request and parse the response body as XML.

    :type url: str
    :param url: API
    :type method: str
    :param method: HTTP METHOD
    :type params: dict
    :param params: query
    :type data: dict
    :param data: body
    :rtype: html.HtmlElement
    :return: parsed document
    """
    response = self.req(url, method, params, data)
    # Parse the raw bytes; decoding ourselves could break on utf8-mb4 content.
    return self.to_xml(response.content, base_url=response.url)
constant[
请求并返回xml
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:rtype: html.HtmlElement
:return:
]
variable[r] assign[=] call[name[self].req, parameter[name[url], name[method], name[params], name[data]]]
return[call[name[self].to_xml, parameter[name[r].content]]] | keyword[def] identifier[xml] ( identifier[self] , identifier[url] , identifier[method] = literal[string] , identifier[params] = keyword[None] , identifier[data] = keyword[None] ):
literal[string]
identifier[r] = identifier[self] . identifier[req] ( identifier[url] , identifier[method] , identifier[params] , identifier[data] )
keyword[return] identifier[self] . identifier[to_xml] ( identifier[r] . identifier[content] , identifier[base_url] = identifier[r] . identifier[url] ) | def xml(self, url, method='get', params=None, data=None):
"""
请求并返回xml
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:rtype: html.HtmlElement
:return:
"""
r = self.req(url, method, params, data)
# this is required for avoid utf8-mb4 lead to encoding error
return self.to_xml(r.content, base_url=r.url) |
def definitions_errors(self):
    """ Dictionary with errors of an *of-rule mapped to the index of the
    definition it occurred in. Returns :obj:`None` if not applicable.
    """
    if not self.is_logic_error:
        return None
    # The definition index sits right after this error's own schema path.
    prefix_len = len(self.schema_path)
    grouped = defaultdict(list)
    for child in self.child_errors:
        definition_index = child.schema_path[prefix_len]
        grouped[definition_index].append(child)
    return grouped
constant[ Dictionary with errors of an *of-rule mapped to the index of the
definition it occurred in. Returns :obj:`None` if not applicable.
]
if <ast.UnaryOp object at 0x7da1b1f80e50> begin[:]
return[constant[None]]
variable[result] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[error]] in starred[name[self].child_errors] begin[:]
variable[i] assign[=] call[name[error].schema_path][call[name[len], parameter[name[self].schema_path]]]
call[call[name[result]][name[i]].append, parameter[name[error]]]
return[name[result]] | keyword[def] identifier[definitions_errors] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_logic_error] :
keyword[return] keyword[None]
identifier[result] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[error] keyword[in] identifier[self] . identifier[child_errors] :
identifier[i] = identifier[error] . identifier[schema_path] [ identifier[len] ( identifier[self] . identifier[schema_path] )]
identifier[result] [ identifier[i] ]. identifier[append] ( identifier[error] )
keyword[return] identifier[result] | def definitions_errors(self):
""" Dictionary with errors of an *of-rule mapped to the index of the
definition it occurred in. Returns :obj:`None` if not applicable.
"""
if not self.is_logic_error:
return None # depends on [control=['if'], data=[]]
result = defaultdict(list)
for error in self.child_errors:
i = error.schema_path[len(self.schema_path)]
result[i].append(error) # depends on [control=['for'], data=['error']]
return result |
def activation(data: mx.sym.Symbol, act_type: str) -> mx.sym.Symbol:
    """
    Apply custom or standard activation.

    Custom activation types include:
     - Swish-1, also called Sigmoid-Weighted Linear Unit (SiLU): Ramachandran et
       al. (https://arxiv.org/pdf/1710.05941.pdf), Elfwing et al.
       (https://arxiv.org/pdf/1702.03118.pdf)
     - Gaussian Error Linear Unit (GELU): Hendrycks and Gimpel
       (https://arxiv.org/pdf/1606.08415.pdf)

    :param data: input Symbol of any shape.
    :param act_type: Type of activation.
    :return: output Symbol with same shape as input.
    """
    # TODO: Contribute these to MXNet? For now it appears that registered activation types must be implemented in C++.
    if act_type == C.SWISH1:
        # x * sigmoid(x)
        gate = mx.sym.Activation(data, act_type="sigmoid")
        return data * gate
    if act_type == C.GELU:
        # Tanh approximation of x * gaussian_cdf(x) used by Hendrycks and Gimpel
        inner = math.sqrt(2 / math.pi) * (data + (0.044715 * (data**3)))
        return 0.5 * data * (1 + mx.sym.Activation(inner, act_type="tanh"))
    return mx.sym.Activation(data, act_type=act_type)
constant[
Apply custom or standard activation.
Custom activation types include:
- Swish-1, also called Sigmoid-Weighted Linear Unit (SiLU): Ramachandran et
al. (https://arxiv.org/pdf/1710.05941.pdf), Elfwing et al.
(https://arxiv.org/pdf/1702.03118.pdf)
- Gaussian Error Linear Unit (GELU): Hendrycks and Gimpel
(https://arxiv.org/pdf/1606.08415.pdf)
:param data: input Symbol of any shape.
:param act_type: Type of activation.
:return: output Symbol with same shape as input.
]
if compare[name[act_type] equal[==] name[C].SWISH1] begin[:]
return[binary_operation[name[data] * call[name[mx].sym.Activation, parameter[name[data]]]]] | keyword[def] identifier[activation] ( identifier[data] : identifier[mx] . identifier[sym] . identifier[Symbol] , identifier[act_type] : identifier[str] )-> identifier[mx] . identifier[sym] . identifier[Symbol] :
literal[string]
keyword[if] identifier[act_type] == identifier[C] . identifier[SWISH1] :
keyword[return] identifier[data] * identifier[mx] . identifier[sym] . identifier[Activation] ( identifier[data] , identifier[act_type] = literal[string] )
keyword[elif] identifier[act_type] == identifier[C] . identifier[GELU] :
keyword[return] literal[int] * identifier[data] *( literal[int] + identifier[mx] . identifier[sym] . identifier[Activation] (( identifier[math] . identifier[sqrt] ( literal[int] / identifier[math] . identifier[pi] )*( identifier[data] +( literal[int] *( identifier[data] ** literal[int] )))),
identifier[act_type] = literal[string] ))
keyword[else] :
keyword[return] identifier[mx] . identifier[sym] . identifier[Activation] ( identifier[data] , identifier[act_type] = identifier[act_type] ) | def activation(data: mx.sym.Symbol, act_type: str) -> mx.sym.Symbol:
"""
Apply custom or standard activation.
Custom activation types include:
- Swish-1, also called Sigmoid-Weighted Linear Unit (SiLU): Ramachandran et
al. (https://arxiv.org/pdf/1710.05941.pdf), Elfwing et al.
(https://arxiv.org/pdf/1702.03118.pdf)
- Gaussian Error Linear Unit (GELU): Hendrycks and Gimpel
(https://arxiv.org/pdf/1606.08415.pdf)
:param data: input Symbol of any shape.
:param act_type: Type of activation.
:return: output Symbol with same shape as input.
"""
# TODO: Contribute these to MXNet? For now it appears that registered activation types must be implemented in C++.
if act_type == C.SWISH1:
return data * mx.sym.Activation(data, act_type='sigmoid') # depends on [control=['if'], data=[]]
elif act_type == C.GELU:
# Approximation of x * gaussian_cdf(x) used by Hendrycks and Gimpel
return 0.5 * data * (1 + mx.sym.Activation(math.sqrt(2 / math.pi) * (data + 0.044715 * data ** 3), act_type='tanh')) # depends on [control=['if'], data=[]]
else:
return mx.sym.Activation(data, act_type=act_type) |
def __create_kdtree(self):
    """!
    @brief Build the k-d tree from the representative points of the current CURE clusters.
           At the first iteration it contains all points from the input data set.

    @return (kdtree) k-d tree that consists of representative points of CURE clusters.
    """
    self.__tree = kdtree()
    # Insert every representative point, tagged with its owning cluster.
    for cluster in self.__queue:
        for rep_point in cluster.rep:
            self.__tree.insert(rep_point, cluster)
constant[!
@brief Create k-d tree in line with created clusters. At the first iteration contains all points from the input data set.
@return (kdtree) k-d tree that consist of representative points of CURE clusters.
]
name[self].__tree assign[=] call[name[kdtree], parameter[]]
for taget[name[current_cluster]] in starred[name[self].__queue] begin[:]
for taget[name[representative_point]] in starred[name[current_cluster].rep] begin[:]
call[name[self].__tree.insert, parameter[name[representative_point], name[current_cluster]]] | keyword[def] identifier[__create_kdtree] ( identifier[self] ):
literal[string]
identifier[self] . identifier[__tree] = identifier[kdtree] ()
keyword[for] identifier[current_cluster] keyword[in] identifier[self] . identifier[__queue] :
keyword[for] identifier[representative_point] keyword[in] identifier[current_cluster] . identifier[rep] :
identifier[self] . identifier[__tree] . identifier[insert] ( identifier[representative_point] , identifier[current_cluster] ) | def __create_kdtree(self):
"""!
@brief Create k-d tree in line with created clusters. At the first iteration contains all points from the input data set.
@return (kdtree) k-d tree that consist of representative points of CURE clusters.
"""
self.__tree = kdtree()
for current_cluster in self.__queue:
for representative_point in current_cluster.rep:
self.__tree.insert(representative_point, current_cluster) # depends on [control=['for'], data=['representative_point']] # depends on [control=['for'], data=['current_cluster']] |
def detect_batch(self, batch):
    """
    Run a forward pass on ``batch`` and return only the positive detections.

    :param batch: batch of inputs to run through the module
    :return: positive detections for the batch
    """
    self.mod.forward(batch, is_train=False)
    raw_detections = self.mod.get_outputs()[0]
    return Detector.filter_positive_detections(raw_detections)
constant[
Return detections for batch
:param batch:
:return:
]
call[name[self].mod.forward, parameter[name[batch]]]
variable[detections] assign[=] call[call[name[self].mod.get_outputs, parameter[]]][constant[0]]
variable[positive_detections] assign[=] call[name[Detector].filter_positive_detections, parameter[name[detections]]]
return[name[positive_detections]] | keyword[def] identifier[detect_batch] ( identifier[self] , identifier[batch] ):
literal[string]
identifier[self] . identifier[mod] . identifier[forward] ( identifier[batch] , identifier[is_train] = keyword[False] )
identifier[detections] = identifier[self] . identifier[mod] . identifier[get_outputs] ()[ literal[int] ]
identifier[positive_detections] = identifier[Detector] . identifier[filter_positive_detections] ( identifier[detections] )
keyword[return] identifier[positive_detections] | def detect_batch(self, batch):
"""
Return detections for batch
:param batch:
:return:
"""
self.mod.forward(batch, is_train=False)
detections = self.mod.get_outputs()[0]
positive_detections = Detector.filter_positive_detections(detections)
return positive_detections |
def translatePath(path):
    '''Translate a ``special://`` path to a folder in the OS's temp directory.

    Creates folders in the OS's temp directory. Doesn't touch any
    possible XBMC installation on the machine. Attempting to do as
    little work as possible to enable this function to work seamlessly.

    :param path: a ``special://<root>/<child>...`` path.
    :returns: the corresponding filesystem path under ``TEMP_DIR``.
    :raises ValueError: if ``path`` is not a valid ``special://`` path,
        has no child after the root, or uses an unknown root directory.
    '''
    valid_dirs = ['xbmc', 'home', 'temp', 'masterprofile', 'profile',
        'subtitles', 'userdata', 'database', 'thumbnails', 'recordings',
        'screenshots', 'musicplaylists', 'videoplaylists', 'cdrips', 'skin',
    ]
    # Validate with explicit raises (not assert) so the checks survive
    # running under ``python -O``, which strips assert statements.
    if not path.startswith('special://'):
        raise ValueError('Not a valid special:// path.')
    parts = path.split('/')[2:]
    if len(parts) < 2:
        raise ValueError('Need at least a single root directory')
    if parts[0] not in valid_dirs:
        raise ValueError('%s is not a valid root dir.' % parts[0])
    # We don't want to swallow any potential IOErrors here, so only makedir for
    # the root dir, the user is responsible for making any further child dirs
    _create_dir(os.path.join(TEMP_DIR, parts[0]))
    return os.path.join(TEMP_DIR, *parts)
constant[Creates folders in the OS's temp directory. Doesn't touch any
possible XBMC installation on the machine. Attempting to do as
little work as possible to enable this function to work seamlessly.
]
variable[valid_dirs] assign[=] list[[<ast.Constant object at 0x7da20e954040>, <ast.Constant object at 0x7da20e957c40>, <ast.Constant object at 0x7da20e9561d0>, <ast.Constant object at 0x7da20e955090>, <ast.Constant object at 0x7da20e956110>, <ast.Constant object at 0x7da20e9544f0>, <ast.Constant object at 0x7da20e957b20>, <ast.Constant object at 0x7da20e957ca0>, <ast.Constant object at 0x7da20e954520>, <ast.Constant object at 0x7da20e954e80>, <ast.Constant object at 0x7da20e9559f0>, <ast.Constant object at 0x7da20e956fe0>, <ast.Constant object at 0x7da20e957e50>, <ast.Constant object at 0x7da20e954850>, <ast.Constant object at 0x7da20e9579a0>]]
assert[call[name[path].startswith, parameter[constant[special://]]]]
variable[parts] assign[=] call[call[name[path].split, parameter[constant[/]]]][<ast.Slice object at 0x7da20e9578b0>]
assert[compare[call[name[len], parameter[name[parts]]] greater[>] constant[1]]]
assert[compare[call[name[parts]][constant[0]] in name[valid_dirs]]]
call[name[_create_dir], parameter[call[name[os].path.join, parameter[name[TEMP_DIR], call[name[parts]][constant[0]]]]]]
return[call[name[os].path.join, parameter[name[TEMP_DIR], <ast.Starred object at 0x7da20e955120>]]] | keyword[def] identifier[translatePath] ( identifier[path] ):
literal[string]
identifier[valid_dirs] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
]
keyword[assert] identifier[path] . identifier[startswith] ( literal[string] ), literal[string]
identifier[parts] = identifier[path] . identifier[split] ( literal[string] )[ literal[int] :]
keyword[assert] identifier[len] ( identifier[parts] )> literal[int] , literal[string]
keyword[assert] identifier[parts] [ literal[int] ] keyword[in] identifier[valid_dirs] , literal[string] % identifier[parts] [ literal[int] ]
identifier[_create_dir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[TEMP_DIR] , identifier[parts] [ literal[int] ]))
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[TEMP_DIR] ,* identifier[parts] ) | def translatePath(path):
"""Creates folders in the OS's temp directory. Doesn't touch any
possible XBMC installation on the machine. Attempting to do as
little work as possible to enable this function to work seamlessly.
"""
valid_dirs = ['xbmc', 'home', 'temp', 'masterprofile', 'profile', 'subtitles', 'userdata', 'database', 'thumbnails', 'recordings', 'screenshots', 'musicplaylists', 'videoplaylists', 'cdrips', 'skin']
assert path.startswith('special://'), 'Not a valid special:// path.'
parts = path.split('/')[2:]
assert len(parts) > 1, 'Need at least a single root directory'
assert parts[0] in valid_dirs, '%s is not a valid root dir.' % parts[0]
# We don't want to swallow any potential IOErrors here, so only makedir for
# the root dir, the user is responsible for making any further child dirs
_create_dir(os.path.join(TEMP_DIR, parts[0]))
return os.path.join(TEMP_DIR, *parts) |
def _prepare_docstrings(self, frame):
    """Assign docstrings to the constants handled by |Constants|
    to make them available in the interactive mode of Python."""
    if not config.USEAUTODOC:
        return
    filename = inspect.getsourcefile(frame)
    with open(filename) as file_:
        chunks = file_.read().split('"""')
    # After splitting on triple quotes, odd-indexed chunks are docstrings
    # and each even-indexed chunk is the code preceding one of them.
    for code_part, doc in zip(chunks[::2], chunks[1::2]):
        # The constant's name is the first token of the last code line
        # before the docstring.
        key = code_part.strip().split('\n')[-1].split()[0]
        const = self.get(key)
        if const:
            const.__doc__ = doc
constant[Assign docstrings to the constants handled by |Constants|
to make them available in the interactive mode of Python.]
if name[config].USEAUTODOC begin[:]
variable[filename] assign[=] call[name[inspect].getsourcefile, parameter[name[frame]]]
with call[name[open], parameter[name[filename]]] begin[:]
variable[sources] assign[=] call[call[name[file_].read, parameter[]].split, parameter[constant["""]]]
for taget[tuple[[<ast.Name object at 0x7da18f7235e0>, <ast.Name object at 0x7da18f7201c0>]]] in starred[call[name[zip], parameter[call[name[sources]][<ast.Slice object at 0x7da2044c2140>], call[name[sources]][<ast.Slice object at 0x7da2044c21d0>]]]] begin[:]
variable[code] assign[=] call[name[code].strip, parameter[]]
variable[key] assign[=] call[call[call[call[name[code].split, parameter[constant[
]]]][<ast.UnaryOp object at 0x7da2044c3580>].split, parameter[]]][constant[0]]
variable[value] assign[=] call[name[self].get, parameter[name[key]]]
if name[value] begin[:]
name[value].__doc__ assign[=] name[doc] | keyword[def] identifier[_prepare_docstrings] ( identifier[self] , identifier[frame] ):
literal[string]
keyword[if] identifier[config] . identifier[USEAUTODOC] :
identifier[filename] = identifier[inspect] . identifier[getsourcefile] ( identifier[frame] )
keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[file_] :
identifier[sources] = identifier[file_] . identifier[read] (). identifier[split] ( literal[string] )
keyword[for] identifier[code] , identifier[doc] keyword[in] identifier[zip] ( identifier[sources] [:: literal[int] ], identifier[sources] [ literal[int] :: literal[int] ]):
identifier[code] = identifier[code] . identifier[strip] ()
identifier[key] = identifier[code] . identifier[split] ( literal[string] )[- literal[int] ]. identifier[split] ()[ literal[int] ]
identifier[value] = identifier[self] . identifier[get] ( identifier[key] )
keyword[if] identifier[value] :
identifier[value] . identifier[__doc__] = identifier[doc] | def _prepare_docstrings(self, frame):
"""Assign docstrings to the constants handled by |Constants|
to make them available in the interactive mode of Python."""
if config.USEAUTODOC:
filename = inspect.getsourcefile(frame)
with open(filename) as file_:
sources = file_.read().split('"""') # depends on [control=['with'], data=['file_']]
for (code, doc) in zip(sources[::2], sources[1::2]):
code = code.strip()
key = code.split('\n')[-1].split()[0]
value = self.get(key)
if value:
value.__doc__ = doc # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] |
def _get_state(inspect_results):
'''
Helper for deriving the current state of the container from the inspect
results.
'''
if inspect_results.get('State', {}).get('Paused', False):
return 'paused'
elif inspect_results.get('State', {}).get('Running', False):
return 'running'
else:
return 'stopped' | def function[_get_state, parameter[inspect_results]]:
constant[
Helper for deriving the current state of the container from the inspect
results.
]
if call[call[name[inspect_results].get, parameter[constant[State], dictionary[[], []]]].get, parameter[constant[Paused], constant[False]]] begin[:]
return[constant[paused]] | keyword[def] identifier[_get_state] ( identifier[inspect_results] ):
literal[string]
keyword[if] identifier[inspect_results] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[False] ):
keyword[return] literal[string]
keyword[elif] identifier[inspect_results] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[False] ):
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string] | def _get_state(inspect_results):
"""
Helper for deriving the current state of the container from the inspect
results.
"""
if inspect_results.get('State', {}).get('Paused', False):
return 'paused' # depends on [control=['if'], data=[]]
elif inspect_results.get('State', {}).get('Running', False):
return 'running' # depends on [control=['if'], data=[]]
else:
return 'stopped' |
def main(unused_argv):
"""Run SC2 to play a game or a replay."""
stopwatch.sw.enabled = FLAGS.profile or FLAGS.trace
stopwatch.sw.trace = FLAGS.trace
if (FLAGS.map and FLAGS.replay) or (not FLAGS.map and not FLAGS.replay):
sys.exit("Must supply either a map or replay.")
if FLAGS.replay and not FLAGS.replay.lower().endswith("sc2replay"):
sys.exit("Replay must end in .SC2Replay.")
if FLAGS.realtime and FLAGS.replay:
# TODO(tewalds): Support realtime in replays once the game supports it.
sys.exit("realtime isn't possible for replays yet.")
if FLAGS.render and (FLAGS.realtime or FLAGS.full_screen):
sys.exit("disable pygame rendering if you want realtime or full_screen.")
if platform.system() == "Linux" and (FLAGS.realtime or FLAGS.full_screen):
sys.exit("realtime and full_screen only make sense on Windows/MacOS.")
if not FLAGS.render and FLAGS.render_sync:
sys.exit("render_sync only makes sense with pygame rendering on.")
run_config = run_configs.get()
interface = sc_pb.InterfaceOptions()
interface.raw = FLAGS.render
interface.score = True
interface.feature_layer.width = 24
if FLAGS.feature_screen_size and FLAGS.feature_minimap_size:
FLAGS.feature_screen_size.assign_to(interface.feature_layer.resolution)
FLAGS.feature_minimap_size.assign_to(
interface.feature_layer.minimap_resolution)
if FLAGS.rgb_screen_size and FLAGS.rgb_minimap_size:
FLAGS.rgb_screen_size.assign_to(interface.render.resolution)
FLAGS.rgb_minimap_size.assign_to(interface.render.minimap_resolution)
max_episode_steps = FLAGS.max_episode_steps
if FLAGS.map:
map_inst = maps.get(FLAGS.map)
if map_inst.game_steps_per_episode:
max_episode_steps = map_inst.game_steps_per_episode
create = sc_pb.RequestCreateGame(
realtime=FLAGS.realtime,
disable_fog=FLAGS.disable_fog,
local_map=sc_pb.LocalMap(map_path=map_inst.path,
map_data=map_inst.data(run_config)))
create.player_setup.add(type=sc_pb.Participant)
create.player_setup.add(type=sc_pb.Computer,
race=sc2_env.Race[FLAGS.bot_race],
difficulty=sc2_env.Difficulty[FLAGS.difficulty])
join = sc_pb.RequestJoinGame(
options=interface, race=sc2_env.Race[FLAGS.user_race],
player_name=FLAGS.user_name)
version = None
else:
replay_data = run_config.replay_data(FLAGS.replay)
start_replay = sc_pb.RequestStartReplay(
replay_data=replay_data,
options=interface,
disable_fog=FLAGS.disable_fog,
observed_player_id=FLAGS.observed_player)
version = get_replay_version(replay_data)
with run_config.start(version=version,
full_screen=FLAGS.full_screen) as controller:
if FLAGS.map:
controller.create_game(create)
controller.join_game(join)
else:
info = controller.replay_info(replay_data)
print(" Replay info ".center(60, "-"))
print(info)
print("-" * 60)
map_path = FLAGS.map_path or info.local_map_path
if map_path:
start_replay.map_data = run_config.map_data(map_path)
controller.start_replay(start_replay)
if FLAGS.render:
renderer = renderer_human.RendererHuman(
fps=FLAGS.fps, step_mul=FLAGS.step_mul,
render_sync=FLAGS.render_sync, video=FLAGS.video)
renderer.run(
run_config, controller, max_game_steps=FLAGS.max_game_steps,
game_steps_per_episode=max_episode_steps,
save_replay=FLAGS.save_replay)
else: # Still step forward so the Mac/Windows renderer works.
try:
while True:
frame_start_time = time.time()
if not FLAGS.realtime:
controller.step(FLAGS.step_mul)
obs = controller.observe()
if obs.player_result:
break
time.sleep(max(0, frame_start_time + 1 / FLAGS.fps - time.time()))
except KeyboardInterrupt:
pass
print("Score: ", obs.observation.score.score)
print("Result: ", obs.player_result)
if FLAGS.map and FLAGS.save_replay:
replay_save_loc = run_config.save_replay(
controller.save_replay(), "local", FLAGS.map)
print("Replay saved to:", replay_save_loc)
# Save scores so we know how the human player did.
with open(replay_save_loc.replace("SC2Replay", "txt"), "w") as f:
f.write("{}\n".format(obs.observation.score.score))
if FLAGS.profile:
print(stopwatch.sw) | def function[main, parameter[unused_argv]]:
constant[Run SC2 to play a game or a replay.]
name[stopwatch].sw.enabled assign[=] <ast.BoolOp object at 0x7da18f09f0a0>
name[stopwatch].sw.trace assign[=] name[FLAGS].trace
if <ast.BoolOp object at 0x7da18f09dc30> begin[:]
call[name[sys].exit, parameter[constant[Must supply either a map or replay.]]]
if <ast.BoolOp object at 0x7da18f09e530> begin[:]
call[name[sys].exit, parameter[constant[Replay must end in .SC2Replay.]]]
if <ast.BoolOp object at 0x7da18f09efe0> begin[:]
call[name[sys].exit, parameter[constant[realtime isn't possible for replays yet.]]]
if <ast.BoolOp object at 0x7da18f09e9e0> begin[:]
call[name[sys].exit, parameter[constant[disable pygame rendering if you want realtime or full_screen.]]]
if <ast.BoolOp object at 0x7da18f09d5a0> begin[:]
call[name[sys].exit, parameter[constant[realtime and full_screen only make sense on Windows/MacOS.]]]
if <ast.BoolOp object at 0x7da20e962ef0> begin[:]
call[name[sys].exit, parameter[constant[render_sync only makes sense with pygame rendering on.]]]
variable[run_config] assign[=] call[name[run_configs].get, parameter[]]
variable[interface] assign[=] call[name[sc_pb].InterfaceOptions, parameter[]]
name[interface].raw assign[=] name[FLAGS].render
name[interface].score assign[=] constant[True]
name[interface].feature_layer.width assign[=] constant[24]
if <ast.BoolOp object at 0x7da20c6e5c90> begin[:]
call[name[FLAGS].feature_screen_size.assign_to, parameter[name[interface].feature_layer.resolution]]
call[name[FLAGS].feature_minimap_size.assign_to, parameter[name[interface].feature_layer.minimap_resolution]]
if <ast.BoolOp object at 0x7da20c6e7460> begin[:]
call[name[FLAGS].rgb_screen_size.assign_to, parameter[name[interface].render.resolution]]
call[name[FLAGS].rgb_minimap_size.assign_to, parameter[name[interface].render.minimap_resolution]]
variable[max_episode_steps] assign[=] name[FLAGS].max_episode_steps
if name[FLAGS].map begin[:]
variable[map_inst] assign[=] call[name[maps].get, parameter[name[FLAGS].map]]
if name[map_inst].game_steps_per_episode begin[:]
variable[max_episode_steps] assign[=] name[map_inst].game_steps_per_episode
variable[create] assign[=] call[name[sc_pb].RequestCreateGame, parameter[]]
call[name[create].player_setup.add, parameter[]]
call[name[create].player_setup.add, parameter[]]
variable[join] assign[=] call[name[sc_pb].RequestJoinGame, parameter[]]
variable[version] assign[=] constant[None]
with call[name[run_config].start, parameter[]] begin[:]
if name[FLAGS].map begin[:]
call[name[controller].create_game, parameter[name[create]]]
call[name[controller].join_game, parameter[name[join]]]
if name[FLAGS].render begin[:]
variable[renderer] assign[=] call[name[renderer_human].RendererHuman, parameter[]]
call[name[renderer].run, parameter[name[run_config], name[controller]]]
if name[FLAGS].profile begin[:]
call[name[print], parameter[name[stopwatch].sw]] | keyword[def] identifier[main] ( identifier[unused_argv] ):
literal[string]
identifier[stopwatch] . identifier[sw] . identifier[enabled] = identifier[FLAGS] . identifier[profile] keyword[or] identifier[FLAGS] . identifier[trace]
identifier[stopwatch] . identifier[sw] . identifier[trace] = identifier[FLAGS] . identifier[trace]
keyword[if] ( identifier[FLAGS] . identifier[map] keyword[and] identifier[FLAGS] . identifier[replay] ) keyword[or] ( keyword[not] identifier[FLAGS] . identifier[map] keyword[and] keyword[not] identifier[FLAGS] . identifier[replay] ):
identifier[sys] . identifier[exit] ( literal[string] )
keyword[if] identifier[FLAGS] . identifier[replay] keyword[and] keyword[not] identifier[FLAGS] . identifier[replay] . identifier[lower] (). identifier[endswith] ( literal[string] ):
identifier[sys] . identifier[exit] ( literal[string] )
keyword[if] identifier[FLAGS] . identifier[realtime] keyword[and] identifier[FLAGS] . identifier[replay] :
identifier[sys] . identifier[exit] ( literal[string] )
keyword[if] identifier[FLAGS] . identifier[render] keyword[and] ( identifier[FLAGS] . identifier[realtime] keyword[or] identifier[FLAGS] . identifier[full_screen] ):
identifier[sys] . identifier[exit] ( literal[string] )
keyword[if] identifier[platform] . identifier[system] ()== literal[string] keyword[and] ( identifier[FLAGS] . identifier[realtime] keyword[or] identifier[FLAGS] . identifier[full_screen] ):
identifier[sys] . identifier[exit] ( literal[string] )
keyword[if] keyword[not] identifier[FLAGS] . identifier[render] keyword[and] identifier[FLAGS] . identifier[render_sync] :
identifier[sys] . identifier[exit] ( literal[string] )
identifier[run_config] = identifier[run_configs] . identifier[get] ()
identifier[interface] = identifier[sc_pb] . identifier[InterfaceOptions] ()
identifier[interface] . identifier[raw] = identifier[FLAGS] . identifier[render]
identifier[interface] . identifier[score] = keyword[True]
identifier[interface] . identifier[feature_layer] . identifier[width] = literal[int]
keyword[if] identifier[FLAGS] . identifier[feature_screen_size] keyword[and] identifier[FLAGS] . identifier[feature_minimap_size] :
identifier[FLAGS] . identifier[feature_screen_size] . identifier[assign_to] ( identifier[interface] . identifier[feature_layer] . identifier[resolution] )
identifier[FLAGS] . identifier[feature_minimap_size] . identifier[assign_to] (
identifier[interface] . identifier[feature_layer] . identifier[minimap_resolution] )
keyword[if] identifier[FLAGS] . identifier[rgb_screen_size] keyword[and] identifier[FLAGS] . identifier[rgb_minimap_size] :
identifier[FLAGS] . identifier[rgb_screen_size] . identifier[assign_to] ( identifier[interface] . identifier[render] . identifier[resolution] )
identifier[FLAGS] . identifier[rgb_minimap_size] . identifier[assign_to] ( identifier[interface] . identifier[render] . identifier[minimap_resolution] )
identifier[max_episode_steps] = identifier[FLAGS] . identifier[max_episode_steps]
keyword[if] identifier[FLAGS] . identifier[map] :
identifier[map_inst] = identifier[maps] . identifier[get] ( identifier[FLAGS] . identifier[map] )
keyword[if] identifier[map_inst] . identifier[game_steps_per_episode] :
identifier[max_episode_steps] = identifier[map_inst] . identifier[game_steps_per_episode]
identifier[create] = identifier[sc_pb] . identifier[RequestCreateGame] (
identifier[realtime] = identifier[FLAGS] . identifier[realtime] ,
identifier[disable_fog] = identifier[FLAGS] . identifier[disable_fog] ,
identifier[local_map] = identifier[sc_pb] . identifier[LocalMap] ( identifier[map_path] = identifier[map_inst] . identifier[path] ,
identifier[map_data] = identifier[map_inst] . identifier[data] ( identifier[run_config] )))
identifier[create] . identifier[player_setup] . identifier[add] ( identifier[type] = identifier[sc_pb] . identifier[Participant] )
identifier[create] . identifier[player_setup] . identifier[add] ( identifier[type] = identifier[sc_pb] . identifier[Computer] ,
identifier[race] = identifier[sc2_env] . identifier[Race] [ identifier[FLAGS] . identifier[bot_race] ],
identifier[difficulty] = identifier[sc2_env] . identifier[Difficulty] [ identifier[FLAGS] . identifier[difficulty] ])
identifier[join] = identifier[sc_pb] . identifier[RequestJoinGame] (
identifier[options] = identifier[interface] , identifier[race] = identifier[sc2_env] . identifier[Race] [ identifier[FLAGS] . identifier[user_race] ],
identifier[player_name] = identifier[FLAGS] . identifier[user_name] )
identifier[version] = keyword[None]
keyword[else] :
identifier[replay_data] = identifier[run_config] . identifier[replay_data] ( identifier[FLAGS] . identifier[replay] )
identifier[start_replay] = identifier[sc_pb] . identifier[RequestStartReplay] (
identifier[replay_data] = identifier[replay_data] ,
identifier[options] = identifier[interface] ,
identifier[disable_fog] = identifier[FLAGS] . identifier[disable_fog] ,
identifier[observed_player_id] = identifier[FLAGS] . identifier[observed_player] )
identifier[version] = identifier[get_replay_version] ( identifier[replay_data] )
keyword[with] identifier[run_config] . identifier[start] ( identifier[version] = identifier[version] ,
identifier[full_screen] = identifier[FLAGS] . identifier[full_screen] ) keyword[as] identifier[controller] :
keyword[if] identifier[FLAGS] . identifier[map] :
identifier[controller] . identifier[create_game] ( identifier[create] )
identifier[controller] . identifier[join_game] ( identifier[join] )
keyword[else] :
identifier[info] = identifier[controller] . identifier[replay_info] ( identifier[replay_data] )
identifier[print] ( literal[string] . identifier[center] ( literal[int] , literal[string] ))
identifier[print] ( identifier[info] )
identifier[print] ( literal[string] * literal[int] )
identifier[map_path] = identifier[FLAGS] . identifier[map_path] keyword[or] identifier[info] . identifier[local_map_path]
keyword[if] identifier[map_path] :
identifier[start_replay] . identifier[map_data] = identifier[run_config] . identifier[map_data] ( identifier[map_path] )
identifier[controller] . identifier[start_replay] ( identifier[start_replay] )
keyword[if] identifier[FLAGS] . identifier[render] :
identifier[renderer] = identifier[renderer_human] . identifier[RendererHuman] (
identifier[fps] = identifier[FLAGS] . identifier[fps] , identifier[step_mul] = identifier[FLAGS] . identifier[step_mul] ,
identifier[render_sync] = identifier[FLAGS] . identifier[render_sync] , identifier[video] = identifier[FLAGS] . identifier[video] )
identifier[renderer] . identifier[run] (
identifier[run_config] , identifier[controller] , identifier[max_game_steps] = identifier[FLAGS] . identifier[max_game_steps] ,
identifier[game_steps_per_episode] = identifier[max_episode_steps] ,
identifier[save_replay] = identifier[FLAGS] . identifier[save_replay] )
keyword[else] :
keyword[try] :
keyword[while] keyword[True] :
identifier[frame_start_time] = identifier[time] . identifier[time] ()
keyword[if] keyword[not] identifier[FLAGS] . identifier[realtime] :
identifier[controller] . identifier[step] ( identifier[FLAGS] . identifier[step_mul] )
identifier[obs] = identifier[controller] . identifier[observe] ()
keyword[if] identifier[obs] . identifier[player_result] :
keyword[break]
identifier[time] . identifier[sleep] ( identifier[max] ( literal[int] , identifier[frame_start_time] + literal[int] / identifier[FLAGS] . identifier[fps] - identifier[time] . identifier[time] ()))
keyword[except] identifier[KeyboardInterrupt] :
keyword[pass]
identifier[print] ( literal[string] , identifier[obs] . identifier[observation] . identifier[score] . identifier[score] )
identifier[print] ( literal[string] , identifier[obs] . identifier[player_result] )
keyword[if] identifier[FLAGS] . identifier[map] keyword[and] identifier[FLAGS] . identifier[save_replay] :
identifier[replay_save_loc] = identifier[run_config] . identifier[save_replay] (
identifier[controller] . identifier[save_replay] (), literal[string] , identifier[FLAGS] . identifier[map] )
identifier[print] ( literal[string] , identifier[replay_save_loc] )
keyword[with] identifier[open] ( identifier[replay_save_loc] . identifier[replace] ( literal[string] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[format] ( identifier[obs] . identifier[observation] . identifier[score] . identifier[score] ))
keyword[if] identifier[FLAGS] . identifier[profile] :
identifier[print] ( identifier[stopwatch] . identifier[sw] ) | def main(unused_argv):
"""Run SC2 to play a game or a replay."""
stopwatch.sw.enabled = FLAGS.profile or FLAGS.trace
stopwatch.sw.trace = FLAGS.trace
if FLAGS.map and FLAGS.replay or (not FLAGS.map and (not FLAGS.replay)):
sys.exit('Must supply either a map or replay.') # depends on [control=['if'], data=[]]
if FLAGS.replay and (not FLAGS.replay.lower().endswith('sc2replay')):
sys.exit('Replay must end in .SC2Replay.') # depends on [control=['if'], data=[]]
if FLAGS.realtime and FLAGS.replay:
# TODO(tewalds): Support realtime in replays once the game supports it.
sys.exit("realtime isn't possible for replays yet.") # depends on [control=['if'], data=[]]
if FLAGS.render and (FLAGS.realtime or FLAGS.full_screen):
sys.exit('disable pygame rendering if you want realtime or full_screen.') # depends on [control=['if'], data=[]]
if platform.system() == 'Linux' and (FLAGS.realtime or FLAGS.full_screen):
sys.exit('realtime and full_screen only make sense on Windows/MacOS.') # depends on [control=['if'], data=[]]
if not FLAGS.render and FLAGS.render_sync:
sys.exit('render_sync only makes sense with pygame rendering on.') # depends on [control=['if'], data=[]]
run_config = run_configs.get()
interface = sc_pb.InterfaceOptions()
interface.raw = FLAGS.render
interface.score = True
interface.feature_layer.width = 24
if FLAGS.feature_screen_size and FLAGS.feature_minimap_size:
FLAGS.feature_screen_size.assign_to(interface.feature_layer.resolution)
FLAGS.feature_minimap_size.assign_to(interface.feature_layer.minimap_resolution) # depends on [control=['if'], data=[]]
if FLAGS.rgb_screen_size and FLAGS.rgb_minimap_size:
FLAGS.rgb_screen_size.assign_to(interface.render.resolution)
FLAGS.rgb_minimap_size.assign_to(interface.render.minimap_resolution) # depends on [control=['if'], data=[]]
max_episode_steps = FLAGS.max_episode_steps
if FLAGS.map:
map_inst = maps.get(FLAGS.map)
if map_inst.game_steps_per_episode:
max_episode_steps = map_inst.game_steps_per_episode # depends on [control=['if'], data=[]]
create = sc_pb.RequestCreateGame(realtime=FLAGS.realtime, disable_fog=FLAGS.disable_fog, local_map=sc_pb.LocalMap(map_path=map_inst.path, map_data=map_inst.data(run_config)))
create.player_setup.add(type=sc_pb.Participant)
create.player_setup.add(type=sc_pb.Computer, race=sc2_env.Race[FLAGS.bot_race], difficulty=sc2_env.Difficulty[FLAGS.difficulty])
join = sc_pb.RequestJoinGame(options=interface, race=sc2_env.Race[FLAGS.user_race], player_name=FLAGS.user_name)
version = None # depends on [control=['if'], data=[]]
else:
replay_data = run_config.replay_data(FLAGS.replay)
start_replay = sc_pb.RequestStartReplay(replay_data=replay_data, options=interface, disable_fog=FLAGS.disable_fog, observed_player_id=FLAGS.observed_player)
version = get_replay_version(replay_data)
with run_config.start(version=version, full_screen=FLAGS.full_screen) as controller:
if FLAGS.map:
controller.create_game(create)
controller.join_game(join) # depends on [control=['if'], data=[]]
else:
info = controller.replay_info(replay_data)
print(' Replay info '.center(60, '-'))
print(info)
print('-' * 60)
map_path = FLAGS.map_path or info.local_map_path
if map_path:
start_replay.map_data = run_config.map_data(map_path) # depends on [control=['if'], data=[]]
controller.start_replay(start_replay)
if FLAGS.render:
renderer = renderer_human.RendererHuman(fps=FLAGS.fps, step_mul=FLAGS.step_mul, render_sync=FLAGS.render_sync, video=FLAGS.video)
renderer.run(run_config, controller, max_game_steps=FLAGS.max_game_steps, game_steps_per_episode=max_episode_steps, save_replay=FLAGS.save_replay) # depends on [control=['if'], data=[]]
else: # Still step forward so the Mac/Windows renderer works.
try:
while True:
frame_start_time = time.time()
if not FLAGS.realtime:
controller.step(FLAGS.step_mul) # depends on [control=['if'], data=[]]
obs = controller.observe()
if obs.player_result:
break # depends on [control=['if'], data=[]]
time.sleep(max(0, frame_start_time + 1 / FLAGS.fps - time.time())) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
pass # depends on [control=['except'], data=[]]
print('Score: ', obs.observation.score.score)
print('Result: ', obs.player_result)
if FLAGS.map and FLAGS.save_replay:
replay_save_loc = run_config.save_replay(controller.save_replay(), 'local', FLAGS.map)
print('Replay saved to:', replay_save_loc)
# Save scores so we know how the human player did.
with open(replay_save_loc.replace('SC2Replay', 'txt'), 'w') as f:
f.write('{}\n'.format(obs.observation.score.score)) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['controller']]
if FLAGS.profile:
print(stopwatch.sw) # depends on [control=['if'], data=[]] |
def set_signal_replacement(self, signum, handle):
"""A replacement for signal.signal which chains the signal behind
the debugger's handler"""
signame = lookup_signame(signum)
if signame is None:
self.dbgr.intf[-1].errmsg(("%s is not a signal number"
" I know about.") % signum)
return False
# Since the intent is to set a handler, we should pass this
# signal on to the handler
self.sigs[signame].pass_along = True
if self.check_and_adjust_sighandler(signame, self.sigs):
self.sigs[signame].old_handler = handle
return True
return False | def function[set_signal_replacement, parameter[self, signum, handle]]:
constant[A replacement for signal.signal which chains the signal behind
the debugger's handler]
variable[signame] assign[=] call[name[lookup_signame], parameter[name[signum]]]
if compare[name[signame] is constant[None]] begin[:]
call[call[name[self].dbgr.intf][<ast.UnaryOp object at 0x7da1b032e020>].errmsg, parameter[binary_operation[constant[%s is not a signal number I know about.] <ast.Mod object at 0x7da2590d6920> name[signum]]]]
return[constant[False]]
call[name[self].sigs][name[signame]].pass_along assign[=] constant[True]
if call[name[self].check_and_adjust_sighandler, parameter[name[signame], name[self].sigs]] begin[:]
call[name[self].sigs][name[signame]].old_handler assign[=] name[handle]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[set_signal_replacement] ( identifier[self] , identifier[signum] , identifier[handle] ):
literal[string]
identifier[signame] = identifier[lookup_signame] ( identifier[signum] )
keyword[if] identifier[signame] keyword[is] keyword[None] :
identifier[self] . identifier[dbgr] . identifier[intf] [- literal[int] ]. identifier[errmsg] (( literal[string]
literal[string] )% identifier[signum] )
keyword[return] keyword[False]
identifier[self] . identifier[sigs] [ identifier[signame] ]. identifier[pass_along] = keyword[True]
keyword[if] identifier[self] . identifier[check_and_adjust_sighandler] ( identifier[signame] , identifier[self] . identifier[sigs] ):
identifier[self] . identifier[sigs] [ identifier[signame] ]. identifier[old_handler] = identifier[handle]
keyword[return] keyword[True]
keyword[return] keyword[False] | def set_signal_replacement(self, signum, handle):
"""A replacement for signal.signal which chains the signal behind
the debugger's handler"""
signame = lookup_signame(signum)
if signame is None:
self.dbgr.intf[-1].errmsg('%s is not a signal number I know about.' % signum)
return False # depends on [control=['if'], data=[]]
# Since the intent is to set a handler, we should pass this
# signal on to the handler
self.sigs[signame].pass_along = True
if self.check_and_adjust_sighandler(signame, self.sigs):
self.sigs[signame].old_handler = handle
return True # depends on [control=['if'], data=[]]
return False |
def _ubridge_apply_filters(self, adapter_number, port_number, filters):
"""
Apply filter like rate limiting
:param adapter_number: adapter number
:param port_number: port number
:param filters: Array of filter dictionnary
"""
bridge_name = "IOL-BRIDGE-{}".format(self.application_id + 512)
location = '{bridge_name} {bay} {unit}'.format(
bridge_name=bridge_name,
bay=adapter_number,
unit=port_number)
yield from self._ubridge_send('iol_bridge reset_packet_filters ' + location)
for filter in self._build_filter_list(filters):
cmd = 'iol_bridge add_packet_filter {} {}'.format(
location,
filter)
yield from self._ubridge_send(cmd) | def function[_ubridge_apply_filters, parameter[self, adapter_number, port_number, filters]]:
constant[
Apply filter like rate limiting
:param adapter_number: adapter number
:param port_number: port number
:param filters: Array of filter dictionnary
]
variable[bridge_name] assign[=] call[constant[IOL-BRIDGE-{}].format, parameter[binary_operation[name[self].application_id + constant[512]]]]
variable[location] assign[=] call[constant[{bridge_name} {bay} {unit}].format, parameter[]]
<ast.YieldFrom object at 0x7da2045647f0>
for taget[name[filter]] in starred[call[name[self]._build_filter_list, parameter[name[filters]]]] begin[:]
variable[cmd] assign[=] call[constant[iol_bridge add_packet_filter {} {}].format, parameter[name[location], name[filter]]]
<ast.YieldFrom object at 0x7da204564310> | keyword[def] identifier[_ubridge_apply_filters] ( identifier[self] , identifier[adapter_number] , identifier[port_number] , identifier[filters] ):
literal[string]
identifier[bridge_name] = literal[string] . identifier[format] ( identifier[self] . identifier[application_id] + literal[int] )
identifier[location] = literal[string] . identifier[format] (
identifier[bridge_name] = identifier[bridge_name] ,
identifier[bay] = identifier[adapter_number] ,
identifier[unit] = identifier[port_number] )
keyword[yield] keyword[from] identifier[self] . identifier[_ubridge_send] ( literal[string] + identifier[location] )
keyword[for] identifier[filter] keyword[in] identifier[self] . identifier[_build_filter_list] ( identifier[filters] ):
identifier[cmd] = literal[string] . identifier[format] (
identifier[location] ,
identifier[filter] )
keyword[yield] keyword[from] identifier[self] . identifier[_ubridge_send] ( identifier[cmd] ) | def _ubridge_apply_filters(self, adapter_number, port_number, filters):
"""
Apply filter like rate limiting
:param adapter_number: adapter number
:param port_number: port number
:param filters: Array of filter dictionnary
"""
bridge_name = 'IOL-BRIDGE-{}'.format(self.application_id + 512)
location = '{bridge_name} {bay} {unit}'.format(bridge_name=bridge_name, bay=adapter_number, unit=port_number)
yield from self._ubridge_send('iol_bridge reset_packet_filters ' + location)
for filter in self._build_filter_list(filters):
cmd = 'iol_bridge add_packet_filter {} {}'.format(location, filter)
yield from self._ubridge_send(cmd) # depends on [control=['for'], data=['filter']] |
def search(spec, operator='and'):
"""Implement xmlrpc search command.
This only searches through the mirrored and private packages
"""
field_map = {
'name': 'name__icontains',
'summary': 'releases__summary__icontains',
}
query_filter = None
for field, values in spec.items():
for value in values:
if field not in field_map:
continue
field_filter = Q(**{field_map[field]: value})
if not query_filter:
query_filter = field_filter
continue
if operator == 'and':
query_filter &= field_filter
else:
query_filter |= field_filter
result = []
packages = models.Package.objects.filter(query_filter).all()[:20]
for package in packages:
release = package.releases.all()[0]
result.append({
'name': package.name,
'summary': release.summary,
'version': release.version,
'_pypi_ordering': 0,
})
return result | def function[search, parameter[spec, operator]]:
constant[Implement xmlrpc search command.
This only searches through the mirrored and private packages
]
variable[field_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b0706620>, <ast.Constant object at 0x7da1b0707400>], [<ast.Constant object at 0x7da1b0706800>, <ast.Constant object at 0x7da1b0705c60>]]
variable[query_filter] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da1b07070d0>, <ast.Name object at 0x7da1b0704df0>]]] in starred[call[name[spec].items, parameter[]]] begin[:]
for taget[name[value]] in starred[name[values]] begin[:]
if compare[name[field] <ast.NotIn object at 0x7da2590d7190> name[field_map]] begin[:]
continue
variable[field_filter] assign[=] call[name[Q], parameter[]]
if <ast.UnaryOp object at 0x7da1b07048e0> begin[:]
variable[query_filter] assign[=] name[field_filter]
continue
if compare[name[operator] equal[==] constant[and]] begin[:]
<ast.AugAssign object at 0x7da1b0706200>
variable[result] assign[=] list[[]]
variable[packages] assign[=] call[call[call[name[models].Package.objects.filter, parameter[name[query_filter]]].all, parameter[]]][<ast.Slice object at 0x7da1b0707730>]
for taget[name[package]] in starred[name[packages]] begin[:]
variable[release] assign[=] call[call[name[package].releases.all, parameter[]]][constant[0]]
call[name[result].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0707ca0>, <ast.Constant object at 0x7da1b07065f0>, <ast.Constant object at 0x7da1b0706cb0>, <ast.Constant object at 0x7da1b0706f80>], [<ast.Attribute object at 0x7da1b07068c0>, <ast.Attribute object at 0x7da1b0706fb0>, <ast.Attribute object at 0x7da1b0706c50>, <ast.Constant object at 0x7da1b0707910>]]]]
return[name[result]] | keyword[def] identifier[search] ( identifier[spec] , identifier[operator] = literal[string] ):
literal[string]
identifier[field_map] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[query_filter] = keyword[None]
keyword[for] identifier[field] , identifier[values] keyword[in] identifier[spec] . identifier[items] ():
keyword[for] identifier[value] keyword[in] identifier[values] :
keyword[if] identifier[field] keyword[not] keyword[in] identifier[field_map] :
keyword[continue]
identifier[field_filter] = identifier[Q] (**{ identifier[field_map] [ identifier[field] ]: identifier[value] })
keyword[if] keyword[not] identifier[query_filter] :
identifier[query_filter] = identifier[field_filter]
keyword[continue]
keyword[if] identifier[operator] == literal[string] :
identifier[query_filter] &= identifier[field_filter]
keyword[else] :
identifier[query_filter] |= identifier[field_filter]
identifier[result] =[]
identifier[packages] = identifier[models] . identifier[Package] . identifier[objects] . identifier[filter] ( identifier[query_filter] ). identifier[all] ()[: literal[int] ]
keyword[for] identifier[package] keyword[in] identifier[packages] :
identifier[release] = identifier[package] . identifier[releases] . identifier[all] ()[ literal[int] ]
identifier[result] . identifier[append] ({
literal[string] : identifier[package] . identifier[name] ,
literal[string] : identifier[release] . identifier[summary] ,
literal[string] : identifier[release] . identifier[version] ,
literal[string] : literal[int] ,
})
keyword[return] identifier[result] | def search(spec, operator='and'):
"""Implement xmlrpc search command.
This only searches through the mirrored and private packages
"""
field_map = {'name': 'name__icontains', 'summary': 'releases__summary__icontains'}
query_filter = None
for (field, values) in spec.items():
for value in values:
if field not in field_map:
continue # depends on [control=['if'], data=[]]
field_filter = Q(**{field_map[field]: value})
if not query_filter:
query_filter = field_filter
continue # depends on [control=['if'], data=[]]
if operator == 'and':
query_filter &= field_filter # depends on [control=['if'], data=[]]
else:
query_filter |= field_filter # depends on [control=['for'], data=['value']] # depends on [control=['for'], data=[]]
result = []
packages = models.Package.objects.filter(query_filter).all()[:20]
for package in packages:
release = package.releases.all()[0]
result.append({'name': package.name, 'summary': release.summary, 'version': release.version, '_pypi_ordering': 0}) # depends on [control=['for'], data=['package']]
return result |
def verify_cert(signature_chain_url: str) -> Optional[crypto.X509]:
"""Conducts series of Alexa SSL certificate verifications against Amazon Alexa requirements.
Args:
signature_chain_url: Signature certificate URL from SignatureCertChainUrl HTTP header.
Returns:
result: Amazon certificate if verification was successful, None if not.
"""
try:
certs_chain_get = requests.get(signature_chain_url)
except requests.exceptions.ConnectionError as e:
log.error(f'Amazon signature chain get error: {e}')
return None
certs_chain_txt = certs_chain_get.text
certs_chain = extract_certs(certs_chain_txt)
amazon_cert: crypto.X509 = certs_chain.pop(0)
# verify signature chain url
sc_url_verification = verify_sc_url(signature_chain_url)
if not sc_url_verification:
log.error(f'Amazon signature url {signature_chain_url} was not verified')
# verify not expired
expired_verification = not amazon_cert.has_expired()
if not expired_verification:
log.error(f'Amazon certificate ({signature_chain_url}) expired')
# verify subject alternative names
sans_verification = verify_sans(amazon_cert)
if not sans_verification:
log.error(f'Subject alternative names verification for ({signature_chain_url}) certificate failed')
# verify certs chain
chain_verification = verify_certs_chain(certs_chain, amazon_cert)
if not chain_verification:
log.error(f'Certificates chain verification for ({signature_chain_url}) certificate failed')
result = (sc_url_verification and expired_verification and sans_verification and chain_verification)
return amazon_cert if result else None | def function[verify_cert, parameter[signature_chain_url]]:
constant[Conducts series of Alexa SSL certificate verifications against Amazon Alexa requirements.
Args:
signature_chain_url: Signature certificate URL from SignatureCertChainUrl HTTP header.
Returns:
result: Amazon certificate if verification was successful, None if not.
]
<ast.Try object at 0x7da20c76ca90>
variable[certs_chain_txt] assign[=] name[certs_chain_get].text
variable[certs_chain] assign[=] call[name[extract_certs], parameter[name[certs_chain_txt]]]
<ast.AnnAssign object at 0x7da20c76d960>
variable[sc_url_verification] assign[=] call[name[verify_sc_url], parameter[name[signature_chain_url]]]
if <ast.UnaryOp object at 0x7da20c76f850> begin[:]
call[name[log].error, parameter[<ast.JoinedStr object at 0x7da20c76d930>]]
variable[expired_verification] assign[=] <ast.UnaryOp object at 0x7da1b0355cc0>
if <ast.UnaryOp object at 0x7da1b0354460> begin[:]
call[name[log].error, parameter[<ast.JoinedStr object at 0x7da1b0357f10>]]
variable[sans_verification] assign[=] call[name[verify_sans], parameter[name[amazon_cert]]]
if <ast.UnaryOp object at 0x7da1b0357700> begin[:]
call[name[log].error, parameter[<ast.JoinedStr object at 0x7da1b0355e10>]]
variable[chain_verification] assign[=] call[name[verify_certs_chain], parameter[name[certs_chain], name[amazon_cert]]]
if <ast.UnaryOp object at 0x7da1b0355e70> begin[:]
call[name[log].error, parameter[<ast.JoinedStr object at 0x7da1b03569b0>]]
variable[result] assign[=] <ast.BoolOp object at 0x7da1b0354a30>
return[<ast.IfExp object at 0x7da1b0357b50>] | keyword[def] identifier[verify_cert] ( identifier[signature_chain_url] : identifier[str] )-> identifier[Optional] [ identifier[crypto] . identifier[X509] ]:
literal[string]
keyword[try] :
identifier[certs_chain_get] = identifier[requests] . identifier[get] ( identifier[signature_chain_url] )
keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] keyword[as] identifier[e] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] keyword[None]
identifier[certs_chain_txt] = identifier[certs_chain_get] . identifier[text]
identifier[certs_chain] = identifier[extract_certs] ( identifier[certs_chain_txt] )
identifier[amazon_cert] : identifier[crypto] . identifier[X509] = identifier[certs_chain] . identifier[pop] ( literal[int] )
identifier[sc_url_verification] = identifier[verify_sc_url] ( identifier[signature_chain_url] )
keyword[if] keyword[not] identifier[sc_url_verification] :
identifier[log] . identifier[error] ( literal[string] )
identifier[expired_verification] = keyword[not] identifier[amazon_cert] . identifier[has_expired] ()
keyword[if] keyword[not] identifier[expired_verification] :
identifier[log] . identifier[error] ( literal[string] )
identifier[sans_verification] = identifier[verify_sans] ( identifier[amazon_cert] )
keyword[if] keyword[not] identifier[sans_verification] :
identifier[log] . identifier[error] ( literal[string] )
identifier[chain_verification] = identifier[verify_certs_chain] ( identifier[certs_chain] , identifier[amazon_cert] )
keyword[if] keyword[not] identifier[chain_verification] :
identifier[log] . identifier[error] ( literal[string] )
identifier[result] =( identifier[sc_url_verification] keyword[and] identifier[expired_verification] keyword[and] identifier[sans_verification] keyword[and] identifier[chain_verification] )
keyword[return] identifier[amazon_cert] keyword[if] identifier[result] keyword[else] keyword[None] | def verify_cert(signature_chain_url: str) -> Optional[crypto.X509]:
"""Conducts series of Alexa SSL certificate verifications against Amazon Alexa requirements.
Args:
signature_chain_url: Signature certificate URL from SignatureCertChainUrl HTTP header.
Returns:
result: Amazon certificate if verification was successful, None if not.
"""
try:
certs_chain_get = requests.get(signature_chain_url) # depends on [control=['try'], data=[]]
except requests.exceptions.ConnectionError as e:
log.error(f'Amazon signature chain get error: {e}')
return None # depends on [control=['except'], data=['e']]
certs_chain_txt = certs_chain_get.text
certs_chain = extract_certs(certs_chain_txt)
amazon_cert: crypto.X509 = certs_chain.pop(0)
# verify signature chain url
sc_url_verification = verify_sc_url(signature_chain_url)
if not sc_url_verification:
log.error(f'Amazon signature url {signature_chain_url} was not verified') # depends on [control=['if'], data=[]]
# verify not expired
expired_verification = not amazon_cert.has_expired()
if not expired_verification:
log.error(f'Amazon certificate ({signature_chain_url}) expired') # depends on [control=['if'], data=[]]
# verify subject alternative names
sans_verification = verify_sans(amazon_cert)
if not sans_verification:
log.error(f'Subject alternative names verification for ({signature_chain_url}) certificate failed') # depends on [control=['if'], data=[]]
# verify certs chain
chain_verification = verify_certs_chain(certs_chain, amazon_cert)
if not chain_verification:
log.error(f'Certificates chain verification for ({signature_chain_url}) certificate failed') # depends on [control=['if'], data=[]]
result = sc_url_verification and expired_verification and sans_verification and chain_verification
return amazon_cert if result else None |
def getBytes(self) -> list:
'''Returns the Root layer as list with bytes'''
tmpList = []
tmpList.extend(_FIRST_INDEX)
# first append the high byte from the Flags and Length
# high 4 bit: 0x7 then the bits 8-11(indexes) from _length
length = self.length - 16
tmpList.append((0x7 << 4) + (length >> 8))
# Then append the lower 8 bits from _length
tmpList.append(length & 0xFF)
tmpList.extend(self._vector)
tmpList.extend(self._cid)
return tmpList | def function[getBytes, parameter[self]]:
constant[Returns the Root layer as list with bytes]
variable[tmpList] assign[=] list[[]]
call[name[tmpList].extend, parameter[name[_FIRST_INDEX]]]
variable[length] assign[=] binary_operation[name[self].length - constant[16]]
call[name[tmpList].append, parameter[binary_operation[binary_operation[constant[7] <ast.LShift object at 0x7da2590d69e0> constant[4]] + binary_operation[name[length] <ast.RShift object at 0x7da2590d6a40> constant[8]]]]]
call[name[tmpList].append, parameter[binary_operation[name[length] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]]]
call[name[tmpList].extend, parameter[name[self]._vector]]
call[name[tmpList].extend, parameter[name[self]._cid]]
return[name[tmpList]] | keyword[def] identifier[getBytes] ( identifier[self] )-> identifier[list] :
literal[string]
identifier[tmpList] =[]
identifier[tmpList] . identifier[extend] ( identifier[_FIRST_INDEX] )
identifier[length] = identifier[self] . identifier[length] - literal[int]
identifier[tmpList] . identifier[append] (( literal[int] << literal[int] )+( identifier[length] >> literal[int] ))
identifier[tmpList] . identifier[append] ( identifier[length] & literal[int] )
identifier[tmpList] . identifier[extend] ( identifier[self] . identifier[_vector] )
identifier[tmpList] . identifier[extend] ( identifier[self] . identifier[_cid] )
keyword[return] identifier[tmpList] | def getBytes(self) -> list:
"""Returns the Root layer as list with bytes"""
tmpList = []
tmpList.extend(_FIRST_INDEX)
# first append the high byte from the Flags and Length
# high 4 bit: 0x7 then the bits 8-11(indexes) from _length
length = self.length - 16
tmpList.append((7 << 4) + (length >> 8))
# Then append the lower 8 bits from _length
tmpList.append(length & 255)
tmpList.extend(self._vector)
tmpList.extend(self._cid)
return tmpList |
def _ParseUpdateKeyValue(self, parser_mediator, registry_value, key_path):
"""Parses the UpdateKey value.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_value (dfwinreg.WinRegistryValue): Windows Registry value.
key_path (str): Windows Registry key path.
"""
if not registry_value.DataIsString():
parser_mediator.ProduceExtractionWarning(
'unsupported UpdateKey value data type: {0:s}'.format(
registry_value.data_type_string))
return
date_time_string = registry_value.GetDataAsObject()
if not date_time_string:
parser_mediator.ProduceExtractionWarning('missing UpdateKey value data')
return
re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string)
if not re_match:
parser_mediator.ProduceExtractionWarning(
'unsupported UpdateKey value data: {0!s}'.format(date_time_string))
return
month, day_of_month, year, hours, minutes, seconds, part_of_day = (
re_match.groups())
try:
year = int(year, 10)
month = int(month, 10)
day_of_month = int(day_of_month, 10)
hours = int(hours, 10)
minutes = int(minutes, 10)
seconds = int(seconds, 10)
except (TypeError, ValueError):
parser_mediator.ProduceExtractionWarning(
'invalid UpdateKey date time value: {0!s}'.format(date_time_string))
return
if part_of_day == 'PM':
hours += 12
time_elements_tuple = (year, month, day_of_month, hours, minutes, seconds)
try:
date_time = dfdatetime_time_elements.TimeElements(
time_elements_tuple=time_elements_tuple)
date_time.is_local_time = True
except ValueError:
parser_mediator.ProduceExtractionWarning(
'invalid UpdateKey date time value: {0!s}'.format(
time_elements_tuple))
return
event_data = CCleanerUpdateEventData()
event_data.key_path = key_path
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_UPDATE,
time_zone=parser_mediator.timezone)
parser_mediator.ProduceEventWithEventData(event, event_data) | def function[_ParseUpdateKeyValue, parameter[self, parser_mediator, registry_value, key_path]]:
constant[Parses the UpdateKey value.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_value (dfwinreg.WinRegistryValue): Windows Registry value.
key_path (str): Windows Registry key path.
]
if <ast.UnaryOp object at 0x7da207f99780> begin[:]
call[name[parser_mediator].ProduceExtractionWarning, parameter[call[constant[unsupported UpdateKey value data type: {0:s}].format, parameter[name[registry_value].data_type_string]]]]
return[None]
variable[date_time_string] assign[=] call[name[registry_value].GetDataAsObject, parameter[]]
if <ast.UnaryOp object at 0x7da207f987c0> begin[:]
call[name[parser_mediator].ProduceExtractionWarning, parameter[constant[missing UpdateKey value data]]]
return[None]
variable[re_match] assign[=] call[name[self]._UPDATE_DATE_TIME_RE.match, parameter[name[date_time_string]]]
if <ast.UnaryOp object at 0x7da207f9a0b0> begin[:]
call[name[parser_mediator].ProduceExtractionWarning, parameter[call[constant[unsupported UpdateKey value data: {0!s}].format, parameter[name[date_time_string]]]]]
return[None]
<ast.Tuple object at 0x7da207f98520> assign[=] call[name[re_match].groups, parameter[]]
<ast.Try object at 0x7da207f98f70>
if compare[name[part_of_day] equal[==] constant[PM]] begin[:]
<ast.AugAssign object at 0x7da207f03220>
variable[time_elements_tuple] assign[=] tuple[[<ast.Name object at 0x7da207f024a0>, <ast.Name object at 0x7da207f00f40>, <ast.Name object at 0x7da207f00fd0>, <ast.Name object at 0x7da207f00c10>, <ast.Name object at 0x7da207f02ec0>, <ast.Name object at 0x7da207f037f0>]]
<ast.Try object at 0x7da207f02ad0>
variable[event_data] assign[=] call[name[CCleanerUpdateEventData], parameter[]]
name[event_data].key_path assign[=] name[key_path]
variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[date_time], name[definitions].TIME_DESCRIPTION_UPDATE]]
call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]] | keyword[def] identifier[_ParseUpdateKeyValue] ( identifier[self] , identifier[parser_mediator] , identifier[registry_value] , identifier[key_path] ):
literal[string]
keyword[if] keyword[not] identifier[registry_value] . identifier[DataIsString] ():
identifier[parser_mediator] . identifier[ProduceExtractionWarning] (
literal[string] . identifier[format] (
identifier[registry_value] . identifier[data_type_string] ))
keyword[return]
identifier[date_time_string] = identifier[registry_value] . identifier[GetDataAsObject] ()
keyword[if] keyword[not] identifier[date_time_string] :
identifier[parser_mediator] . identifier[ProduceExtractionWarning] ( literal[string] )
keyword[return]
identifier[re_match] = identifier[self] . identifier[_UPDATE_DATE_TIME_RE] . identifier[match] ( identifier[date_time_string] )
keyword[if] keyword[not] identifier[re_match] :
identifier[parser_mediator] . identifier[ProduceExtractionWarning] (
literal[string] . identifier[format] ( identifier[date_time_string] ))
keyword[return]
identifier[month] , identifier[day_of_month] , identifier[year] , identifier[hours] , identifier[minutes] , identifier[seconds] , identifier[part_of_day] =(
identifier[re_match] . identifier[groups] ())
keyword[try] :
identifier[year] = identifier[int] ( identifier[year] , literal[int] )
identifier[month] = identifier[int] ( identifier[month] , literal[int] )
identifier[day_of_month] = identifier[int] ( identifier[day_of_month] , literal[int] )
identifier[hours] = identifier[int] ( identifier[hours] , literal[int] )
identifier[minutes] = identifier[int] ( identifier[minutes] , literal[int] )
identifier[seconds] = identifier[int] ( identifier[seconds] , literal[int] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
identifier[parser_mediator] . identifier[ProduceExtractionWarning] (
literal[string] . identifier[format] ( identifier[date_time_string] ))
keyword[return]
keyword[if] identifier[part_of_day] == literal[string] :
identifier[hours] += literal[int]
identifier[time_elements_tuple] =( identifier[year] , identifier[month] , identifier[day_of_month] , identifier[hours] , identifier[minutes] , identifier[seconds] )
keyword[try] :
identifier[date_time] = identifier[dfdatetime_time_elements] . identifier[TimeElements] (
identifier[time_elements_tuple] = identifier[time_elements_tuple] )
identifier[date_time] . identifier[is_local_time] = keyword[True]
keyword[except] identifier[ValueError] :
identifier[parser_mediator] . identifier[ProduceExtractionWarning] (
literal[string] . identifier[format] (
identifier[time_elements_tuple] ))
keyword[return]
identifier[event_data] = identifier[CCleanerUpdateEventData] ()
identifier[event_data] . identifier[key_path] = identifier[key_path]
identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] (
identifier[date_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_UPDATE] ,
identifier[time_zone] = identifier[parser_mediator] . identifier[timezone] )
identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] ) | def _ParseUpdateKeyValue(self, parser_mediator, registry_value, key_path):
"""Parses the UpdateKey value.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_value (dfwinreg.WinRegistryValue): Windows Registry value.
key_path (str): Windows Registry key path.
"""
if not registry_value.DataIsString():
parser_mediator.ProduceExtractionWarning('unsupported UpdateKey value data type: {0:s}'.format(registry_value.data_type_string))
return # depends on [control=['if'], data=[]]
date_time_string = registry_value.GetDataAsObject()
if not date_time_string:
parser_mediator.ProduceExtractionWarning('missing UpdateKey value data')
return # depends on [control=['if'], data=[]]
re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string)
if not re_match:
parser_mediator.ProduceExtractionWarning('unsupported UpdateKey value data: {0!s}'.format(date_time_string))
return # depends on [control=['if'], data=[]]
(month, day_of_month, year, hours, minutes, seconds, part_of_day) = re_match.groups()
try:
year = int(year, 10)
month = int(month, 10)
day_of_month = int(day_of_month, 10)
hours = int(hours, 10)
minutes = int(minutes, 10)
seconds = int(seconds, 10) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
parser_mediator.ProduceExtractionWarning('invalid UpdateKey date time value: {0!s}'.format(date_time_string))
return # depends on [control=['except'], data=[]]
if part_of_day == 'PM':
hours += 12 # depends on [control=['if'], data=[]]
time_elements_tuple = (year, month, day_of_month, hours, minutes, seconds)
try:
date_time = dfdatetime_time_elements.TimeElements(time_elements_tuple=time_elements_tuple)
date_time.is_local_time = True # depends on [control=['try'], data=[]]
except ValueError:
parser_mediator.ProduceExtractionWarning('invalid UpdateKey date time value: {0!s}'.format(time_elements_tuple))
return # depends on [control=['except'], data=[]]
event_data = CCleanerUpdateEventData()
event_data.key_path = key_path
event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone)
parser_mediator.ProduceEventWithEventData(event, event_data) |
def get_msg(self, block=True, timeout=None):
"Gets a message if there is one that is ready."
return self._in_queue.get(block, timeout) | def function[get_msg, parameter[self, block, timeout]]:
constant[Gets a message if there is one that is ready.]
return[call[name[self]._in_queue.get, parameter[name[block], name[timeout]]]] | keyword[def] identifier[get_msg] ( identifier[self] , identifier[block] = keyword[True] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_in_queue] . identifier[get] ( identifier[block] , identifier[timeout] ) | def get_msg(self, block=True, timeout=None):
"""Gets a message if there is one that is ready."""
return self._in_queue.get(block, timeout) |
def append(self, observation, action, reward, terminal, training=True):
"""Append a reward to the memory
# Argument
observation (dict): Observation returned by environment
action (int): Action taken to obtain this observation
reward (float): Reward obtained by taking this action
terminal (boolean): Is the state terminal
"""
super(EpisodeParameterMemory, self).append(observation, action, reward, terminal, training=training)
if training:
self.intermediate_rewards.append(reward) | def function[append, parameter[self, observation, action, reward, terminal, training]]:
constant[Append a reward to the memory
# Argument
observation (dict): Observation returned by environment
action (int): Action taken to obtain this observation
reward (float): Reward obtained by taking this action
terminal (boolean): Is the state terminal
]
call[call[name[super], parameter[name[EpisodeParameterMemory], name[self]]].append, parameter[name[observation], name[action], name[reward], name[terminal]]]
if name[training] begin[:]
call[name[self].intermediate_rewards.append, parameter[name[reward]]] | keyword[def] identifier[append] ( identifier[self] , identifier[observation] , identifier[action] , identifier[reward] , identifier[terminal] , identifier[training] = keyword[True] ):
literal[string]
identifier[super] ( identifier[EpisodeParameterMemory] , identifier[self] ). identifier[append] ( identifier[observation] , identifier[action] , identifier[reward] , identifier[terminal] , identifier[training] = identifier[training] )
keyword[if] identifier[training] :
identifier[self] . identifier[intermediate_rewards] . identifier[append] ( identifier[reward] ) | def append(self, observation, action, reward, terminal, training=True):
"""Append a reward to the memory
# Argument
observation (dict): Observation returned by environment
action (int): Action taken to obtain this observation
reward (float): Reward obtained by taking this action
terminal (boolean): Is the state terminal
"""
super(EpisodeParameterMemory, self).append(observation, action, reward, terminal, training=training)
if training:
self.intermediate_rewards.append(reward) # depends on [control=['if'], data=[]] |
def non_zero_row(arr):
"""
0. Empty row returns False.
>>> arr = array([])
>>> non_zero_row(arr)
False
1. Row with a zero returns False.
>>> arr = array([1, 4, 3, 0, 5, -1, -2])
>>> non_zero_row(arr)
False
2. Row with no zeros returns True.
>>> arr = array([-1, -0.1, 0.001, 2])
>>> non_zero_row(arr)
True
:param arr: array
:type arr: numpy array
:return empty: If row is completely free of zeros
:rtype empty: bool
"""
if len(arr) == 0:
return False
for item in arr:
if item == 0:
return False
return True | def function[non_zero_row, parameter[arr]]:
constant[
0. Empty row returns False.
>>> arr = array([])
>>> non_zero_row(arr)
False
1. Row with a zero returns False.
>>> arr = array([1, 4, 3, 0, 5, -1, -2])
>>> non_zero_row(arr)
False
2. Row with no zeros returns True.
>>> arr = array([-1, -0.1, 0.001, 2])
>>> non_zero_row(arr)
True
:param arr: array
:type arr: numpy array
:return empty: If row is completely free of zeros
:rtype empty: bool
]
if compare[call[name[len], parameter[name[arr]]] equal[==] constant[0]] begin[:]
return[constant[False]]
for taget[name[item]] in starred[name[arr]] begin[:]
if compare[name[item] equal[==] constant[0]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[non_zero_row] ( identifier[arr] ):
literal[string]
keyword[if] identifier[len] ( identifier[arr] )== literal[int] :
keyword[return] keyword[False]
keyword[for] identifier[item] keyword[in] identifier[arr] :
keyword[if] identifier[item] == literal[int] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def non_zero_row(arr):
"""
0. Empty row returns False.
>>> arr = array([])
>>> non_zero_row(arr)
False
1. Row with a zero returns False.
>>> arr = array([1, 4, 3, 0, 5, -1, -2])
>>> non_zero_row(arr)
False
2. Row with no zeros returns True.
>>> arr = array([-1, -0.1, 0.001, 2])
>>> non_zero_row(arr)
True
:param arr: array
:type arr: numpy array
:return empty: If row is completely free of zeros
:rtype empty: bool
"""
if len(arr) == 0:
return False # depends on [control=['if'], data=[]]
for item in arr:
if item == 0:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
return True |
def check_frequencies(
feed: "Feed", *, as_df: bool = False, include_warnings: bool = False
) -> List:
"""
Analog of :func:`check_agency` for ``feed.frequencies``.
"""
table = "frequencies"
problems = []
# Preliminary checks
if feed.frequencies is None:
return problems
f = feed.frequencies.copy()
problems = check_for_required_columns(problems, table, f)
if problems:
return format_problems(problems, as_df=as_df)
if include_warnings:
problems = check_for_invalid_columns(problems, table, f)
# Check trip_id
problems = check_column_linked_id(
problems, table, f, "trip_id", feed.trips
)
# Check start_time and end_time
time_cols = ["start_time", "end_time"]
for col in time_cols:
problems = check_column(problems, table, f, col, valid_time)
for col in time_cols:
f[col] = f[col].map(hp.timestr_to_seconds)
# Start_time should be earlier than end_time
cond = f["start_time"] >= f["end_time"]
problems = check_table(
problems, table, f, cond, "start_time not earlier than end_time"
)
# Headway periods should not overlap
f = f.sort_values(["trip_id", "start_time"])
for __, group in f.groupby("trip_id"):
a = group["start_time"].values
b = group["end_time"].values
indices = np.flatnonzero(a[1:] < b[:-1]).tolist()
if indices:
problems.append(
[
"error",
"Headway periods for the same trip overlap",
table,
indices,
]
)
# Check headway_secs
v = lambda x: x >= 0
problems = check_column(problems, table, f, "headway_secs", v)
# Check exact_times
v = lambda x: x in range(2)
problems = check_column(
problems, table, f, "exact_times", v, column_required=False
)
return format_problems(problems, as_df=as_df) | def function[check_frequencies, parameter[feed]]:
constant[
Analog of :func:`check_agency` for ``feed.frequencies``.
]
variable[table] assign[=] constant[frequencies]
variable[problems] assign[=] list[[]]
if compare[name[feed].frequencies is constant[None]] begin[:]
return[name[problems]]
variable[f] assign[=] call[name[feed].frequencies.copy, parameter[]]
variable[problems] assign[=] call[name[check_for_required_columns], parameter[name[problems], name[table], name[f]]]
if name[problems] begin[:]
return[call[name[format_problems], parameter[name[problems]]]]
if name[include_warnings] begin[:]
variable[problems] assign[=] call[name[check_for_invalid_columns], parameter[name[problems], name[table], name[f]]]
variable[problems] assign[=] call[name[check_column_linked_id], parameter[name[problems], name[table], name[f], constant[trip_id], name[feed].trips]]
variable[time_cols] assign[=] list[[<ast.Constant object at 0x7da1b0ca54e0>, <ast.Constant object at 0x7da1b0ca5d80>]]
for taget[name[col]] in starred[name[time_cols]] begin[:]
variable[problems] assign[=] call[name[check_column], parameter[name[problems], name[table], name[f], name[col], name[valid_time]]]
for taget[name[col]] in starred[name[time_cols]] begin[:]
call[name[f]][name[col]] assign[=] call[call[name[f]][name[col]].map, parameter[name[hp].timestr_to_seconds]]
variable[cond] assign[=] compare[call[name[f]][constant[start_time]] greater_or_equal[>=] call[name[f]][constant[end_time]]]
variable[problems] assign[=] call[name[check_table], parameter[name[problems], name[table], name[f], name[cond], constant[start_time not earlier than end_time]]]
variable[f] assign[=] call[name[f].sort_values, parameter[list[[<ast.Constant object at 0x7da1b0ca49a0>, <ast.Constant object at 0x7da1b0ca5d20>]]]]
for taget[tuple[[<ast.Name object at 0x7da1b0ca64d0>, <ast.Name object at 0x7da1b0ca47c0>]]] in starred[call[name[f].groupby, parameter[constant[trip_id]]]] begin[:]
variable[a] assign[=] call[name[group]][constant[start_time]].values
variable[b] assign[=] call[name[group]][constant[end_time]].values
variable[indices] assign[=] call[call[name[np].flatnonzero, parameter[compare[call[name[a]][<ast.Slice object at 0x7da1b0ca7580>] less[<] call[name[b]][<ast.Slice object at 0x7da1b0ca7df0>]]]].tolist, parameter[]]
if name[indices] begin[:]
call[name[problems].append, parameter[list[[<ast.Constant object at 0x7da1b0ca7040>, <ast.Constant object at 0x7da1b0ca6fb0>, <ast.Name object at 0x7da1b0ca4a60>, <ast.Name object at 0x7da1b0cb37c0>]]]]
variable[v] assign[=] <ast.Lambda object at 0x7da1b0cb07c0>
variable[problems] assign[=] call[name[check_column], parameter[name[problems], name[table], name[f], constant[headway_secs], name[v]]]
variable[v] assign[=] <ast.Lambda object at 0x7da1b0c8a590>
variable[problems] assign[=] call[name[check_column], parameter[name[problems], name[table], name[f], constant[exact_times], name[v]]]
return[call[name[format_problems], parameter[name[problems]]]] | keyword[def] identifier[check_frequencies] (
identifier[feed] : literal[string] ,*, identifier[as_df] : identifier[bool] = keyword[False] , identifier[include_warnings] : identifier[bool] = keyword[False]
)-> identifier[List] :
literal[string]
identifier[table] = literal[string]
identifier[problems] =[]
keyword[if] identifier[feed] . identifier[frequencies] keyword[is] keyword[None] :
keyword[return] identifier[problems]
identifier[f] = identifier[feed] . identifier[frequencies] . identifier[copy] ()
identifier[problems] = identifier[check_for_required_columns] ( identifier[problems] , identifier[table] , identifier[f] )
keyword[if] identifier[problems] :
keyword[return] identifier[format_problems] ( identifier[problems] , identifier[as_df] = identifier[as_df] )
keyword[if] identifier[include_warnings] :
identifier[problems] = identifier[check_for_invalid_columns] ( identifier[problems] , identifier[table] , identifier[f] )
identifier[problems] = identifier[check_column_linked_id] (
identifier[problems] , identifier[table] , identifier[f] , literal[string] , identifier[feed] . identifier[trips]
)
identifier[time_cols] =[ literal[string] , literal[string] ]
keyword[for] identifier[col] keyword[in] identifier[time_cols] :
identifier[problems] = identifier[check_column] ( identifier[problems] , identifier[table] , identifier[f] , identifier[col] , identifier[valid_time] )
keyword[for] identifier[col] keyword[in] identifier[time_cols] :
identifier[f] [ identifier[col] ]= identifier[f] [ identifier[col] ]. identifier[map] ( identifier[hp] . identifier[timestr_to_seconds] )
identifier[cond] = identifier[f] [ literal[string] ]>= identifier[f] [ literal[string] ]
identifier[problems] = identifier[check_table] (
identifier[problems] , identifier[table] , identifier[f] , identifier[cond] , literal[string]
)
identifier[f] = identifier[f] . identifier[sort_values] ([ literal[string] , literal[string] ])
keyword[for] identifier[__] , identifier[group] keyword[in] identifier[f] . identifier[groupby] ( literal[string] ):
identifier[a] = identifier[group] [ literal[string] ]. identifier[values]
identifier[b] = identifier[group] [ literal[string] ]. identifier[values]
identifier[indices] = identifier[np] . identifier[flatnonzero] ( identifier[a] [ literal[int] :]< identifier[b] [:- literal[int] ]). identifier[tolist] ()
keyword[if] identifier[indices] :
identifier[problems] . identifier[append] (
[
literal[string] ,
literal[string] ,
identifier[table] ,
identifier[indices] ,
]
)
identifier[v] = keyword[lambda] identifier[x] : identifier[x] >= literal[int]
identifier[problems] = identifier[check_column] ( identifier[problems] , identifier[table] , identifier[f] , literal[string] , identifier[v] )
identifier[v] = keyword[lambda] identifier[x] : identifier[x] keyword[in] identifier[range] ( literal[int] )
identifier[problems] = identifier[check_column] (
identifier[problems] , identifier[table] , identifier[f] , literal[string] , identifier[v] , identifier[column_required] = keyword[False]
)
keyword[return] identifier[format_problems] ( identifier[problems] , identifier[as_df] = identifier[as_df] ) | def check_frequencies(feed: 'Feed', *, as_df: bool=False, include_warnings: bool=False) -> List:
"""
Analog of :func:`check_agency` for ``feed.frequencies``.
"""
table = 'frequencies'
problems = []
# Preliminary checks
if feed.frequencies is None:
return problems # depends on [control=['if'], data=[]]
f = feed.frequencies.copy()
problems = check_for_required_columns(problems, table, f)
if problems:
return format_problems(problems, as_df=as_df) # depends on [control=['if'], data=[]]
if include_warnings:
problems = check_for_invalid_columns(problems, table, f) # depends on [control=['if'], data=[]]
# Check trip_id
problems = check_column_linked_id(problems, table, f, 'trip_id', feed.trips)
# Check start_time and end_time
time_cols = ['start_time', 'end_time']
for col in time_cols:
problems = check_column(problems, table, f, col, valid_time) # depends on [control=['for'], data=['col']]
for col in time_cols:
f[col] = f[col].map(hp.timestr_to_seconds) # depends on [control=['for'], data=['col']]
# Start_time should be earlier than end_time
cond = f['start_time'] >= f['end_time']
problems = check_table(problems, table, f, cond, 'start_time not earlier than end_time')
# Headway periods should not overlap
f = f.sort_values(['trip_id', 'start_time'])
for (__, group) in f.groupby('trip_id'):
a = group['start_time'].values
b = group['end_time'].values
indices = np.flatnonzero(a[1:] < b[:-1]).tolist()
if indices:
problems.append(['error', 'Headway periods for the same trip overlap', table, indices]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Check headway_secs
v = lambda x: x >= 0
problems = check_column(problems, table, f, 'headway_secs', v)
# Check exact_times
v = lambda x: x in range(2)
problems = check_column(problems, table, f, 'exact_times', v, column_required=False)
return format_problems(problems, as_df=as_df) |
def wait_connected(self, conns=None, timeout=None):
'''Wait for connections to be made and their handshakes to finish
:param conns:
a single or list of (host, port) tuples with the connections that
must be finished before the method will return. defaults to all the
peers the :class:`Hub` was instantiated with.
:param timeout:
maximum time to wait in seconds. with None, there is no timeout.
:type timeout: float or None
:returns:
``True`` if all connections were made, ``False`` one or more
failed.
'''
if timeout:
deadline = time.time() + timeout
conns = conns or self._started_peers.keys()
if not hasattr(conns, "__iter__"):
conns = [conns]
for peer_addr in conns:
remaining = max(0, deadline - time.time()) if timeout else None
if not self._started_peers[peer_addr].wait_connected(remaining):
if timeout:
log.warn("connect wait timed out after %.2f seconds" %
timeout)
return False
return True | def function[wait_connected, parameter[self, conns, timeout]]:
constant[Wait for connections to be made and their handshakes to finish
:param conns:
a single or list of (host, port) tuples with the connections that
must be finished before the method will return. defaults to all the
peers the :class:`Hub` was instantiated with.
:param timeout:
maximum time to wait in seconds. with None, there is no timeout.
:type timeout: float or None
:returns:
``True`` if all connections were made, ``False`` one or more
failed.
]
if name[timeout] begin[:]
variable[deadline] assign[=] binary_operation[call[name[time].time, parameter[]] + name[timeout]]
variable[conns] assign[=] <ast.BoolOp object at 0x7da2044c3460>
if <ast.UnaryOp object at 0x7da2044c3d00> begin[:]
variable[conns] assign[=] list[[<ast.Name object at 0x7da2044c22c0>]]
for taget[name[peer_addr]] in starred[name[conns]] begin[:]
variable[remaining] assign[=] <ast.IfExp object at 0x7da2044c2350>
if <ast.UnaryOp object at 0x7da2044c01f0> begin[:]
if name[timeout] begin[:]
call[name[log].warn, parameter[binary_operation[constant[connect wait timed out after %.2f seconds] <ast.Mod object at 0x7da2590d6920> name[timeout]]]]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[wait_connected] ( identifier[self] , identifier[conns] = keyword[None] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] identifier[timeout] :
identifier[deadline] = identifier[time] . identifier[time] ()+ identifier[timeout]
identifier[conns] = identifier[conns] keyword[or] identifier[self] . identifier[_started_peers] . identifier[keys] ()
keyword[if] keyword[not] identifier[hasattr] ( identifier[conns] , literal[string] ):
identifier[conns] =[ identifier[conns] ]
keyword[for] identifier[peer_addr] keyword[in] identifier[conns] :
identifier[remaining] = identifier[max] ( literal[int] , identifier[deadline] - identifier[time] . identifier[time] ()) keyword[if] identifier[timeout] keyword[else] keyword[None]
keyword[if] keyword[not] identifier[self] . identifier[_started_peers] [ identifier[peer_addr] ]. identifier[wait_connected] ( identifier[remaining] ):
keyword[if] identifier[timeout] :
identifier[log] . identifier[warn] ( literal[string] %
identifier[timeout] )
keyword[return] keyword[False]
keyword[return] keyword[True] | def wait_connected(self, conns=None, timeout=None):
"""Wait for connections to be made and their handshakes to finish
:param conns:
a single or list of (host, port) tuples with the connections that
must be finished before the method will return. defaults to all the
peers the :class:`Hub` was instantiated with.
:param timeout:
maximum time to wait in seconds. with None, there is no timeout.
:type timeout: float or None
:returns:
``True`` if all connections were made, ``False`` one or more
failed.
"""
if timeout:
deadline = time.time() + timeout # depends on [control=['if'], data=[]]
conns = conns or self._started_peers.keys()
if not hasattr(conns, '__iter__'):
conns = [conns] # depends on [control=['if'], data=[]]
for peer_addr in conns:
remaining = max(0, deadline - time.time()) if timeout else None
if not self._started_peers[peer_addr].wait_connected(remaining):
if timeout:
log.warn('connect wait timed out after %.2f seconds' % timeout) # depends on [control=['if'], data=[]]
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['peer_addr']]
return True |
def with_exit_condition(self, exit_condition: Optional[bool]=True) -> 'MonitorTask':
"""
Sets the flag indicating that the task should also run after the optimisation is ended.
"""
self._exit_condition = exit_condition
return self | def function[with_exit_condition, parameter[self, exit_condition]]:
constant[
Sets the flag indicating that the task should also run after the optimisation is ended.
]
name[self]._exit_condition assign[=] name[exit_condition]
return[name[self]] | keyword[def] identifier[with_exit_condition] ( identifier[self] , identifier[exit_condition] : identifier[Optional] [ identifier[bool] ]= keyword[True] )-> literal[string] :
literal[string]
identifier[self] . identifier[_exit_condition] = identifier[exit_condition]
keyword[return] identifier[self] | def with_exit_condition(self, exit_condition: Optional[bool]=True) -> 'MonitorTask':
"""
Sets the flag indicating that the task should also run after the optimisation is ended.
"""
self._exit_condition = exit_condition
return self |
def parse_gene(gene_info):
"""Parse a gene line with information from a panel file
Args:
gene_info(dict): dictionary with gene info
Returns:
gene(dict): A dictionary with the gene information
{
'hgnc_id': int,
'hgnc_symbol': str,
'disease_associated_transcripts': list(str),
'inheritance_models': list(str),
'mosaicism': bool,
'reduced_penetrance': bool,
'database_entry_version': str,
}
"""
gene = {}
# This is either hgnc id or hgnc symbol
identifier = None
hgnc_id = None
try:
if 'hgnc_id' in gene_info:
hgnc_id = int(gene_info['hgnc_id'])
elif 'hgnc_idnumber' in gene_info:
hgnc_id = int(gene_info['hgnc_idnumber'])
elif 'hgncid' in gene_info:
hgnc_id = int(gene_info['hgncid'])
except ValueError as e:
raise SyntaxError("Invalid hgnc id: {0}".format(hgnc_id))
gene['hgnc_id'] = hgnc_id
identifier = hgnc_id
hgnc_symbol = None
if 'hgnc_symbol' in gene_info:
hgnc_symbol = gene_info['hgnc_symbol']
elif 'hgncsymbol' in gene_info:
hgnc_symbol = gene_info['hgncsymbol']
elif 'symbol' in gene_info:
hgnc_symbol = gene_info['symbol']
gene['hgnc_symbol'] = hgnc_symbol
if not identifier:
if hgnc_symbol:
identifier = hgnc_symbol
else:
raise SyntaxError("No gene identifier could be found")
gene['identifier'] = identifier
# Disease associated transcripts is a ','-separated list of
# manually curated transcripts
transcripts = ""
if 'disease_associated_transcripts' in gene_info:
transcripts = gene_info['disease_associated_transcripts']
elif 'disease_associated_transcript' in gene_info:
transcripts = gene_info['disease_associated_transcript']
elif 'transcripts' in gene_info:
transcripts = gene_info['transcripts']
gene['transcripts'] = [
transcript.strip() for transcript in
transcripts.split(',') if transcript
]
# Genetic disease models is a ','-separated list of manually curated
# inheritance patterns that are followed for a gene
models = ""
if 'genetic_disease_models' in gene_info:
models = gene_info['genetic_disease_models']
elif 'genetic_disease_model' in gene_info:
models = gene_info['genetic_disease_model']
elif 'inheritance_models' in gene_info:
models = gene_info['inheritance_models']
elif 'genetic_inheritance_models' in gene_info:
models = gene_info['genetic_inheritance_models']
gene['inheritance_models'] = [
model.strip() for model in models.split(',')
if model.strip() in VALID_MODELS
]
# If a gene is known to be associated with mosaicism this is annotated
gene['mosaicism'] = True if gene_info.get('mosaicism') else False
# If a gene is known to have reduced penetrance this is annotated
gene['reduced_penetrance'] = True if gene_info.get('reduced_penetrance') else False
# The database entry version is a way to track when a a gene was added or
# modified, optional
gene['database_entry_version'] = gene_info.get('database_entry_version')
return gene | def function[parse_gene, parameter[gene_info]]:
constant[Parse a gene line with information from a panel file
Args:
gene_info(dict): dictionary with gene info
Returns:
gene(dict): A dictionary with the gene information
{
'hgnc_id': int,
'hgnc_symbol': str,
'disease_associated_transcripts': list(str),
'inheritance_models': list(str),
'mosaicism': bool,
'reduced_penetrance': bool,
'database_entry_version': str,
}
]
variable[gene] assign[=] dictionary[[], []]
variable[identifier] assign[=] constant[None]
variable[hgnc_id] assign[=] constant[None]
<ast.Try object at 0x7da18eb56ad0>
call[name[gene]][constant[hgnc_id]] assign[=] name[hgnc_id]
variable[identifier] assign[=] name[hgnc_id]
variable[hgnc_symbol] assign[=] constant[None]
if compare[constant[hgnc_symbol] in name[gene_info]] begin[:]
variable[hgnc_symbol] assign[=] call[name[gene_info]][constant[hgnc_symbol]]
call[name[gene]][constant[hgnc_symbol]] assign[=] name[hgnc_symbol]
if <ast.UnaryOp object at 0x7da18eb56830> begin[:]
if name[hgnc_symbol] begin[:]
variable[identifier] assign[=] name[hgnc_symbol]
call[name[gene]][constant[identifier]] assign[=] name[identifier]
variable[transcripts] assign[=] constant[]
if compare[constant[disease_associated_transcripts] in name[gene_info]] begin[:]
variable[transcripts] assign[=] call[name[gene_info]][constant[disease_associated_transcripts]]
call[name[gene]][constant[transcripts]] assign[=] <ast.ListComp object at 0x7da20e74b070>
variable[models] assign[=] constant[]
if compare[constant[genetic_disease_models] in name[gene_info]] begin[:]
variable[models] assign[=] call[name[gene_info]][constant[genetic_disease_models]]
call[name[gene]][constant[inheritance_models]] assign[=] <ast.ListComp object at 0x7da18f58cc40>
call[name[gene]][constant[mosaicism]] assign[=] <ast.IfExp object at 0x7da18f58e1a0>
call[name[gene]][constant[reduced_penetrance]] assign[=] <ast.IfExp object at 0x7da18f58f460>
call[name[gene]][constant[database_entry_version]] assign[=] call[name[gene_info].get, parameter[constant[database_entry_version]]]
return[name[gene]] | keyword[def] identifier[parse_gene] ( identifier[gene_info] ):
literal[string]
identifier[gene] ={}
identifier[identifier] = keyword[None]
identifier[hgnc_id] = keyword[None]
keyword[try] :
keyword[if] literal[string] keyword[in] identifier[gene_info] :
identifier[hgnc_id] = identifier[int] ( identifier[gene_info] [ literal[string] ])
keyword[elif] literal[string] keyword[in] identifier[gene_info] :
identifier[hgnc_id] = identifier[int] ( identifier[gene_info] [ literal[string] ])
keyword[elif] literal[string] keyword[in] identifier[gene_info] :
identifier[hgnc_id] = identifier[int] ( identifier[gene_info] [ literal[string] ])
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[raise] identifier[SyntaxError] ( literal[string] . identifier[format] ( identifier[hgnc_id] ))
identifier[gene] [ literal[string] ]= identifier[hgnc_id]
identifier[identifier] = identifier[hgnc_id]
identifier[hgnc_symbol] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[gene_info] :
identifier[hgnc_symbol] = identifier[gene_info] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[gene_info] :
identifier[hgnc_symbol] = identifier[gene_info] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[gene_info] :
identifier[hgnc_symbol] = identifier[gene_info] [ literal[string] ]
identifier[gene] [ literal[string] ]= identifier[hgnc_symbol]
keyword[if] keyword[not] identifier[identifier] :
keyword[if] identifier[hgnc_symbol] :
identifier[identifier] = identifier[hgnc_symbol]
keyword[else] :
keyword[raise] identifier[SyntaxError] ( literal[string] )
identifier[gene] [ literal[string] ]= identifier[identifier]
identifier[transcripts] = literal[string]
keyword[if] literal[string] keyword[in] identifier[gene_info] :
identifier[transcripts] = identifier[gene_info] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[gene_info] :
identifier[transcripts] = identifier[gene_info] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[gene_info] :
identifier[transcripts] = identifier[gene_info] [ literal[string] ]
identifier[gene] [ literal[string] ]=[
identifier[transcript] . identifier[strip] () keyword[for] identifier[transcript] keyword[in]
identifier[transcripts] . identifier[split] ( literal[string] ) keyword[if] identifier[transcript]
]
identifier[models] = literal[string]
keyword[if] literal[string] keyword[in] identifier[gene_info] :
identifier[models] = identifier[gene_info] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[gene_info] :
identifier[models] = identifier[gene_info] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[gene_info] :
identifier[models] = identifier[gene_info] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[gene_info] :
identifier[models] = identifier[gene_info] [ literal[string] ]
identifier[gene] [ literal[string] ]=[
identifier[model] . identifier[strip] () keyword[for] identifier[model] keyword[in] identifier[models] . identifier[split] ( literal[string] )
keyword[if] identifier[model] . identifier[strip] () keyword[in] identifier[VALID_MODELS]
]
identifier[gene] [ literal[string] ]= keyword[True] keyword[if] identifier[gene_info] . identifier[get] ( literal[string] ) keyword[else] keyword[False]
identifier[gene] [ literal[string] ]= keyword[True] keyword[if] identifier[gene_info] . identifier[get] ( literal[string] ) keyword[else] keyword[False]
identifier[gene] [ literal[string] ]= identifier[gene_info] . identifier[get] ( literal[string] )
keyword[return] identifier[gene] | def parse_gene(gene_info):
"""Parse a gene line with information from a panel file
Args:
gene_info(dict): dictionary with gene info
Returns:
gene(dict): A dictionary with the gene information
{
'hgnc_id': int,
'hgnc_symbol': str,
'disease_associated_transcripts': list(str),
'inheritance_models': list(str),
'mosaicism': bool,
'reduced_penetrance': bool,
'database_entry_version': str,
}
"""
gene = {}
# This is either hgnc id or hgnc symbol
identifier = None
hgnc_id = None
try:
if 'hgnc_id' in gene_info:
hgnc_id = int(gene_info['hgnc_id']) # depends on [control=['if'], data=['gene_info']]
elif 'hgnc_idnumber' in gene_info:
hgnc_id = int(gene_info['hgnc_idnumber']) # depends on [control=['if'], data=['gene_info']]
elif 'hgncid' in gene_info:
hgnc_id = int(gene_info['hgncid']) # depends on [control=['if'], data=['gene_info']] # depends on [control=['try'], data=[]]
except ValueError as e:
raise SyntaxError('Invalid hgnc id: {0}'.format(hgnc_id)) # depends on [control=['except'], data=[]]
gene['hgnc_id'] = hgnc_id
identifier = hgnc_id
hgnc_symbol = None
if 'hgnc_symbol' in gene_info:
hgnc_symbol = gene_info['hgnc_symbol'] # depends on [control=['if'], data=['gene_info']]
elif 'hgncsymbol' in gene_info:
hgnc_symbol = gene_info['hgncsymbol'] # depends on [control=['if'], data=['gene_info']]
elif 'symbol' in gene_info:
hgnc_symbol = gene_info['symbol'] # depends on [control=['if'], data=['gene_info']]
gene['hgnc_symbol'] = hgnc_symbol
if not identifier:
if hgnc_symbol:
identifier = hgnc_symbol # depends on [control=['if'], data=[]]
else:
raise SyntaxError('No gene identifier could be found') # depends on [control=['if'], data=[]]
gene['identifier'] = identifier
# Disease associated transcripts is a ','-separated list of
# manually curated transcripts
transcripts = ''
if 'disease_associated_transcripts' in gene_info:
transcripts = gene_info['disease_associated_transcripts'] # depends on [control=['if'], data=['gene_info']]
elif 'disease_associated_transcript' in gene_info:
transcripts = gene_info['disease_associated_transcript'] # depends on [control=['if'], data=['gene_info']]
elif 'transcripts' in gene_info:
transcripts = gene_info['transcripts'] # depends on [control=['if'], data=['gene_info']]
gene['transcripts'] = [transcript.strip() for transcript in transcripts.split(',') if transcript]
# Genetic disease models is a ','-separated list of manually curated
# inheritance patterns that are followed for a gene
models = ''
if 'genetic_disease_models' in gene_info:
models = gene_info['genetic_disease_models'] # depends on [control=['if'], data=['gene_info']]
elif 'genetic_disease_model' in gene_info:
models = gene_info['genetic_disease_model'] # depends on [control=['if'], data=['gene_info']]
elif 'inheritance_models' in gene_info:
models = gene_info['inheritance_models'] # depends on [control=['if'], data=['gene_info']]
elif 'genetic_inheritance_models' in gene_info:
models = gene_info['genetic_inheritance_models'] # depends on [control=['if'], data=['gene_info']]
gene['inheritance_models'] = [model.strip() for model in models.split(',') if model.strip() in VALID_MODELS]
# If a gene is known to be associated with mosaicism this is annotated
gene['mosaicism'] = True if gene_info.get('mosaicism') else False
# If a gene is known to have reduced penetrance this is annotated
gene['reduced_penetrance'] = True if gene_info.get('reduced_penetrance') else False
# The database entry version is a way to track when a a gene was added or
# modified, optional
gene['database_entry_version'] = gene_info.get('database_entry_version')
return gene |
def get_create_option(self, context, q):
"""Form the correct create_option to append to results."""
create_option = []
display_create_option = False
if self.create_field and q:
page_obj = context.get('page_obj', None)
if page_obj is None or page_obj.number == 1:
display_create_option = True
if display_create_option and self.has_add_permission(self.request):
'''
Generate querysets of Locations, StaffMembers, and Users that
match the query string.
'''
for s in Location.objects.filter(
Q(
Q(name__istartswith=q) & Q(transactionparty__isnull=True)
)
):
create_option += [{
'id': 'Location_%s' % s.id,
'text': _('Generate from location "%(location)s"') % {'location': s.name},
'create_id': True,
}]
for s in StaffMember.objects.filter(
Q(
(Q(firstName__istartswith=q) | Q(lastName__istartswith=q)) &
Q(transactionparty__isnull=True)
)
):
create_option += [{
'id': 'StaffMember_%s' % s.id,
'text': _('Generate from staff member "%(staff_member)s"') % {'staff_member': s.fullName},
'create_id': True,
}]
for s in User.objects.filter(
Q(
(Q(first_name__istartswith=q) | Q(last_name__istartswith=q)) &
Q(staffmember__isnull=True) & Q(transactionparty__isnull=True)
)
):
create_option += [{
'id': 'User_%s' % s.id,
'text': _('Generate from user "%(user)s"') % {'user': s.get_full_name()},
'create_id': True,
}]
# Finally, allow creation from a name only.
create_option += [{
'id': q,
'text': _('Create "%(new_value)s"') % {'new_value': q},
'create_id': True,
}]
return create_option | def function[get_create_option, parameter[self, context, q]]:
constant[Form the correct create_option to append to results.]
variable[create_option] assign[=] list[[]]
variable[display_create_option] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b1306050> begin[:]
variable[page_obj] assign[=] call[name[context].get, parameter[constant[page_obj], constant[None]]]
if <ast.BoolOp object at 0x7da1b1306590> begin[:]
variable[display_create_option] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b1305330> begin[:]
constant[
Generate querysets of Locations, StaffMembers, and Users that
match the query string.
]
for taget[name[s]] in starred[call[name[Location].objects.filter, parameter[call[name[Q], parameter[binary_operation[call[name[Q], parameter[]] <ast.BitAnd object at 0x7da2590d6b60> call[name[Q], parameter[]]]]]]]] begin[:]
<ast.AugAssign object at 0x7da1b1232dd0>
for taget[name[s]] in starred[call[name[StaffMember].objects.filter, parameter[call[name[Q], parameter[binary_operation[binary_operation[call[name[Q], parameter[]] <ast.BitOr object at 0x7da2590d6aa0> call[name[Q], parameter[]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[Q], parameter[]]]]]]]] begin[:]
<ast.AugAssign object at 0x7da1b13a7160>
for taget[name[s]] in starred[call[name[User].objects.filter, parameter[call[name[Q], parameter[binary_operation[binary_operation[binary_operation[call[name[Q], parameter[]] <ast.BitOr object at 0x7da2590d6aa0> call[name[Q], parameter[]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[Q], parameter[]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[Q], parameter[]]]]]]]] begin[:]
<ast.AugAssign object at 0x7da1b13a6500>
<ast.AugAssign object at 0x7da1b13a71f0>
return[name[create_option]] | keyword[def] identifier[get_create_option] ( identifier[self] , identifier[context] , identifier[q] ):
literal[string]
identifier[create_option] =[]
identifier[display_create_option] = keyword[False]
keyword[if] identifier[self] . identifier[create_field] keyword[and] identifier[q] :
identifier[page_obj] = identifier[context] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[page_obj] keyword[is] keyword[None] keyword[or] identifier[page_obj] . identifier[number] == literal[int] :
identifier[display_create_option] = keyword[True]
keyword[if] identifier[display_create_option] keyword[and] identifier[self] . identifier[has_add_permission] ( identifier[self] . identifier[request] ):
literal[string]
keyword[for] identifier[s] keyword[in] identifier[Location] . identifier[objects] . identifier[filter] (
identifier[Q] (
identifier[Q] ( identifier[name__istartswith] = identifier[q] )& identifier[Q] ( identifier[transactionparty__isnull] = keyword[True] )
)
):
identifier[create_option] +=[{
literal[string] : literal[string] % identifier[s] . identifier[id] ,
literal[string] : identifier[_] ( literal[string] )%{ literal[string] : identifier[s] . identifier[name] },
literal[string] : keyword[True] ,
}]
keyword[for] identifier[s] keyword[in] identifier[StaffMember] . identifier[objects] . identifier[filter] (
identifier[Q] (
( identifier[Q] ( identifier[firstName__istartswith] = identifier[q] )| identifier[Q] ( identifier[lastName__istartswith] = identifier[q] ))&
identifier[Q] ( identifier[transactionparty__isnull] = keyword[True] )
)
):
identifier[create_option] +=[{
literal[string] : literal[string] % identifier[s] . identifier[id] ,
literal[string] : identifier[_] ( literal[string] )%{ literal[string] : identifier[s] . identifier[fullName] },
literal[string] : keyword[True] ,
}]
keyword[for] identifier[s] keyword[in] identifier[User] . identifier[objects] . identifier[filter] (
identifier[Q] (
( identifier[Q] ( identifier[first_name__istartswith] = identifier[q] )| identifier[Q] ( identifier[last_name__istartswith] = identifier[q] ))&
identifier[Q] ( identifier[staffmember__isnull] = keyword[True] )& identifier[Q] ( identifier[transactionparty__isnull] = keyword[True] )
)
):
identifier[create_option] +=[{
literal[string] : literal[string] % identifier[s] . identifier[id] ,
literal[string] : identifier[_] ( literal[string] )%{ literal[string] : identifier[s] . identifier[get_full_name] ()},
literal[string] : keyword[True] ,
}]
identifier[create_option] +=[{
literal[string] : identifier[q] ,
literal[string] : identifier[_] ( literal[string] )%{ literal[string] : identifier[q] },
literal[string] : keyword[True] ,
}]
keyword[return] identifier[create_option] | def get_create_option(self, context, q):
"""Form the correct create_option to append to results."""
create_option = []
display_create_option = False
if self.create_field and q:
page_obj = context.get('page_obj', None)
if page_obj is None or page_obj.number == 1:
display_create_option = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if display_create_option and self.has_add_permission(self.request):
'\n Generate querysets of Locations, StaffMembers, and Users that \n match the query string.\n '
for s in Location.objects.filter(Q(Q(name__istartswith=q) & Q(transactionparty__isnull=True))):
create_option += [{'id': 'Location_%s' % s.id, 'text': _('Generate from location "%(location)s"') % {'location': s.name}, 'create_id': True}] # depends on [control=['for'], data=['s']]
for s in StaffMember.objects.filter(Q((Q(firstName__istartswith=q) | Q(lastName__istartswith=q)) & Q(transactionparty__isnull=True))):
create_option += [{'id': 'StaffMember_%s' % s.id, 'text': _('Generate from staff member "%(staff_member)s"') % {'staff_member': s.fullName}, 'create_id': True}] # depends on [control=['for'], data=['s']]
for s in User.objects.filter(Q((Q(first_name__istartswith=q) | Q(last_name__istartswith=q)) & Q(staffmember__isnull=True) & Q(transactionparty__isnull=True))):
create_option += [{'id': 'User_%s' % s.id, 'text': _('Generate from user "%(user)s"') % {'user': s.get_full_name()}, 'create_id': True}] # depends on [control=['for'], data=['s']] # Finally, allow creation from a name only.
create_option += [{'id': q, 'text': _('Create "%(new_value)s"') % {'new_value': q}, 'create_id': True}] # depends on [control=['if'], data=[]]
return create_option |
def last_git_release_tag(git_tags: str) -> str:
"""
:git_tags: chronos.helpers.git_tags() function output.
Returns the latest Git tag ending with a SemVer as a string.
"""
semver_re = re.compile(r'[0-9]+\.[0-9]+\.[0-9]+$')
str_ver = []
for i in git_tags.split():
if semver_re.search(i):
str_ver.append(i)
try:
return str_ver[0]
except IndexError:
raise NoGitTagsException | def function[last_git_release_tag, parameter[git_tags]]:
constant[
:git_tags: chronos.helpers.git_tags() function output.
Returns the latest Git tag ending with a SemVer as a string.
]
variable[semver_re] assign[=] call[name[re].compile, parameter[constant[[0-9]+\.[0-9]+\.[0-9]+$]]]
variable[str_ver] assign[=] list[[]]
for taget[name[i]] in starred[call[name[git_tags].split, parameter[]]] begin[:]
if call[name[semver_re].search, parameter[name[i]]] begin[:]
call[name[str_ver].append, parameter[name[i]]]
<ast.Try object at 0x7da1b1175210> | keyword[def] identifier[last_git_release_tag] ( identifier[git_tags] : identifier[str] )-> identifier[str] :
literal[string]
identifier[semver_re] = identifier[re] . identifier[compile] ( literal[string] )
identifier[str_ver] =[]
keyword[for] identifier[i] keyword[in] identifier[git_tags] . identifier[split] ():
keyword[if] identifier[semver_re] . identifier[search] ( identifier[i] ):
identifier[str_ver] . identifier[append] ( identifier[i] )
keyword[try] :
keyword[return] identifier[str_ver] [ literal[int] ]
keyword[except] identifier[IndexError] :
keyword[raise] identifier[NoGitTagsException] | def last_git_release_tag(git_tags: str) -> str:
"""
:git_tags: chronos.helpers.git_tags() function output.
Returns the latest Git tag ending with a SemVer as a string.
"""
semver_re = re.compile('[0-9]+\\.[0-9]+\\.[0-9]+$')
str_ver = []
for i in git_tags.split():
if semver_re.search(i):
str_ver.append(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
try:
return str_ver[0] # depends on [control=['try'], data=[]]
except IndexError:
raise NoGitTagsException # depends on [control=['except'], data=[]] |
def run(self, event, lambda_context):
"""Run policy in push mode against given event.
Lambda automatically generates cloud watch logs, and metrics
for us, albeit with some deficienies, metrics no longer count
against valid resources matches, but against execution.
If metrics execution option is enabled, custodian will generate
metrics per normal.
"""
from c7n.actions import EventAction
mode = self.policy.data.get('mode', {})
if not bool(mode.get("log", True)):
root = logging.getLogger()
map(root.removeHandler, root.handlers[:])
root.handlers = [logging.NullHandler()]
resources = self.resolve_resources(event)
if not resources:
return resources
resources = self.policy.resource_manager.filter_resources(
resources, event)
if 'debug' in event:
self.policy.log.info("Filtered resources %d" % len(resources))
if not resources:
self.policy.log.info(
"policy: %s resources: %s no resources matched" % (
self.policy.name, self.policy.resource_type))
return
with self.policy.ctx:
self.policy.ctx.metrics.put_metric(
'ResourceCount', len(resources), 'Count', Scope="Policy",
buffer=False)
if 'debug' in event:
self.policy.log.info(
"Invoking actions %s", self.policy.resource_manager.actions)
self.policy._write_file(
'resources.json', utils.dumps(resources, indent=2))
for action in self.policy.resource_manager.actions:
self.policy.log.info(
"policy: %s invoking action: %s resources: %d",
self.policy.name, action.name, len(resources))
if isinstance(action, EventAction):
results = action.process(resources, event)
else:
results = action.process(resources)
self.policy._write_file(
"action-%s" % action.name, utils.dumps(results))
return resources | def function[run, parameter[self, event, lambda_context]]:
constant[Run policy in push mode against given event.
Lambda automatically generates cloud watch logs, and metrics
for us, albeit with some deficienies, metrics no longer count
against valid resources matches, but against execution.
If metrics execution option is enabled, custodian will generate
metrics per normal.
]
from relative_module[c7n.actions] import module[EventAction]
variable[mode] assign[=] call[name[self].policy.data.get, parameter[constant[mode], dictionary[[], []]]]
if <ast.UnaryOp object at 0x7da2054a7a30> begin[:]
variable[root] assign[=] call[name[logging].getLogger, parameter[]]
call[name[map], parameter[name[root].removeHandler, call[name[root].handlers][<ast.Slice object at 0x7da2054a5570>]]]
name[root].handlers assign[=] list[[<ast.Call object at 0x7da2054a7a00>]]
variable[resources] assign[=] call[name[self].resolve_resources, parameter[name[event]]]
if <ast.UnaryOp object at 0x7da2054a7a60> begin[:]
return[name[resources]]
variable[resources] assign[=] call[name[self].policy.resource_manager.filter_resources, parameter[name[resources], name[event]]]
if compare[constant[debug] in name[event]] begin[:]
call[name[self].policy.log.info, parameter[binary_operation[constant[Filtered resources %d] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[resources]]]]]]
if <ast.UnaryOp object at 0x7da1b2098c40> begin[:]
call[name[self].policy.log.info, parameter[binary_operation[constant[policy: %s resources: %s no resources matched] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b209a020>, <ast.Attribute object at 0x7da1b2098370>]]]]]
return[None]
with name[self].policy.ctx begin[:]
call[name[self].policy.ctx.metrics.put_metric, parameter[constant[ResourceCount], call[name[len], parameter[name[resources]]], constant[Count]]]
if compare[constant[debug] in name[event]] begin[:]
call[name[self].policy.log.info, parameter[constant[Invoking actions %s], name[self].policy.resource_manager.actions]]
call[name[self].policy._write_file, parameter[constant[resources.json], call[name[utils].dumps, parameter[name[resources]]]]]
for taget[name[action]] in starred[name[self].policy.resource_manager.actions] begin[:]
call[name[self].policy.log.info, parameter[constant[policy: %s invoking action: %s resources: %d], name[self].policy.name, name[action].name, call[name[len], parameter[name[resources]]]]]
if call[name[isinstance], parameter[name[action], name[EventAction]]] begin[:]
variable[results] assign[=] call[name[action].process, parameter[name[resources], name[event]]]
call[name[self].policy._write_file, parameter[binary_operation[constant[action-%s] <ast.Mod object at 0x7da2590d6920> name[action].name], call[name[utils].dumps, parameter[name[results]]]]]
return[name[resources]] | keyword[def] identifier[run] ( identifier[self] , identifier[event] , identifier[lambda_context] ):
literal[string]
keyword[from] identifier[c7n] . identifier[actions] keyword[import] identifier[EventAction]
identifier[mode] = identifier[self] . identifier[policy] . identifier[data] . identifier[get] ( literal[string] ,{})
keyword[if] keyword[not] identifier[bool] ( identifier[mode] . identifier[get] ( literal[string] , keyword[True] )):
identifier[root] = identifier[logging] . identifier[getLogger] ()
identifier[map] ( identifier[root] . identifier[removeHandler] , identifier[root] . identifier[handlers] [:])
identifier[root] . identifier[handlers] =[ identifier[logging] . identifier[NullHandler] ()]
identifier[resources] = identifier[self] . identifier[resolve_resources] ( identifier[event] )
keyword[if] keyword[not] identifier[resources] :
keyword[return] identifier[resources]
identifier[resources] = identifier[self] . identifier[policy] . identifier[resource_manager] . identifier[filter_resources] (
identifier[resources] , identifier[event] )
keyword[if] literal[string] keyword[in] identifier[event] :
identifier[self] . identifier[policy] . identifier[log] . identifier[info] ( literal[string] % identifier[len] ( identifier[resources] ))
keyword[if] keyword[not] identifier[resources] :
identifier[self] . identifier[policy] . identifier[log] . identifier[info] (
literal[string] %(
identifier[self] . identifier[policy] . identifier[name] , identifier[self] . identifier[policy] . identifier[resource_type] ))
keyword[return]
keyword[with] identifier[self] . identifier[policy] . identifier[ctx] :
identifier[self] . identifier[policy] . identifier[ctx] . identifier[metrics] . identifier[put_metric] (
literal[string] , identifier[len] ( identifier[resources] ), literal[string] , identifier[Scope] = literal[string] ,
identifier[buffer] = keyword[False] )
keyword[if] literal[string] keyword[in] identifier[event] :
identifier[self] . identifier[policy] . identifier[log] . identifier[info] (
literal[string] , identifier[self] . identifier[policy] . identifier[resource_manager] . identifier[actions] )
identifier[self] . identifier[policy] . identifier[_write_file] (
literal[string] , identifier[utils] . identifier[dumps] ( identifier[resources] , identifier[indent] = literal[int] ))
keyword[for] identifier[action] keyword[in] identifier[self] . identifier[policy] . identifier[resource_manager] . identifier[actions] :
identifier[self] . identifier[policy] . identifier[log] . identifier[info] (
literal[string] ,
identifier[self] . identifier[policy] . identifier[name] , identifier[action] . identifier[name] , identifier[len] ( identifier[resources] ))
keyword[if] identifier[isinstance] ( identifier[action] , identifier[EventAction] ):
identifier[results] = identifier[action] . identifier[process] ( identifier[resources] , identifier[event] )
keyword[else] :
identifier[results] = identifier[action] . identifier[process] ( identifier[resources] )
identifier[self] . identifier[policy] . identifier[_write_file] (
literal[string] % identifier[action] . identifier[name] , identifier[utils] . identifier[dumps] ( identifier[results] ))
keyword[return] identifier[resources] | def run(self, event, lambda_context):
"""Run policy in push mode against given event.
Lambda automatically generates cloud watch logs, and metrics
for us, albeit with some deficienies, metrics no longer count
against valid resources matches, but against execution.
If metrics execution option is enabled, custodian will generate
metrics per normal.
"""
from c7n.actions import EventAction
mode = self.policy.data.get('mode', {})
if not bool(mode.get('log', True)):
root = logging.getLogger()
map(root.removeHandler, root.handlers[:])
root.handlers = [logging.NullHandler()] # depends on [control=['if'], data=[]]
resources = self.resolve_resources(event)
if not resources:
return resources # depends on [control=['if'], data=[]]
resources = self.policy.resource_manager.filter_resources(resources, event)
if 'debug' in event:
self.policy.log.info('Filtered resources %d' % len(resources)) # depends on [control=['if'], data=[]]
if not resources:
self.policy.log.info('policy: %s resources: %s no resources matched' % (self.policy.name, self.policy.resource_type))
return # depends on [control=['if'], data=[]]
with self.policy.ctx:
self.policy.ctx.metrics.put_metric('ResourceCount', len(resources), 'Count', Scope='Policy', buffer=False)
if 'debug' in event:
self.policy.log.info('Invoking actions %s', self.policy.resource_manager.actions) # depends on [control=['if'], data=[]]
self.policy._write_file('resources.json', utils.dumps(resources, indent=2))
for action in self.policy.resource_manager.actions:
self.policy.log.info('policy: %s invoking action: %s resources: %d', self.policy.name, action.name, len(resources))
if isinstance(action, EventAction):
results = action.process(resources, event) # depends on [control=['if'], data=[]]
else:
results = action.process(resources)
self.policy._write_file('action-%s' % action.name, utils.dumps(results)) # depends on [control=['for'], data=['action']] # depends on [control=['with'], data=[]]
return resources |
def _transform(transformer_chain: Sequence[Tuple[DataTransformer, Type]], data: S, context: PipelineContext = None) -> T:
    """Run *data* through a chain of transformers, one after the other.

    Args:
        transformer_chain: Sequence of (transformer, target type) pairs
            applied in order.
        data: The value to be converted.
        context: Mutable context shared by all transformations.

    Returns:
        The value produced by the last transformer in the chain.
    """
    result = data
    for current_transformer, desired_type in transformer_chain:
        # noinspection PyTypeChecker
        result = current_transformer.transform(desired_type, result, context)
    return result
constant[Transform data to a new type.
Args:
transformer_chain: A sequence of (transformer, type) pairs to convert the data.
data: The data to be transformed.
context: The context of the transformations (mutable).
Returns:
The transformed data.
]
for taget[tuple[[<ast.Name object at 0x7da1b1932aa0>, <ast.Name object at 0x7da1b1933dc0>]]] in starred[name[transformer_chain]] begin[:]
variable[data] assign[=] call[name[transformer].transform, parameter[name[target_type], name[data], name[context]]]
return[name[data]] | keyword[def] identifier[_transform] ( identifier[transformer_chain] : identifier[Sequence] [ identifier[Tuple] [ identifier[DataTransformer] , identifier[Type] ]], identifier[data] : identifier[S] , identifier[context] : identifier[PipelineContext] = keyword[None] )-> identifier[T] :
literal[string]
keyword[for] identifier[transformer] , identifier[target_type] keyword[in] identifier[transformer_chain] :
identifier[data] = identifier[transformer] . identifier[transform] ( identifier[target_type] , identifier[data] , identifier[context] )
keyword[return] identifier[data] | def _transform(transformer_chain: Sequence[Tuple[DataTransformer, Type]], data: S, context: PipelineContext=None) -> T:
"""Transform data to a new type.
Args:
transformer_chain: A sequence of (transformer, type) pairs to convert the data.
data: The data to be transformed.
context: The context of the transformations (mutable).
Returns:
The transformed data.
"""
for (transformer, target_type) in transformer_chain:
# noinspection PyTypeChecker
data = transformer.transform(target_type, data, context) # depends on [control=['for'], data=[]]
return data |
def _build_likelihood(self):
    r"""
    Construct a tensorflow function to compute the likelihood
    \log p(Y | theta).
    """
    num_data = tf.shape(self.X)[0]
    # Add the (diagonal) observation-noise variance to the kernel matrix.
    noise = tf.eye(num_data, dtype=settings.float_type) * self.likelihood.variance
    gram = self.kern.K(self.X) + noise
    chol = tf.cholesky(gram)
    mean = self.mean_function(self.X)
    # (R,) log-likelihoods, one per independent output dimension of Y.
    log_densities = multivariate_normal(self.Y, mean, chol)
    return tf.reduce_sum(log_densities)
return tf.reduce_sum(logpdf) | def function[_build_likelihood, parameter[self]]:
constant[
Construct a tensorflow function to compute the likelihood.
\log p(Y | theta).
]
variable[K] assign[=] binary_operation[call[name[self].kern.K, parameter[name[self].X]] + binary_operation[call[name[tf].eye, parameter[call[call[name[tf].shape, parameter[name[self].X]]][constant[0]]]] * name[self].likelihood.variance]]
variable[L] assign[=] call[name[tf].cholesky, parameter[name[K]]]
variable[m] assign[=] call[name[self].mean_function, parameter[name[self].X]]
variable[logpdf] assign[=] call[name[multivariate_normal], parameter[name[self].Y, name[m], name[L]]]
return[call[name[tf].reduce_sum, parameter[name[logpdf]]]] | keyword[def] identifier[_build_likelihood] ( identifier[self] ):
literal[string]
identifier[K] = identifier[self] . identifier[kern] . identifier[K] ( identifier[self] . identifier[X] )+ identifier[tf] . identifier[eye] ( identifier[tf] . identifier[shape] ( identifier[self] . identifier[X] )[ literal[int] ], identifier[dtype] = identifier[settings] . identifier[float_type] )* identifier[self] . identifier[likelihood] . identifier[variance]
identifier[L] = identifier[tf] . identifier[cholesky] ( identifier[K] )
identifier[m] = identifier[self] . identifier[mean_function] ( identifier[self] . identifier[X] )
identifier[logpdf] = identifier[multivariate_normal] ( identifier[self] . identifier[Y] , identifier[m] , identifier[L] )
keyword[return] identifier[tf] . identifier[reduce_sum] ( identifier[logpdf] ) | def _build_likelihood(self):
"""
Construct a tensorflow function to compute the likelihood.
\\log p(Y | theta).
"""
K = self.kern.K(self.X) + tf.eye(tf.shape(self.X)[0], dtype=settings.float_type) * self.likelihood.variance
L = tf.cholesky(K)
m = self.mean_function(self.X)
logpdf = multivariate_normal(self.Y, m, L) # (R,) log-likelihoods for each independent dimension of Y
return tf.reduce_sum(logpdf) |
def classify(self, token_type, value, lineno, column, line):
    """Return the grammar label index for a token.

    Keywords are resolved through ``keyword_ids`` first; anything else
    falls back to the token-type table. Raises ParseError for tokens the
    grammar does not recognize.
    """
    grammar = self.grammar
    if token_type == grammar.KEYWORD_TOKEN:
        keyword_label = grammar.keyword_ids.get(value, -1)
        if keyword_label != -1:
            return keyword_label
    token_label = grammar.token_ids.get(token_type, -1)
    if token_label == -1:
        raise ParseError("invalid token", token_type, value, lineno, column,
                         line)
    return token_label
return label_index | def function[classify, parameter[self, token_type, value, lineno, column, line]]:
constant[Find the label for a token.]
if compare[name[token_type] equal[==] name[self].grammar.KEYWORD_TOKEN] begin[:]
variable[label_index] assign[=] call[name[self].grammar.keyword_ids.get, parameter[name[value], <ast.UnaryOp object at 0x7da2047e9780>]]
if compare[name[label_index] not_equal[!=] <ast.UnaryOp object at 0x7da2047e8730>] begin[:]
return[name[label_index]]
variable[label_index] assign[=] call[name[self].grammar.token_ids.get, parameter[name[token_type], <ast.UnaryOp object at 0x7da2047e8910>]]
if compare[name[label_index] equal[==] <ast.UnaryOp object at 0x7da2047e9ff0>] begin[:]
<ast.Raise object at 0x7da2047eb850>
return[name[label_index]] | keyword[def] identifier[classify] ( identifier[self] , identifier[token_type] , identifier[value] , identifier[lineno] , identifier[column] , identifier[line] ):
literal[string]
keyword[if] identifier[token_type] == identifier[self] . identifier[grammar] . identifier[KEYWORD_TOKEN] :
identifier[label_index] = identifier[self] . identifier[grammar] . identifier[keyword_ids] . identifier[get] ( identifier[value] ,- literal[int] )
keyword[if] identifier[label_index] !=- literal[int] :
keyword[return] identifier[label_index]
identifier[label_index] = identifier[self] . identifier[grammar] . identifier[token_ids] . identifier[get] ( identifier[token_type] ,- literal[int] )
keyword[if] identifier[label_index] ==- literal[int] :
keyword[raise] identifier[ParseError] ( literal[string] , identifier[token_type] , identifier[value] , identifier[lineno] , identifier[column] ,
identifier[line] )
keyword[return] identifier[label_index] | def classify(self, token_type, value, lineno, column, line):
"""Find the label for a token."""
if token_type == self.grammar.KEYWORD_TOKEN:
label_index = self.grammar.keyword_ids.get(value, -1)
if label_index != -1:
return label_index # depends on [control=['if'], data=['label_index']] # depends on [control=['if'], data=[]]
label_index = self.grammar.token_ids.get(token_type, -1)
if label_index == -1:
raise ParseError('invalid token', token_type, value, lineno, column, line) # depends on [control=['if'], data=[]]
return label_index |
def SetLines(self, lines):
    """Set number of screen lines.

    Args:
      lines: An int, number of lines. If None, use terminal dimensions.

    Raises:
      ValueError, TypeError: Not a valid integer representation.
    """
    # Always refresh both dimensions from the terminal first.
    term_lines, term_cols = TerminalSize()
    self._cli_lines = term_lines
    self._cli_cols = term_cols
    # An explicit (truthy) line count overrides the detected height.
    if lines:
        self._cli_lines = int(lines)
self._cli_lines = int(lines) | def function[SetLines, parameter[self, lines]]:
constant[Set number of screen lines.
Args:
lines: An int, number of lines. If None, use terminal dimensions.
Raises:
ValueError, TypeError: Not a valid integer representation.
]
<ast.Tuple object at 0x7da20c6a8dc0> assign[=] call[name[TerminalSize], parameter[]]
if name[lines] begin[:]
name[self]._cli_lines assign[=] call[name[int], parameter[name[lines]]] | keyword[def] identifier[SetLines] ( identifier[self] , identifier[lines] ):
literal[string]
( identifier[self] . identifier[_cli_lines] , identifier[self] . identifier[_cli_cols] )= identifier[TerminalSize] ()
keyword[if] identifier[lines] :
identifier[self] . identifier[_cli_lines] = identifier[int] ( identifier[lines] ) | def SetLines(self, lines):
"""Set number of screen lines.
Args:
lines: An int, number of lines. If None, use terminal dimensions.
Raises:
ValueError, TypeError: Not a valid integer representation.
"""
(self._cli_lines, self._cli_cols) = TerminalSize()
if lines:
self._cli_lines = int(lines) # depends on [control=['if'], data=[]] |
def propose_unif(self):
    """Propose a new live point by sampling *uniformly*
    within the ellipsoid."""
    # Rejection-sample until the draw lands inside the unit cube.
    u = self.ell.sample(rstate=self.rstate)
    while not unitcheck(u, self.nonperiodic):
        u = self.ell.sample(rstate=self.rstate)
    return u, self.ell.axes
return u, self.ell.axes | def function[propose_unif, parameter[self]]:
constant[Propose a new live point by sampling *uniformly*
within the ellipsoid.]
while constant[True] begin[:]
variable[u] assign[=] call[name[self].ell.sample, parameter[]]
if call[name[unitcheck], parameter[name[u], name[self].nonperiodic]] begin[:]
break
return[tuple[[<ast.Name object at 0x7da1b1e01c60>, <ast.Attribute object at 0x7da1b1e01b10>]]] | keyword[def] identifier[propose_unif] ( identifier[self] ):
literal[string]
keyword[while] keyword[True] :
identifier[u] = identifier[self] . identifier[ell] . identifier[sample] ( identifier[rstate] = identifier[self] . identifier[rstate] )
keyword[if] identifier[unitcheck] ( identifier[u] , identifier[self] . identifier[nonperiodic] ):
keyword[break]
keyword[return] identifier[u] , identifier[self] . identifier[ell] . identifier[axes] | def propose_unif(self):
"""Propose a new live point by sampling *uniformly*
within the ellipsoid."""
while True:
# Sample a point from the ellipsoid.
u = self.ell.sample(rstate=self.rstate)
# Check if `u` is within the unit cube.
if unitcheck(u, self.nonperiodic):
break # if it is, we're done! # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return (u, self.ell.axes) |
def filter(pred: Callable, xs: Iterable):
    """
    Applied a predicate to a list returning a :py:class:`PromisedObject`
    containing the values satisfying the predicate.

    :param pred: predicate function.
    :param xs: iterable object.
    :returns: :py:class:`PromisedObject`
    """
    # Lazily select matching items and gather them into one promise.
    return gather(*(item for item in xs if pred(item)))
constant[
Applied a predicate to a list returning a :py:class:`PromisedObject`
containing the values satisfying the predicate.
:param pred: predicate function.
:param xs: iterable object.
:returns: :py:class:`PromisedObject`
]
variable[generator] assign[=] <ast.GeneratorExp object at 0x7da18f09c400>
return[call[name[gather], parameter[<ast.Starred object at 0x7da18f09f550>]]] | keyword[def] identifier[filter] ( identifier[pred] : identifier[Callable] , identifier[xs] : identifier[Iterable] ):
literal[string]
identifier[generator] =( identifier[x] keyword[for] identifier[x] keyword[in] identifier[xs] keyword[if] identifier[pred] ( identifier[x] ))
keyword[return] identifier[gather] (* identifier[generator] ) | def filter(pred: Callable, xs: Iterable):
"""
Applied a predicate to a list returning a :py:class:`PromisedObject`
containing the values satisfying the predicate.
:param pred: predicate function.
:param xs: iterable object.
:returns: :py:class:`PromisedObject`
"""
generator = (x for x in xs if pred(x))
return gather(*generator) |
def _run_strip_accents(self, text):
"""Strips accents from a piece of text."""
text = unicodedata.normalize("NFD", text)
output = []
for char in text:
cat = unicodedata.category(char)
if cat == "Mn":
continue
output.append(char)
return "".join(output) | def function[_run_strip_accents, parameter[self, text]]:
constant[Strips accents from a piece of text.]
variable[text] assign[=] call[name[unicodedata].normalize, parameter[constant[NFD], name[text]]]
variable[output] assign[=] list[[]]
for taget[name[char]] in starred[name[text]] begin[:]
variable[cat] assign[=] call[name[unicodedata].category, parameter[name[char]]]
if compare[name[cat] equal[==] constant[Mn]] begin[:]
continue
call[name[output].append, parameter[name[char]]]
return[call[constant[].join, parameter[name[output]]]] | keyword[def] identifier[_run_strip_accents] ( identifier[self] , identifier[text] ):
literal[string]
identifier[text] = identifier[unicodedata] . identifier[normalize] ( literal[string] , identifier[text] )
identifier[output] =[]
keyword[for] identifier[char] keyword[in] identifier[text] :
identifier[cat] = identifier[unicodedata] . identifier[category] ( identifier[char] )
keyword[if] identifier[cat] == literal[string] :
keyword[continue]
identifier[output] . identifier[append] ( identifier[char] )
keyword[return] literal[string] . identifier[join] ( identifier[output] ) | def _run_strip_accents(self, text):
"""Strips accents from a piece of text."""
text = unicodedata.normalize('NFD', text)
output = []
for char in text:
cat = unicodedata.category(char)
if cat == 'Mn':
continue # depends on [control=['if'], data=[]]
output.append(char) # depends on [control=['for'], data=['char']]
return ''.join(output) |
async def load(self, node_id=None):
    """Load nodes from KLF 200, if no node_id is specified all nodes are loaded."""
    if node_id is None:
        await self._load_all_nodes()
    else:
        await self._load_node(node_id=node_id)
literal[string]
keyword[if] identifier[node_id] keyword[is] keyword[not] keyword[None] :
keyword[await] identifier[self] . identifier[_load_node] ( identifier[node_id] = identifier[node_id] )
keyword[else] :
keyword[await] identifier[self] . identifier[_load_all_nodes] () | async def load(self, node_id=None):
"""Load nodes from KLF 200, if no node_id is specified all nodes are loaded."""
if node_id is not None:
await self._load_node(node_id=node_id) # depends on [control=['if'], data=['node_id']]
else:
await self._load_all_nodes() |
def editorData(self, editor):
    """
    Pulls the value from the inputed editor.

    :param editor | <QWidget>

    :return <variant>
    """
    # Check order matters: XMultiTagEdit is tested before the generic
    # Qt editor widgets so tag editors are not treated as line edits.
    if isinstance(editor, XMultiTagEdit):
        return editor.tags()
    if isinstance(editor, QComboBox):
        return nativestring(editor.currentText())
    if isinstance(editor, QLineEdit):
        return nativestring(editor.text())
    return None
constant[
Pulls the value from the inputed editor.
:param editor | <QWidget>
:return <variant>
]
if call[name[isinstance], parameter[name[editor], name[XMultiTagEdit]]] begin[:]
return[call[name[editor].tags, parameter[]]]
return[constant[None]] | keyword[def] identifier[editorData] ( identifier[self] , identifier[editor] ):
literal[string]
keyword[if] ( identifier[isinstance] ( identifier[editor] , identifier[XMultiTagEdit] )):
keyword[return] identifier[editor] . identifier[tags] ()
keyword[elif] ( identifier[isinstance] ( identifier[editor] , identifier[QComboBox] )):
keyword[return] identifier[nativestring] ( identifier[editor] . identifier[currentText] ())
keyword[elif] ( identifier[isinstance] ( identifier[editor] , identifier[QLineEdit] )):
keyword[return] identifier[nativestring] ( identifier[editor] . identifier[text] ())
keyword[return] keyword[None] | def editorData(self, editor):
"""
Pulls the value from the inputed editor.
:param editor | <QWidget>
:return <variant>
""" # set the information from a multi-tag edit
if isinstance(editor, XMultiTagEdit):
return editor.tags() # depends on [control=['if'], data=[]] # set the information from a combo box
elif isinstance(editor, QComboBox):
return nativestring(editor.currentText()) # depends on [control=['if'], data=[]] # set the information from a line edit
elif isinstance(editor, QLineEdit):
return nativestring(editor.text()) # depends on [control=['if'], data=[]]
return None |
def NameImport(package, as_name=None, prefix=None):
    """
    Accepts a package (Name node), name to import it as (string), and
    optional prefix and returns a node:

    import <package> [as <as_name>]
    """
    effective_prefix = u"" if prefix is None else prefix
    parts = [Name(u"import", prefix=effective_prefix), package]
    if as_name is not None:
        parts.append(Name(u"as", prefix=u" "))
        parts.append(Name(as_name, prefix=u" "))
    return Node(syms.import_name, parts)
return Node(syms.import_name, children) | def function[NameImport, parameter[package, as_name, prefix]]:
constant[
Accepts a package (Name node), name to import it as (string), and
optional prefix and returns a node:
import <package> [as <as_name>]
]
if compare[name[prefix] is constant[None]] begin[:]
variable[prefix] assign[=] constant[]
variable[children] assign[=] list[[<ast.Call object at 0x7da18f811540>, <ast.Name object at 0x7da18f812290>]]
if compare[name[as_name] is_not constant[None]] begin[:]
call[name[children].extend, parameter[list[[<ast.Call object at 0x7da18f8100a0>, <ast.Call object at 0x7da18f813fa0>]]]]
return[call[name[Node], parameter[name[syms].import_name, name[children]]]] | keyword[def] identifier[NameImport] ( identifier[package] , identifier[as_name] = keyword[None] , identifier[prefix] = keyword[None] ):
literal[string]
keyword[if] identifier[prefix] keyword[is] keyword[None] :
identifier[prefix] = literal[string]
identifier[children] =[ identifier[Name] ( literal[string] , identifier[prefix] = identifier[prefix] ), identifier[package] ]
keyword[if] identifier[as_name] keyword[is] keyword[not] keyword[None] :
identifier[children] . identifier[extend] ([ identifier[Name] ( literal[string] , identifier[prefix] = literal[string] ),
identifier[Name] ( identifier[as_name] , identifier[prefix] = literal[string] )])
keyword[return] identifier[Node] ( identifier[syms] . identifier[import_name] , identifier[children] ) | def NameImport(package, as_name=None, prefix=None):
"""
Accepts a package (Name node), name to import it as (string), and
optional prefix and returns a node:
import <package> [as <as_name>]
"""
if prefix is None:
prefix = u'' # depends on [control=['if'], data=['prefix']]
children = [Name(u'import', prefix=prefix), package]
if as_name is not None:
children.extend([Name(u'as', prefix=u' '), Name(as_name, prefix=u' ')]) # depends on [control=['if'], data=['as_name']]
return Node(syms.import_name, children) |
def request_show(self, id, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/requests#show-request"
    # Build the endpoint path and delegate to the shared HTTP caller.
    return self.call("/api/v2/requests/{id}.json".format(id=id), **kwargs)
constant[https://developer.zendesk.com/rest_api/docs/core/requests#show-request]
variable[api_path] assign[=] constant[/api/v2/requests/{id}.json]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[request_show] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[id] = identifier[id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] ) | def request_show(self, id, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/requests#show-request"""
api_path = '/api/v2/requests/{id}.json'
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) |
def mutable(function):
    '''Decorator that passes the instance's internal state as the second
    positional argument of the decorated function.

    The wrapped callable is invoked as ``function(self, state, *args,
    **kwargs)`` where ``state`` is whatever ``self._get_state()`` returns.
    '''
    import functools

    # functools.wraps preserves __name__/__doc__/__module__ on the wrapper
    # so decorated methods stay introspectable (help(), debuggers, logs).
    @functools.wraps(function)
    def wrapper(self, *args, **kwargs):
        state = self._get_state()
        return function(self, state, *args, **kwargs)
    return wrapper
constant[Add the instance internal state as the second parameter
of the decorated function.]
def function[wrapper, parameter[self]]:
variable[state] assign[=] call[name[self]._get_state, parameter[]]
return[call[name[function], parameter[name[self], name[state], <ast.Starred object at 0x7da1b09ba7a0>]]]
return[name[wrapper]] | keyword[def] identifier[mutable] ( identifier[function] ):
literal[string]
keyword[def] identifier[wrapper] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
identifier[state] = identifier[self] . identifier[_get_state] ()
keyword[return] identifier[function] ( identifier[self] , identifier[state] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapper] | def mutable(function):
"""Add the instance internal state as the second parameter
of the decorated function."""
def wrapper(self, *args, **kwargs):
state = self._get_state()
return function(self, state, *args, **kwargs)
return wrapper |
def delete(self, uri_relative, custom_headers):
    """Issue a DELETE request against a relative URI.

    :type uri_relative: str
    :type custom_headers: dict[str, str]

    :rtype: BunqResponseRaw
    """
    # DELETE requests carry no body and no query parameters.
    return self._request(self._METHOD_DELETE, uri_relative,
                         self._BYTES_EMPTY, {}, custom_headers)
) | def function[delete, parameter[self, uri_relative, custom_headers]]:
constant[
:type uri_relative: str
:type custom_headers: dict[str, str]
:rtype: BunqResponseRaw
]
return[call[name[self]._request, parameter[name[self]._METHOD_DELETE, name[uri_relative], name[self]._BYTES_EMPTY, dictionary[[], []], name[custom_headers]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[uri_relative] , identifier[custom_headers] ):
literal[string]
keyword[return] identifier[self] . identifier[_request] (
identifier[self] . identifier[_METHOD_DELETE] ,
identifier[uri_relative] ,
identifier[self] . identifier[_BYTES_EMPTY] ,
{},
identifier[custom_headers]
) | def delete(self, uri_relative, custom_headers):
"""
:type uri_relative: str
:type custom_headers: dict[str, str]
:rtype: BunqResponseRaw
"""
return self._request(self._METHOD_DELETE, uri_relative, self._BYTES_EMPTY, {}, custom_headers) |
def add_volume(self, colorchange=True, column=None, name='', str='{name}', **kwargs):
    """
    Add 'volume' study to QuantFigure.studies

    Parameters:
        colorchange : bool
            If True each volume bar is filled according to whether
            'base' rose or fell versus the previous value; if False
            the fill follows the volume series itself.
        column : string
            Data column that contains the volume data.
            Default: 'volume'
        name : string
            Name given to the study
        str : string
            Label factory for studies.
            Wildcards: {name}, {study}, {period}
            Example: 'study: {study} - period: {period}'

    kwargs :
        base : string
            Column that defines positive/negative changes when
            colorchange=True. Default = 'close'
        up_color : string
            Color for positive bars
        down_color : string
            Color for negative bars
    """
    if not column:
        column = self._d['volume']
    rising_color = kwargs.pop('up_color', self.theme['up_color'])
    falling_color = kwargs.pop('down_color', self.theme['down_color'])
    display = utils.merge_dict(
        {'up_color': rising_color, 'down_color': falling_color}, kwargs)
    self._add_study({
        'kind': 'volume',
        'name': name,
        'params': {'colorchange': colorchange, 'base': 'close',
                   'column': column, 'str': None},
        'display': display,
    })
self._add_study(study) | def function[add_volume, parameter[self, colorchange, column, name, str]]:
constant[
Add 'volume' study to QuantFigure.studies
Parameters:
colorchange : bool
If True then each volume bar will have a fill color
depending on if 'base' had a positive or negative
change compared to the previous value
If False then each volume bar will have a fill color
depending on if the volume data itself had a positive or negative
change compared to the previous value
column :string
Defines the data column name that contains the volume data.
Default: 'volume'
name : string
Name given to the study
str : string
Label factory for studies
The following wildcards can be used:
{name} : Name of the column
{study} : Name of the study
{period} : Period used
Examples:
'study: {study} - period: {period}'
kwargs :
base : string
Defines the column which will define the
positive/negative changes (if colorchange=True).
Default = 'close'
up_color : string
Color for positive bars
down_color : string
Color for negative bars
]
if <ast.UnaryOp object at 0x7da1b1cfe260> begin[:]
variable[column] assign[=] call[name[self]._d][constant[volume]]
variable[up_color] assign[=] call[name[kwargs].pop, parameter[constant[up_color], call[name[self].theme][constant[up_color]]]]
variable[down_color] assign[=] call[name[kwargs].pop, parameter[constant[down_color], call[name[self].theme][constant[down_color]]]]
variable[study] assign[=] dictionary[[<ast.Constant object at 0x7da1b216fe80>, <ast.Constant object at 0x7da1b216f310>, <ast.Constant object at 0x7da1b1b46aa0>, <ast.Constant object at 0x7da1b1b46950>], [<ast.Constant object at 0x7da1b1b468f0>, <ast.Name object at 0x7da1b1b469b0>, <ast.Dict object at 0x7da1b1b47790>, <ast.Call object at 0x7da1b1b45b70>]]
call[name[self]._add_study, parameter[name[study]]] | keyword[def] identifier[add_volume] ( identifier[self] , identifier[colorchange] = keyword[True] , identifier[column] = keyword[None] , identifier[name] = literal[string] , identifier[str] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[column] :
identifier[column] = identifier[self] . identifier[_d] [ literal[string] ]
identifier[up_color] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[theme] [ literal[string] ])
identifier[down_color] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[theme] [ literal[string] ])
identifier[study] ={ literal[string] : literal[string] ,
literal[string] : identifier[name] ,
literal[string] :{ literal[string] : identifier[colorchange] , literal[string] : literal[string] , literal[string] : identifier[column] ,
literal[string] : keyword[None] },
literal[string] : identifier[utils] . identifier[merge_dict] ({ literal[string] : identifier[up_color] , literal[string] : identifier[down_color] }, identifier[kwargs] )}
identifier[self] . identifier[_add_study] ( identifier[study] ) | def add_volume(self, colorchange=True, column=None, name='', str='{name}', **kwargs):
"""
Add 'volume' study to QuantFigure.studies
Parameters:
colorchange : bool
If True then each volume bar will have a fill color
depending on if 'base' had a positive or negative
change compared to the previous value
If False then each volume bar will have a fill color
depending on if the volume data itself had a positive or negative
change compared to the previous value
column :string
Defines the data column name that contains the volume data.
Default: 'volume'
name : string
Name given to the study
str : string
Label factory for studies
The following wildcards can be used:
{name} : Name of the column
{study} : Name of the study
{period} : Period used
Examples:
'study: {study} - period: {period}'
kwargs :
base : string
Defines the column which will define the
positive/negative changes (if colorchange=True).
Default = 'close'
up_color : string
Color for positive bars
down_color : string
Color for negative bars
"""
if not column:
column = self._d['volume'] # depends on [control=['if'], data=[]]
up_color = kwargs.pop('up_color', self.theme['up_color'])
down_color = kwargs.pop('down_color', self.theme['down_color'])
study = {'kind': 'volume', 'name': name, 'params': {'colorchange': colorchange, 'base': 'close', 'column': column, 'str': None}, 'display': utils.merge_dict({'up_color': up_color, 'down_color': down_color}, kwargs)}
self._add_study(study) |
def on_usb_device_attach(self, device, error, masked_interfaces, capture_filename):
    """Triggered when a request to capture a USB device (as a result
    of matched USB filters or direct call to
    :py:func:`IConsole.attach_usb_device` ) has completed.
    A @c null @a error object means success, otherwise it
    describes a failure.
    in device of type :class:`IUSBDevice`
    in error of type :class:`IVirtualBoxErrorInfo`
    in masked_interfaces of type int
    in capture_filename of type str
    raises :class:`VBoxErrorInvalidVmState`
        Session state prevents operation.
    raises :class:`VBoxErrorInvalidObjectState`
        Session type prevents operation.
    """
    # Table-driven argument validation; messages are identical to the
    # hand-written guards this replaces.
    _checks = (
        (device, IUSBDevice,
         "device can only be an instance of type IUSBDevice"),
        (error, IVirtualBoxErrorInfo,
         "error can only be an instance of type IVirtualBoxErrorInfo"),
        (masked_interfaces, baseinteger,
         "masked_interfaces can only be an instance of type baseinteger"),
        (capture_filename, basestring,
         "capture_filename can only be an instance of type basestring"),
    )
    for value, expected_type, message in _checks:
        if not isinstance(value, expected_type):
            raise TypeError(message)
    # Forward the event to the underlying COM/XPCOM layer.
    self._call("onUSBDeviceAttach",
               in_p=[device, error, masked_interfaces, capture_filename])
constant[Triggered when a request to capture a USB device (as a result
of matched USB filters or direct call to
:py:func:`IConsole.attach_usb_device` ) has completed.
A @c null @a error object means success, otherwise it
describes a failure.
in device of type :class:`IUSBDevice`
in error of type :class:`IVirtualBoxErrorInfo`
in masked_interfaces of type int
in capture_filename of type str
raises :class:`VBoxErrorInvalidVmState`
Session state prevents operation.
raises :class:`VBoxErrorInvalidObjectState`
Session type prevents operation.
]
if <ast.UnaryOp object at 0x7da20c6c64d0> begin[:]
<ast.Raise object at 0x7da20c6c4fa0>
if <ast.UnaryOp object at 0x7da20c6c4340> begin[:]
<ast.Raise object at 0x7da20c6c6ec0>
if <ast.UnaryOp object at 0x7da20e9b1330> begin[:]
<ast.Raise object at 0x7da20e9b0fd0>
if <ast.UnaryOp object at 0x7da20e9b16c0> begin[:]
<ast.Raise object at 0x7da20e9b0580>
call[name[self]._call, parameter[constant[onUSBDeviceAttach]]] | keyword[def] identifier[on_usb_device_attach] ( identifier[self] , identifier[device] , identifier[error] , identifier[masked_interfaces] , identifier[capture_filename] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[device] , identifier[IUSBDevice] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[error] , identifier[IVirtualBoxErrorInfo] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[masked_interfaces] , identifier[baseinteger] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[capture_filename] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[device] , identifier[error] , identifier[masked_interfaces] , identifier[capture_filename] ]) | def on_usb_device_attach(self, device, error, masked_interfaces, capture_filename):
"""Triggered when a request to capture a USB device (as a result
of matched USB filters or direct call to
:py:func:`IConsole.attach_usb_device` ) has completed.
A @c null @a error object means success, otherwise it
describes a failure.
in device of type :class:`IUSBDevice`
in error of type :class:`IVirtualBoxErrorInfo`
in masked_interfaces of type int
in capture_filename of type str
raises :class:`VBoxErrorInvalidVmState`
Session state prevents operation.
raises :class:`VBoxErrorInvalidObjectState`
Session type prevents operation.
"""
if not isinstance(device, IUSBDevice):
raise TypeError('device can only be an instance of type IUSBDevice') # depends on [control=['if'], data=[]]
if not isinstance(error, IVirtualBoxErrorInfo):
raise TypeError('error can only be an instance of type IVirtualBoxErrorInfo') # depends on [control=['if'], data=[]]
if not isinstance(masked_interfaces, baseinteger):
raise TypeError('masked_interfaces can only be an instance of type baseinteger') # depends on [control=['if'], data=[]]
if not isinstance(capture_filename, basestring):
raise TypeError('capture_filename can only be an instance of type basestring') # depends on [control=['if'], data=[]]
self._call('onUSBDeviceAttach', in_p=[device, error, masked_interfaces, capture_filename]) |
def _create_make_unique(inputs):
  """Replaces the lower bits of each element with iota.
  The iota is used to derive the index, and also serves the purpose to
  make each element unique to break ties.
  Args:
    inputs: A tensor with rank of 2 and dtype of tf.float32.
      [batch_size, original_size].
  Returns:
    A tensor after element wise transformation, with dtype the same as inputs.
      [batch_size, original_size].
  Raises:
    ValueError: If the rank of the input tensor does not equal 2.
  """
  if inputs.shape.ndims != 2:
    raise ValueError("Input of top_k_with_unique must be rank-2 "
                     "but got: %s" % inputs.shape)
  num_rows = inputs.shape[0]
  num_cols = inputs.shape[1]
  zero_mat = tf.zeros([num_rows, num_cols], dtype=tf.int32)

  # Mask that clears the low-order bits so that every element can be made
  # distinct by splicing a per-column iota into those bits.
  log2_ceiling = int(math.ceil(math.log(int(num_cols), 2)))
  next_power_of_two = 1 << log2_ceiling
  count_mask_r2 = tf.fill([num_rows, num_cols],
                          tf.constant(~(next_power_of_two - 1)))

  # Bit pattern of the smallest positive normal float32: sign bit zero,
  # exponent one, fraction zero.
  smallest_normal_r2 = tf.fill([num_rows, num_cols],
                               tf.constant(1 << 23, dtype=tf.int32))

  # Mask that clears the sign bit, used to take the absolute value of the
  # bit-level representation.
  low_bit_mask_r2 = tf.fill([num_rows, num_cols],
                            tf.constant(~(1 << 31), dtype=tf.int32))

  iota = tf.tile(tf.expand_dims(tf.range(num_cols, dtype=tf.int32), 0),
                 [num_rows, 1])

  # Reinterpret floats as int32 and compare |x| with +0, which also treats
  # negative zero as zero.
  bits = tf.bitcast(inputs, tf.int32)
  abs_bits = tf.bitwise.bitwise_and(bits, low_bit_mask_r2)
  is_zero = tf.equal(abs_bits, zero_mat)

  # Zeros are replaced with the smallest normal (sign preserved) so that
  # masking the low bits cannot collapse them to identical values.
  sign_preserving = tf.bitwise.bitwise_or(bits, smallest_normal_r2)
  nonzero_bits = tf.where(is_zero, sign_preserving, bits)

  # Discard the low-order bits and replace them with iota.
  masked_bits = tf.bitwise.bitwise_and(nonzero_bits, count_mask_r2)
  unique_bits = tf.bitwise.bitwise_or(masked_bits, iota)
  return tf.bitcast(unique_bits, tf.float32)
constant[Replaces the lower bits of each element with iota.
The iota is used to derive the index, and also serves the purpose to
make each element unique to break ties.
Args:
inputs: A tensor with rank of 2 and dtype of tf.float32.
[batch_size, original_size].
Returns:
A tensor after element wise transformation, with dtype the same as inputs.
[batch_size, original_size].
Raises:
ValueError: If the rank of the input tensor does not equal 2.
]
if compare[name[inputs].shape.ndims not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da1b2058a90>
variable[height] assign[=] call[name[inputs].shape][constant[0]]
variable[width] assign[=] call[name[inputs].shape][constant[1]]
variable[zeros] assign[=] call[name[tf].zeros, parameter[list[[<ast.Name object at 0x7da1b2059930>, <ast.Name object at 0x7da1b20585e0>]]]]
variable[log2_ceiling] assign[=] call[name[int], parameter[call[name[math].ceil, parameter[call[name[math].log, parameter[call[name[int], parameter[name[width]]], constant[2]]]]]]]
variable[next_power_of_two] assign[=] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> name[log2_ceiling]]
variable[count_mask] assign[=] <ast.UnaryOp object at 0x7da20c76f880>
variable[count_mask_r0] assign[=] call[name[tf].constant, parameter[name[count_mask]]]
variable[count_mask_r2] assign[=] call[name[tf].fill, parameter[list[[<ast.Name object at 0x7da20c76ed10>, <ast.Name object at 0x7da20c76ea70>]], name[count_mask_r0]]]
variable[smallest_normal] assign[=] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> constant[23]]
variable[smallest_normal_r0] assign[=] call[name[tf].constant, parameter[name[smallest_normal]]]
variable[smallest_normal_r2] assign[=] call[name[tf].fill, parameter[list[[<ast.Name object at 0x7da20c76c730>, <ast.Name object at 0x7da20c76f970>]], name[smallest_normal_r0]]]
variable[low_bit_mask] assign[=] <ast.UnaryOp object at 0x7da20c76f9d0>
variable[low_bit_mask_r0] assign[=] call[name[tf].constant, parameter[name[low_bit_mask]]]
variable[low_bit_mask_r2] assign[=] call[name[tf].fill, parameter[list[[<ast.Name object at 0x7da20c76ea40>, <ast.Name object at 0x7da20c76c190>]], name[low_bit_mask_r0]]]
variable[iota] assign[=] call[name[tf].tile, parameter[call[name[tf].expand_dims, parameter[call[name[tf].range, parameter[name[width]]], constant[0]]], list[[<ast.Name object at 0x7da20c76e830>, <ast.Constant object at 0x7da20c76c070>]]]]
variable[input_r2] assign[=] call[name[tf].bitcast, parameter[name[inputs], name[tf].int32]]
variable[abs_r2] assign[=] call[name[tf].bitwise.bitwise_and, parameter[name[input_r2], name[low_bit_mask_r2]]]
variable[if_zero_r2] assign[=] call[name[tf].equal, parameter[name[abs_r2], name[zeros]]]
variable[smallest_normal_preserving_sign_r2] assign[=] call[name[tf].bitwise.bitwise_or, parameter[name[input_r2], name[smallest_normal_r2]]]
variable[input_no_zeros_r2] assign[=] call[name[tf].where, parameter[name[if_zero_r2], name[smallest_normal_preserving_sign_r2], name[input_r2]]]
variable[and_r2] assign[=] call[name[tf].bitwise.bitwise_and, parameter[name[input_no_zeros_r2], name[count_mask_r2]]]
variable[or_r2] assign[=] call[name[tf].bitwise.bitwise_or, parameter[name[and_r2], name[iota]]]
return[call[name[tf].bitcast, parameter[name[or_r2], name[tf].float32]]] | keyword[def] identifier[_create_make_unique] ( identifier[inputs] ):
literal[string]
keyword[if] identifier[inputs] . identifier[shape] . identifier[ndims] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[inputs] . identifier[shape] )
identifier[height] = identifier[inputs] . identifier[shape] [ literal[int] ]
identifier[width] = identifier[inputs] . identifier[shape] [ literal[int] ]
identifier[zeros] = identifier[tf] . identifier[zeros] ([ identifier[height] , identifier[width] ], identifier[dtype] = identifier[tf] . identifier[int32] )
identifier[log2_ceiling] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[math] . identifier[log] ( identifier[int] ( identifier[width] ), literal[int] )))
identifier[next_power_of_two] = literal[int] << identifier[log2_ceiling]
identifier[count_mask] =~( identifier[next_power_of_two] - literal[int] )
identifier[count_mask_r0] = identifier[tf] . identifier[constant] ( identifier[count_mask] )
identifier[count_mask_r2] = identifier[tf] . identifier[fill] ([ identifier[height] , identifier[width] ], identifier[count_mask_r0] )
identifier[smallest_normal] = literal[int] << literal[int]
identifier[smallest_normal_r0] = identifier[tf] . identifier[constant] ( identifier[smallest_normal] , identifier[dtype] = identifier[tf] . identifier[int32] )
identifier[smallest_normal_r2] = identifier[tf] . identifier[fill] ([ identifier[height] , identifier[width] ], identifier[smallest_normal_r0] )
identifier[low_bit_mask] =~( literal[int] << literal[int] )
identifier[low_bit_mask_r0] = identifier[tf] . identifier[constant] ( identifier[low_bit_mask] , identifier[dtype] = identifier[tf] . identifier[int32] )
identifier[low_bit_mask_r2] = identifier[tf] . identifier[fill] ([ identifier[height] , identifier[width] ], identifier[low_bit_mask_r0] )
identifier[iota] = identifier[tf] . identifier[tile] ( identifier[tf] . identifier[expand_dims] ( identifier[tf] . identifier[range] ( identifier[width] , identifier[dtype] = identifier[tf] . identifier[int32] ), literal[int] ),
[ identifier[height] , literal[int] ])
identifier[input_r2] = identifier[tf] . identifier[bitcast] ( identifier[inputs] , identifier[tf] . identifier[int32] )
identifier[abs_r2] = identifier[tf] . identifier[bitwise] . identifier[bitwise_and] ( identifier[input_r2] , identifier[low_bit_mask_r2] )
identifier[if_zero_r2] = identifier[tf] . identifier[equal] ( identifier[abs_r2] , identifier[zeros] )
identifier[smallest_normal_preserving_sign_r2] = identifier[tf] . identifier[bitwise] . identifier[bitwise_or] (
identifier[input_r2] , identifier[smallest_normal_r2] )
identifier[input_no_zeros_r2] = identifier[tf] . identifier[where] (
identifier[if_zero_r2] , identifier[smallest_normal_preserving_sign_r2] , identifier[input_r2] )
identifier[and_r2] = identifier[tf] . identifier[bitwise] . identifier[bitwise_and] ( identifier[input_no_zeros_r2] , identifier[count_mask_r2] )
identifier[or_r2] = identifier[tf] . identifier[bitwise] . identifier[bitwise_or] ( identifier[and_r2] , identifier[iota] )
keyword[return] identifier[tf] . identifier[bitcast] ( identifier[or_r2] , identifier[tf] . identifier[float32] ) | def _create_make_unique(inputs):
"""Replaces the lower bits of each element with iota.
The iota is used to derive the index, and also serves the purpose to
make each element unique to break ties.
Args:
inputs: A tensor with rank of 2 and dtype of tf.float32.
[batch_size, original_size].
Returns:
A tensor after element wise transformation, with dtype the same as inputs.
[batch_size, original_size].
Raises:
ValueError: If the rank of the input tensor does not equal 2.
"""
if inputs.shape.ndims != 2:
raise ValueError('Input of top_k_with_unique must be rank-2 but got: %s' % inputs.shape) # depends on [control=['if'], data=[]]
height = inputs.shape[0]
width = inputs.shape[1]
zeros = tf.zeros([height, width], dtype=tf.int32)
# Count_mask is used to mask away the low order bits to ensure that every
# element is distinct.
log2_ceiling = int(math.ceil(math.log(int(width), 2)))
next_power_of_two = 1 << log2_ceiling
count_mask = ~(next_power_of_two - 1)
count_mask_r0 = tf.constant(count_mask)
count_mask_r2 = tf.fill([height, width], count_mask_r0)
# Smallest_normal is the bit representation of the smallest positive normal
# floating point number. The sign is zero, exponent is one, and the fraction
# is zero.
smallest_normal = 1 << 23
smallest_normal_r0 = tf.constant(smallest_normal, dtype=tf.int32)
smallest_normal_r2 = tf.fill([height, width], smallest_normal_r0)
# Low_bit_mask is used to mask away the sign bit when computing the absolute
# value.
low_bit_mask = ~(1 << 31)
low_bit_mask_r0 = tf.constant(low_bit_mask, dtype=tf.int32)
low_bit_mask_r2 = tf.fill([height, width], low_bit_mask_r0)
iota = tf.tile(tf.expand_dims(tf.range(width, dtype=tf.int32), 0), [height, 1])
# Compare the absolute value with positive zero to handle negative zero.
input_r2 = tf.bitcast(inputs, tf.int32)
abs_r2 = tf.bitwise.bitwise_and(input_r2, low_bit_mask_r2)
if_zero_r2 = tf.equal(abs_r2, zeros)
smallest_normal_preserving_sign_r2 = tf.bitwise.bitwise_or(input_r2, smallest_normal_r2)
input_no_zeros_r2 = tf.where(if_zero_r2, smallest_normal_preserving_sign_r2, input_r2)
# Discard the low-order bits and replace with iota.
and_r2 = tf.bitwise.bitwise_and(input_no_zeros_r2, count_mask_r2)
or_r2 = tf.bitwise.bitwise_or(and_r2, iota)
return tf.bitcast(or_r2, tf.float32) |
def iter_all_users(self, number=-1, etag=None, per_page=None):
    """Iterate over every user in the order they signed up for GitHub.
    :param int number: (optional), number of users to return. Default: -1,
        returns all of them
    :param str etag: (optional), ETag from a previous request to the same
        endpoint
    :param int per_page: (optional), number of users to list per request
    :returns: generator of :class:`User <github3.users.User>`
    """
    # Pagination and ETag handling are delegated to the shared _iter helper.
    return self._iter(int(number), self._build_url('users'), User,
                      params={'per_page': per_page}, etag=etag)
constant[Iterate over every user in the order they signed up for GitHub.
:param int number: (optional), number of users to return. Default: -1,
returns all of them
:param str etag: (optional), ETag from a previous request to the same
endpoint
:param int per_page: (optional), number of users to list per request
:returns: generator of :class:`User <github3.users.User>`
]
variable[url] assign[=] call[name[self]._build_url, parameter[constant[users]]]
return[call[name[self]._iter, parameter[call[name[int], parameter[name[number]]], name[url], name[User]]]] | keyword[def] identifier[iter_all_users] ( identifier[self] , identifier[number] =- literal[int] , identifier[etag] = keyword[None] , identifier[per_page] = keyword[None] ):
literal[string]
identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] )
keyword[return] identifier[self] . identifier[_iter] ( identifier[int] ( identifier[number] ), identifier[url] , identifier[User] ,
identifier[params] ={ literal[string] : identifier[per_page] }, identifier[etag] = identifier[etag] ) | def iter_all_users(self, number=-1, etag=None, per_page=None):
"""Iterate over every user in the order they signed up for GitHub.
:param int number: (optional), number of users to return. Default: -1,
returns all of them
:param str etag: (optional), ETag from a previous request to the same
endpoint
:param int per_page: (optional), number of users to list per request
:returns: generator of :class:`User <github3.users.User>`
"""
url = self._build_url('users')
return self._iter(int(number), url, User, params={'per_page': per_page}, etag=etag) |
def submit(self, task):
    """Submit a task to the starter service, retrying on transport errors.

    :type task: Task
    :returns: Parsed JSON response from the starter service, or ``None``
        when running against the dev-mode stub URL.
    :raises IOError: If the service responds with a body that is not
        valid JSON.
    :raises requests.exceptions.ConnectionError:
    :raises requests.exceptions.Timeout: If every retry attempt fails to
        reach the service, the last transport error is re-raised.
    """
    if self.api_url == 'http://STUB_URL':
        logging.info(u'STARTER CLIENT DEV MODE Задача условно поставлена')
        return
    url = self.api_url + '/services/' + task.serviceId + '/tasks'
    last_e = None
    for idx in range(self.max_retries):
        try:
            resp = requests.post(
                url=url,
                data=json.dumps(task.__dict__),
                headers=self.headers,
                timeout=15
            )
            try:
                return json.loads(resp.text)
            except ValueError:
                # json.loads raises ValueError (JSONDecodeError) on a bad
                # payload. The original bare ``except:`` also swallowed
                # unrelated exceptions such as KeyboardInterrupt.
                raise IOError("Starter response read error: " + resp.text)
        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
            # On connection errors, back off briefly and try again.
            last_e = e
            sleep(3)
    raise last_e
constant[
:type task: Task
]
if compare[name[self].api_url equal[==] constant[http://STUB_URL]] begin[:]
call[name[logging].info, parameter[constant[STARTER CLIENT DEV MODE Задача условно поставлена]]]
return[None]
variable[url] assign[=] binary_operation[binary_operation[binary_operation[name[self].api_url + constant[/services/]] + name[task].serviceId] + constant[/tasks]]
variable[last_e] assign[=] constant[None]
for taget[name[idx]] in starred[call[name[range], parameter[name[self].max_retries]]] begin[:]
<ast.Try object at 0x7da1b168d570>
<ast.Raise object at 0x7da1b168e920> | keyword[def] identifier[submit] ( identifier[self] , identifier[task] ):
literal[string]
keyword[if] identifier[self] . identifier[api_url] == literal[string] :
identifier[logging] . identifier[info] ( literal[string] )
keyword[return]
identifier[url] = identifier[self] . identifier[api_url] + literal[string] + identifier[task] . identifier[serviceId] + literal[string]
identifier[last_e] = keyword[None]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[self] . identifier[max_retries] ):
keyword[try] :
identifier[resp] = identifier[requests] . identifier[post] (
identifier[url] = identifier[url] ,
identifier[data] = identifier[json] . identifier[dumps] ( identifier[task] . identifier[__dict__] ),
identifier[headers] = identifier[self] . identifier[headers] ,
identifier[timeout] = literal[int]
)
keyword[try] :
keyword[return] identifier[json] . identifier[loads] ( identifier[resp] . identifier[text] )
keyword[except] :
keyword[raise] identifier[IOError] ( literal[string] + identifier[resp] . identifier[text] )
keyword[except] ( identifier[requests] . identifier[exceptions] . identifier[ConnectionError] , identifier[requests] . identifier[exceptions] . identifier[Timeout] ) keyword[as] identifier[e] :
identifier[last_e] = identifier[e]
identifier[sleep] ( literal[int] )
keyword[raise] identifier[last_e] | def submit(self, task):
"""
:type task: Task
"""
if self.api_url == 'http://STUB_URL':
logging.info(u'STARTER CLIENT DEV MODE Задача условно поставлена')
return # depends on [control=['if'], data=[]]
url = self.api_url + '/services/' + task.serviceId + '/tasks'
last_e = None
for idx in range(self.max_retries):
try:
resp = requests.post(url=url, data=json.dumps(task.__dict__), headers=self.headers, timeout=15)
try:
return json.loads(resp.text) # depends on [control=['try'], data=[]]
except:
raise IOError('Starter response read error: ' + resp.text) # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
# При ошибках подключения пытаемся еще раз
last_e = e
sleep(3) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=[]]
raise last_e |
def rename(old_name, new_name):
    '''Rename the given virtual folder. This operation is irreversible!
    You cannot change the vfolders that are shared by other users,
    and the new name must be unique among all your accessible vfolders
    including the shared ones.
    OLD_NAME: The current name of a virtual folder.
    NEW_NAME: The new name of a virtual folder.
    '''
    with Session() as session:
        try:
            folder = session.VFolder(old_name)
            folder.rename(new_name)
            print_done('Renamed.')
        except Exception as e:
            # CLI boundary: report the failure and exit non-zero.
            print_error(e)
            sys.exit(1)
constant[Rename the given virtual folder. This operation is irreversible!
You cannot change the vfolders that are shared by other users,
and the new name must be unique among all your accessible vfolders
including the shared ones.
OLD_NAME: The current name of a virtual folder.
NEW_NAME: The new name of a virtual folder.
]
with call[name[Session], parameter[]] begin[:]
<ast.Try object at 0x7da18f8138e0> | keyword[def] identifier[rename] ( identifier[old_name] , identifier[new_name] ):
literal[string]
keyword[with] identifier[Session] () keyword[as] identifier[session] :
keyword[try] :
identifier[session] . identifier[VFolder] ( identifier[old_name] ). identifier[rename] ( identifier[new_name] )
identifier[print_done] ( literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print_error] ( identifier[e] )
identifier[sys] . identifier[exit] ( literal[int] ) | def rename(old_name, new_name):
"""Rename the given virtual folder. This operation is irreversible!
You cannot change the vfolders that are shared by other users,
and the new name must be unique among all your accessible vfolders
including the shared ones.
OLD_NAME: The current name of a virtual folder.
NEW_NAME: The new name of a virtual folder.
"""
with Session() as session:
try:
session.VFolder(old_name).rename(new_name)
print_done('Renamed.') # depends on [control=['try'], data=[]]
except Exception as e:
print_error(e)
sys.exit(1) # depends on [control=['except'], data=['e']] # depends on [control=['with'], data=['session']] |
def get_backend(tensor):
    """
    Takes a correct backend (e.g. numpy backend if tensor is numpy.ndarray) for a tensor.
    If needed, imports package and creates backend
    """
    # First try the backends that were already instantiated.
    for backend in _backends.values():
        if backend.is_appropriate_type(tensor):
            return backend
    # Otherwise lazily instantiate backends whose framework module has
    # already been imported by the caller's program.
    for subclass in AbstractBackend.__subclasses__():
        if _debug_importing:
            print('Testing for subclass of ', subclass)
        if subclass.framework_name in _backends:
            continue
        # Only create a backend if its module was already imported;
        # otherwise the framework is not in use and cannot be loaded here.
        if subclass.framework_name not in sys.modules:
            continue
        if _debug_importing:
            print('Imported backend for ', subclass.framework_name)
        backend = subclass()
        _backends[backend.framework_name] = backend
        if backend.is_appropriate_type(tensor):
            return backend
    raise RuntimeError('Tensor type unknown to einops {}'.format(type(tensor)))
constant[
Takes a correct backend (e.g. numpy backend if tensor is numpy.ndarray) for a tensor.
If needed, imports package and creates backend
]
for taget[tuple[[<ast.Name object at 0x7da20c6c45b0>, <ast.Name object at 0x7da20c6c7ee0>]]] in starred[call[name[_backends].items, parameter[]]] begin[:]
if call[name[backend].is_appropriate_type, parameter[name[tensor]]] begin[:]
return[name[backend]]
for taget[name[BackendSubclass]] in starred[call[name[AbstractBackend].__subclasses__, parameter[]]] begin[:]
if name[_debug_importing] begin[:]
call[name[print], parameter[constant[Testing for subclass of ], name[BackendSubclass]]]
if compare[name[BackendSubclass].framework_name <ast.NotIn object at 0x7da2590d7190> name[_backends]] begin[:]
if compare[name[BackendSubclass].framework_name in name[sys].modules] begin[:]
if name[_debug_importing] begin[:]
call[name[print], parameter[constant[Imported backend for ], name[BackendSubclass].framework_name]]
variable[backend] assign[=] call[name[BackendSubclass], parameter[]]
call[name[_backends]][name[backend].framework_name] assign[=] name[backend]
if call[name[backend].is_appropriate_type, parameter[name[tensor]]] begin[:]
return[name[backend]]
<ast.Raise object at 0x7da1b1113640> | keyword[def] identifier[get_backend] ( identifier[tensor] ):
literal[string]
keyword[for] identifier[framework_name] , identifier[backend] keyword[in] identifier[_backends] . identifier[items] ():
keyword[if] identifier[backend] . identifier[is_appropriate_type] ( identifier[tensor] ):
keyword[return] identifier[backend]
keyword[for] identifier[BackendSubclass] keyword[in] identifier[AbstractBackend] . identifier[__subclasses__] ():
keyword[if] identifier[_debug_importing] :
identifier[print] ( literal[string] , identifier[BackendSubclass] )
keyword[if] identifier[BackendSubclass] . identifier[framework_name] keyword[not] keyword[in] identifier[_backends] :
keyword[if] identifier[BackendSubclass] . identifier[framework_name] keyword[in] identifier[sys] . identifier[modules] :
keyword[if] identifier[_debug_importing] :
identifier[print] ( literal[string] , identifier[BackendSubclass] . identifier[framework_name] )
identifier[backend] = identifier[BackendSubclass] ()
identifier[_backends] [ identifier[backend] . identifier[framework_name] ]= identifier[backend]
keyword[if] identifier[backend] . identifier[is_appropriate_type] ( identifier[tensor] ):
keyword[return] identifier[backend]
keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[tensor] ))) | def get_backend(tensor):
"""
Takes a correct backend (e.g. numpy backend if tensor is numpy.ndarray) for a tensor.
If needed, imports package and creates backend
"""
for (framework_name, backend) in _backends.items():
if backend.is_appropriate_type(tensor):
return backend # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for BackendSubclass in AbstractBackend.__subclasses__():
if _debug_importing:
print('Testing for subclass of ', BackendSubclass) # depends on [control=['if'], data=[]]
if BackendSubclass.framework_name not in _backends:
# check that module was already imported. Otherwise it can't be imported
if BackendSubclass.framework_name in sys.modules:
if _debug_importing:
print('Imported backend for ', BackendSubclass.framework_name) # depends on [control=['if'], data=[]]
backend = BackendSubclass()
_backends[backend.framework_name] = backend
if backend.is_appropriate_type(tensor):
return backend # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['_backends']] # depends on [control=['for'], data=['BackendSubclass']]
raise RuntimeError('Tensor type unknown to einops {}'.format(type(tensor))) |
def newsnr_sgveto(snr, bchisq, sgchisq):
    """ Combined SNR derived from NewSNR and Sine-Gaussian Chisq"""
    nsnr = numpy.array(newsnr(snr, bchisq), ndmin=1)
    sg = numpy.array(sgchisq, ndmin=1)
    # Boolean mask of triggers whose sine-Gaussian chisq exceeds 4; those
    # are down-weighted by sqrt(sgchisq / 4).
    above = numpy.array(sg > 4, ndmin=1)
    if len(above):
        nsnr[above] = nsnr[above] / (sg[above] / 4.0) ** 0.5
    # If snr input is float, return a float. Otherwise return numpy array.
    return nsnr if hasattr(snr, '__len__') else nsnr[0]
constant[ Combined SNR derived from NewSNR and Sine-Gaussian Chisq]
variable[nsnr] assign[=] call[name[numpy].array, parameter[call[name[newsnr], parameter[name[snr], name[bchisq]]]]]
variable[sgchisq] assign[=] call[name[numpy].array, parameter[name[sgchisq]]]
variable[t] assign[=] call[name[numpy].array, parameter[compare[name[sgchisq] greater[>] constant[4]]]]
if call[name[len], parameter[name[t]]] begin[:]
call[name[nsnr]][name[t]] assign[=] binary_operation[call[name[nsnr]][name[t]] / binary_operation[binary_operation[call[name[sgchisq]][name[t]] / constant[4.0]] ** constant[0.5]]]
if call[name[hasattr], parameter[name[snr], constant[__len__]]] begin[:]
return[name[nsnr]] | keyword[def] identifier[newsnr_sgveto] ( identifier[snr] , identifier[bchisq] , identifier[sgchisq] ):
literal[string]
identifier[nsnr] = identifier[numpy] . identifier[array] ( identifier[newsnr] ( identifier[snr] , identifier[bchisq] ), identifier[ndmin] = literal[int] )
identifier[sgchisq] = identifier[numpy] . identifier[array] ( identifier[sgchisq] , identifier[ndmin] = literal[int] )
identifier[t] = identifier[numpy] . identifier[array] ( identifier[sgchisq] > literal[int] , identifier[ndmin] = literal[int] )
keyword[if] identifier[len] ( identifier[t] ):
identifier[nsnr] [ identifier[t] ]= identifier[nsnr] [ identifier[t] ]/( identifier[sgchisq] [ identifier[t] ]/ literal[int] )** literal[int]
keyword[if] identifier[hasattr] ( identifier[snr] , literal[string] ):
keyword[return] identifier[nsnr]
keyword[else] :
keyword[return] identifier[nsnr] [ literal[int] ] | def newsnr_sgveto(snr, bchisq, sgchisq):
""" Combined SNR derived from NewSNR and Sine-Gaussian Chisq"""
nsnr = numpy.array(newsnr(snr, bchisq), ndmin=1)
sgchisq = numpy.array(sgchisq, ndmin=1)
t = numpy.array(sgchisq > 4, ndmin=1)
if len(t):
nsnr[t] = nsnr[t] / (sgchisq[t] / 4.0) ** 0.5 # depends on [control=['if'], data=[]]
# If snr input is float, return a float. Otherwise return numpy array.
if hasattr(snr, '__len__'):
return nsnr # depends on [control=['if'], data=[]]
else:
return nsnr[0] |
def DeleteList(self, listName):
    """Delete a List with given name.

    Parameters:
        listName (str): Name of the SharePoint list to delete.

    Returns:
        The SOAP response body (str) when the request succeeds with
        HTTP 200; otherwise the raw response object so callers can
        inspect the failure.
    """
    # Build Request
    soap_request = soap('DeleteList')
    soap_request.add_parameter('listName', listName)
    self.last_request = str(soap_request)
    # Send Request
    response = self._session.post(url=self._url('Lists'),
                                  headers=self._headers('DeleteList'),
                                  data=str(soap_request),
                                  verify=self._verify_ssl,
                                  timeout=self.timeout)
    # Parse Request
    # Bug fix: the original compared the Response object itself to 200
    # (``response == 200``), which uses identity equality and is never
    # true; the HTTP status code must be checked instead.
    if response.status_code == 200:
        return response.text
    else:
        return response
constant[Delete a List with given name]
variable[soap_request] assign[=] call[name[soap], parameter[constant[DeleteList]]]
call[name[soap_request].add_parameter, parameter[constant[listName], name[listName]]]
name[self].last_request assign[=] call[name[str], parameter[name[soap_request]]]
variable[response] assign[=] call[name[self]._session.post, parameter[]]
if compare[name[response] equal[==] constant[200]] begin[:]
return[name[response].text] | keyword[def] identifier[DeleteList] ( identifier[self] , identifier[listName] ):
literal[string]
identifier[soap_request] = identifier[soap] ( literal[string] )
identifier[soap_request] . identifier[add_parameter] ( literal[string] , identifier[listName] )
identifier[self] . identifier[last_request] = identifier[str] ( identifier[soap_request] )
identifier[response] = identifier[self] . identifier[_session] . identifier[post] ( identifier[url] = identifier[self] . identifier[_url] ( literal[string] ),
identifier[headers] = identifier[self] . identifier[_headers] ( literal[string] ),
identifier[data] = identifier[str] ( identifier[soap_request] ),
identifier[verify] = identifier[self] . identifier[_verify_ssl] ,
identifier[timeout] = identifier[self] . identifier[timeout] )
keyword[if] identifier[response] == literal[int] :
keyword[return] identifier[response] . identifier[text]
keyword[else] :
keyword[return] identifier[response] | def DeleteList(self, listName):
"""Delete a List with given name"""
# Build Request
soap_request = soap('DeleteList')
soap_request.add_parameter('listName', listName)
self.last_request = str(soap_request)
# Send Request
response = self._session.post(url=self._url('Lists'), headers=self._headers('DeleteList'), data=str(soap_request), verify=self._verify_ssl, timeout=self.timeout)
# Parse Request
if response == 200:
return response.text # depends on [control=['if'], data=['response']]
else:
return response |
def create_oracle_cx_oracle(username, password, host, port, database, **kwargs):  # pragma: no cover
    """Build a SQLAlchemy engine connected to an Oracle database via the
    cx_oracle driver.

    Any extra keyword arguments are forwarded unchanged to ``create_engine``.
    """
    url = _create_oracle_cx_oracle(username, password, host, port, database)
    return create_engine(url, **kwargs)
constant[
create an engine connected to a oracle database using cx_oracle.
]
return[call[name[create_engine], parameter[call[name[_create_oracle_cx_oracle], parameter[name[username], name[password], name[host], name[port], name[database]]]]]] | keyword[def] identifier[create_oracle_cx_oracle] ( identifier[username] , identifier[password] , identifier[host] , identifier[port] , identifier[database] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[create_engine] (
identifier[_create_oracle_cx_oracle] ( identifier[username] , identifier[password] , identifier[host] , identifier[port] , identifier[database] ),
** identifier[kwargs]
) | def create_oracle_cx_oracle(username, password, host, port, database, **kwargs): # pragma: no cover
'\n create an engine connected to a oracle database using cx_oracle.\n '
return create_engine(_create_oracle_cx_oracle(username, password, host, port, database), **kwargs) |
def msg_callback(self, callback):
    """Register *callback* as the message handler.

    A non-callable argument clears the handler (sets it to ``None``).
    """
    self._msg_callback = callback if callable(callback) else None
constant[Set the message callback.]
if call[name[callable], parameter[name[callback]]] begin[:]
name[self]._msg_callback assign[=] name[callback] | keyword[def] identifier[msg_callback] ( identifier[self] , identifier[callback] ):
literal[string]
keyword[if] identifier[callable] ( identifier[callback] ):
identifier[self] . identifier[_msg_callback] = identifier[callback]
keyword[else] :
identifier[self] . identifier[_msg_callback] = keyword[None] | def msg_callback(self, callback):
"""Set the message callback."""
if callable(callback):
self._msg_callback = callback # depends on [control=['if'], data=[]]
else:
self._msg_callback = None |
def pool_running(name,
                 ptype=None,
                 target=None,
                 permissions=None,
                 source=None,
                 transient=False,
                 autostart=True,
                 connection=None,
                 username=None,
                 password=None):
    '''
    Defines and starts a new pool with specified arguments.
    .. versionadded:: 2019.2.0
    :param ptype: libvirt pool type
    :param target: full path to the target device or folder. (Default: ``None``)
    :param permissions: target permissions. See the **Permissions definition**
        section of the :py:func:`virt.pool_define
        <salt.module.virt.pool_define>` documentation for more details on this
        structure.
    :param source:
        dictionary containing keys matching the ``source_*`` parameters in function
        :py:func:`virt.pool_define <salt.modules.virt.pool_define>`.
    :param transient:
        when set to ``True``, the pool will be automatically undefined after
        being stopped. (Default: ``False``)
    :param autostart:
        Whether to start the pool when booting the host. (Default: ``True``)
    :param connection: libvirt connection URI, overriding defaults
    :param username: username to connect with, overriding defaults
    :param password: password to connect with, overriding defaults
    .. code-block:: yaml
        pool_name:
          virt.pool_define
    .. code-block:: yaml
        pool_name:
          virt.pool_define:
            - ptype: netfs
            - target: /mnt/cifs
            - permissions:
                - mode: 0770
                - owner: 1000
                - group: 100
            - source:
                - dir: samba_share
                - hosts:
                    one.example.com
                    two.example.com
                - format: cifs
            - autostart: True
    '''
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': ''
           }

    try:
        info = __salt__['virt.pool_info'](name, connection=connection, username=username, password=password)
        if info:
            # Pool already defined: only start it if it is not running yet.
            if info['state'] == 'running':
                ret['comment'] = 'Pool {0} exists and is running'.format(name)
            else:
                __salt__['virt.pool_start'](name, connection=connection, username=username, password=password)
                ret['changes'][name] = 'Pool started'
                ret['comment'] = 'Pool {0} started'.format(name)
        else:
            # Pool is unknown: define it, optionally mark it autostarted,
            # then build and start it.
            src = source or {}
            __salt__['virt.pool_define'](name,
                                         ptype=ptype,
                                         target=target,
                                         permissions=permissions,
                                         source_devices=src.get('devices', None),
                                         source_dir=src.get('dir', None),
                                         source_adapter=src.get('adapter', None),
                                         source_hosts=src.get('hosts', None),
                                         source_auth=src.get('auth', None),
                                         source_name=src.get('name', None),
                                         source_format=src.get('format', None),
                                         transient=transient,
                                         start=True,
                                         connection=connection,
                                         username=username,
                                         password=password)
            if autostart:
                # Fixed dead conditional: the original passed
                # ``state='on' if autostart else 'off'`` inside this guard,
                # where ``autostart`` is always truthy — the 'off' branch was
                # unreachable, so the state is unconditionally 'on' here.
                __salt__['virt.pool_set_autostart'](name,
                                                    state='on',
                                                    connection=connection,
                                                    username=username,
                                                    password=password)

            __salt__['virt.pool_build'](name,
                                        connection=connection,
                                        username=username,
                                        password=password)

            __salt__['virt.pool_start'](name,
                                        connection=connection,
                                        username=username,
                                        password=password)
            ret['changes'][name] = 'Pool defined and started'
            ret['comment'] = 'Pool {0} defined and started'.format(name)
    except libvirt.libvirtError as err:
        # Surface the libvirt error message and mark the state as failed.
        ret['comment'] = err.get_error_message()
        ret['result'] = False

    return ret
constant[
Defines and starts a new pool with specified arguments.
.. versionadded:: 2019.2.0
:param ptype: libvirt pool type
:param target: full path to the target device or folder. (Default: ``None``)
:param permissions: target permissions. See the **Permissions definition**
section of the :py:func:`virt.pool_define
<salt.module.virt.pool_define>` documentation for more details on this
structure.
:param source:
dictionary containing keys matching the ``source_*`` parameters in function
:py:func:`virt.pool_define <salt.modules.virt.pool_define>`.
:param transient:
when set to ``True``, the pool will be automatically undefined after
being stopped. (Default: ``False``)
:param autostart:
Whether to start the pool when booting the host. (Default: ``True``)
:param start:
When ``True``, define and start the pool, otherwise the pool will be
left stopped.
:param connection: libvirt connection URI, overriding defaults
:param username: username to connect with, overriding defaults
:param password: password to connect with, overriding defaults
.. code-block:: yaml
pool_name:
virt.pool_define
.. code-block:: yaml
pool_name:
virt.pool_define:
- ptype: netfs
- target: /mnt/cifs
- permissions:
- mode: 0770
- owner: 1000
- group: 100
- source:
- dir: samba_share
- hosts:
one.example.com
two.example.com
- format: cifs
- autostart: True
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18f8102e0>, <ast.Constant object at 0x7da18f810fd0>, <ast.Constant object at 0x7da18f811c60>, <ast.Constant object at 0x7da18f813a00>], [<ast.Name object at 0x7da18f810280>, <ast.Dict object at 0x7da18f8124a0>, <ast.Constant object at 0x7da18f8104c0>, <ast.Constant object at 0x7da18f810400>]]
<ast.Try object at 0x7da18f812320>
return[name[ret]] | keyword[def] identifier[pool_running] ( identifier[name] ,
identifier[ptype] = keyword[None] ,
identifier[target] = keyword[None] ,
identifier[permissions] = keyword[None] ,
identifier[source] = keyword[None] ,
identifier[transient] = keyword[False] ,
identifier[autostart] = keyword[True] ,
identifier[connection] = keyword[None] ,
identifier[username] = keyword[None] ,
identifier[password] = keyword[None] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : keyword[True] ,
literal[string] : literal[string]
}
keyword[try] :
identifier[info] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[connection] = identifier[connection] , identifier[username] = identifier[username] , identifier[password] = identifier[password] )
keyword[if] identifier[info] :
keyword[if] identifier[info] [ literal[string] ]== literal[string] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[connection] = identifier[connection] , identifier[username] = identifier[username] , identifier[password] = identifier[password] )
identifier[ret] [ literal[string] ][ identifier[name] ]= literal[string]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[__salt__] [ literal[string] ]( identifier[name] ,
identifier[ptype] = identifier[ptype] ,
identifier[target] = identifier[target] ,
identifier[permissions] = identifier[permissions] ,
identifier[source_devices] =( identifier[source] keyword[or] {}). identifier[get] ( literal[string] , keyword[None] ),
identifier[source_dir] =( identifier[source] keyword[or] {}). identifier[get] ( literal[string] , keyword[None] ),
identifier[source_adapter] =( identifier[source] keyword[or] {}). identifier[get] ( literal[string] , keyword[None] ),
identifier[source_hosts] =( identifier[source] keyword[or] {}). identifier[get] ( literal[string] , keyword[None] ),
identifier[source_auth] =( identifier[source] keyword[or] {}). identifier[get] ( literal[string] , keyword[None] ),
identifier[source_name] =( identifier[source] keyword[or] {}). identifier[get] ( literal[string] , keyword[None] ),
identifier[source_format] =( identifier[source] keyword[or] {}). identifier[get] ( literal[string] , keyword[None] ),
identifier[transient] = identifier[transient] ,
identifier[start] = keyword[True] ,
identifier[connection] = identifier[connection] ,
identifier[username] = identifier[username] ,
identifier[password] = identifier[password] )
keyword[if] identifier[autostart] :
identifier[__salt__] [ literal[string] ]( identifier[name] ,
identifier[state] = literal[string] keyword[if] identifier[autostart] keyword[else] literal[string] ,
identifier[connection] = identifier[connection] ,
identifier[username] = identifier[username] ,
identifier[password] = identifier[password] )
identifier[__salt__] [ literal[string] ]( identifier[name] ,
identifier[connection] = identifier[connection] ,
identifier[username] = identifier[username] ,
identifier[password] = identifier[password] )
identifier[__salt__] [ literal[string] ]( identifier[name] ,
identifier[connection] = identifier[connection] ,
identifier[username] = identifier[username] ,
identifier[password] = identifier[password] )
identifier[ret] [ literal[string] ][ identifier[name] ]= literal[string]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[except] identifier[libvirt] . identifier[libvirtError] keyword[as] identifier[err] :
identifier[ret] [ literal[string] ]= identifier[err] . identifier[get_error_message] ()
identifier[ret] [ literal[string] ]= keyword[False]
keyword[return] identifier[ret] | def pool_running(name, ptype=None, target=None, permissions=None, source=None, transient=False, autostart=True, connection=None, username=None, password=None):
"""
Defines and starts a new pool with specified arguments.
.. versionadded:: 2019.2.0
:param ptype: libvirt pool type
:param target: full path to the target device or folder. (Default: ``None``)
:param permissions: target permissions. See the **Permissions definition**
section of the :py:func:`virt.pool_define
<salt.module.virt.pool_define>` documentation for more details on this
structure.
:param source:
dictionary containing keys matching the ``source_*`` parameters in function
:py:func:`virt.pool_define <salt.modules.virt.pool_define>`.
:param transient:
when set to ``True``, the pool will be automatically undefined after
being stopped. (Default: ``False``)
:param autostart:
Whether to start the pool when booting the host. (Default: ``True``)
:param start:
When ``True``, define and start the pool, otherwise the pool will be
left stopped.
:param connection: libvirt connection URI, overriding defaults
:param username: username to connect with, overriding defaults
:param password: password to connect with, overriding defaults
.. code-block:: yaml
pool_name:
virt.pool_define
.. code-block:: yaml
pool_name:
virt.pool_define:
- ptype: netfs
- target: /mnt/cifs
- permissions:
- mode: 0770
- owner: 1000
- group: 100
- source:
- dir: samba_share
- hosts:
one.example.com
two.example.com
- format: cifs
- autostart: True
"""
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
try:
info = __salt__['virt.pool_info'](name, connection=connection, username=username, password=password)
if info:
if info['state'] == 'running':
ret['comment'] = 'Pool {0} exists and is running'.format(name) # depends on [control=['if'], data=[]]
else:
__salt__['virt.pool_start'](name, connection=connection, username=username, password=password)
ret['changes'][name] = 'Pool started'
ret['comment'] = 'Pool {0} started'.format(name) # depends on [control=['if'], data=[]]
else:
__salt__['virt.pool_define'](name, ptype=ptype, target=target, permissions=permissions, source_devices=(source or {}).get('devices', None), source_dir=(source or {}).get('dir', None), source_adapter=(source or {}).get('adapter', None), source_hosts=(source or {}).get('hosts', None), source_auth=(source or {}).get('auth', None), source_name=(source or {}).get('name', None), source_format=(source or {}).get('format', None), transient=transient, start=True, connection=connection, username=username, password=password)
if autostart:
__salt__['virt.pool_set_autostart'](name, state='on' if autostart else 'off', connection=connection, username=username, password=password) # depends on [control=['if'], data=[]]
__salt__['virt.pool_build'](name, connection=connection, username=username, password=password)
__salt__['virt.pool_start'](name, connection=connection, username=username, password=password)
ret['changes'][name] = 'Pool defined and started'
ret['comment'] = 'Pool {0} defined and started'.format(name) # depends on [control=['try'], data=[]]
except libvirt.libvirtError as err:
ret['comment'] = err.get_error_message()
ret['result'] = False # depends on [control=['except'], data=['err']]
return ret |
def config(config, skip_defaults):
    """
    Generate a configuration file from config specifications
    """
    configurator = ClickConfigurator(vodka.plugin, skip_defaults=skip_defaults)
    configurator.configure(vodka.config.instance, vodka.config.InstanceHandler)

    # Resolve the output location; a bare directory falls back to a
    # config.yaml file inside it.
    try:
        dst = munge_config.parse_url(config)
    except ValueError:
        config = os.path.join(config, "config.yaml")
        dst = munge_config.parse_url(config)

    target_dir = os.path.dirname(config)
    if target_dir and not os.path.exists(target_dir):
        os.makedirs(target_dir)

    # Serialize the instance config to the resolved destination.
    dst.cls().dumpu(vodka.config.instance, dst.url.path)

    if configurator.action_required:
        # Some required values could not be collected interactively;
        # list them for manual editing.
        click.echo("")
        click.echo("not all required values could be set by this script, please manually edit the config and set the following values")
        click.echo("")
        for item in configurator.action_required:
            click.echo("- %s" % item)
        click.echo("")

    click.echo("Config written to %s" % dst.url.path)
constant[
Generates configuration file from config specifications
]
variable[configurator] assign[=] call[name[ClickConfigurator], parameter[name[vodka].plugin]]
call[name[configurator].configure, parameter[name[vodka].config.instance, name[vodka].config.InstanceHandler]]
<ast.Try object at 0x7da1b0a84190>
variable[config_dir] assign[=] call[name[os].path.dirname, parameter[name[config]]]
if <ast.BoolOp object at 0x7da1b0a86650> begin[:]
call[name[os].makedirs, parameter[name[config_dir]]]
call[call[name[dst].cls, parameter[]].dumpu, parameter[name[vodka].config.instance, name[dst].url.path]]
if name[configurator].action_required begin[:]
call[name[click].echo, parameter[constant[]]]
call[name[click].echo, parameter[constant[not all required values could be set by this script, please manually edit the config and set the following values]]]
call[name[click].echo, parameter[constant[]]]
for taget[name[item]] in starred[name[configurator].action_required] begin[:]
call[name[click].echo, parameter[binary_operation[constant[- %s] <ast.Mod object at 0x7da2590d6920> name[item]]]]
call[name[click].echo, parameter[constant[]]]
call[name[click].echo, parameter[binary_operation[constant[Config written to %s] <ast.Mod object at 0x7da2590d6920> name[dst].url.path]]] | keyword[def] identifier[config] ( identifier[config] , identifier[skip_defaults] ):
literal[string]
identifier[configurator] = identifier[ClickConfigurator] (
identifier[vodka] . identifier[plugin] ,
identifier[skip_defaults] = identifier[skip_defaults]
)
identifier[configurator] . identifier[configure] ( identifier[vodka] . identifier[config] . identifier[instance] , identifier[vodka] . identifier[config] . identifier[InstanceHandler] )
keyword[try] :
identifier[dst] = identifier[munge_config] . identifier[parse_url] ( identifier[config] )
keyword[except] identifier[ValueError] :
identifier[config] = identifier[os] . identifier[path] . identifier[join] ( identifier[config] , literal[string] )
identifier[dst] = identifier[munge_config] . identifier[parse_url] ( identifier[config] )
identifier[config_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[config] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[config_dir] ) keyword[and] identifier[config_dir] :
identifier[os] . identifier[makedirs] ( identifier[config_dir] )
identifier[dst] . identifier[cls] (). identifier[dumpu] ( identifier[vodka] . identifier[config] . identifier[instance] , identifier[dst] . identifier[url] . identifier[path] )
keyword[if] identifier[configurator] . identifier[action_required] :
identifier[click] . identifier[echo] ( literal[string] )
identifier[click] . identifier[echo] ( literal[string] )
identifier[click] . identifier[echo] ( literal[string] )
keyword[for] identifier[item] keyword[in] identifier[configurator] . identifier[action_required] :
identifier[click] . identifier[echo] ( literal[string] % identifier[item] )
identifier[click] . identifier[echo] ( literal[string] )
identifier[click] . identifier[echo] ( literal[string] % identifier[dst] . identifier[url] . identifier[path] ) | def config(config, skip_defaults):
"""
Generates configuration file from config specifications
"""
configurator = ClickConfigurator(vodka.plugin, skip_defaults=skip_defaults)
configurator.configure(vodka.config.instance, vodka.config.InstanceHandler)
try:
dst = munge_config.parse_url(config) # depends on [control=['try'], data=[]]
except ValueError:
config = os.path.join(config, 'config.yaml')
dst = munge_config.parse_url(config) # depends on [control=['except'], data=[]]
config_dir = os.path.dirname(config)
if not os.path.exists(config_dir) and config_dir:
os.makedirs(config_dir) # depends on [control=['if'], data=[]]
dst.cls().dumpu(vodka.config.instance, dst.url.path)
if configurator.action_required:
click.echo('')
click.echo('not all required values could be set by this script, please manually edit the config and set the following values')
click.echo('')
for item in configurator.action_required:
click.echo('- %s' % item) # depends on [control=['for'], data=['item']]
click.echo('') # depends on [control=['if'], data=[]]
click.echo('Config written to %s' % dst.url.path) |
def get_related(self):
    """Return the resources this one aggregates (ore:aggregates).

    Produces a list of URIs parsed via ``self.repo.parse_uri``; yields an
    empty list when the resource does not exist or carries no
    ``ore:aggregates`` triples.
    """
    if not self.exists:
        return []
    triples = self.rdf.triples
    if not hasattr(triples, 'ore') or not hasattr(triples.ore, 'aggregates'):
        return []
    return [self.repo.parse_uri(uri) for uri in triples.ore.aggregates]
constant[
get ore:aggregates for this resource, optionally retrieving resource payload
Args:
retrieve (bool): if True, issue .refresh() on resource thereby confirming existence and retrieving payload
]
if <ast.BoolOp object at 0x7da1b24e2680> begin[:]
variable[related] assign[=] <ast.ListComp object at 0x7da1b2353d90>
return[name[related]] | keyword[def] identifier[get_related] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[exists] keyword[and] identifier[hasattr] ( identifier[self] . identifier[rdf] . identifier[triples] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[self] . identifier[rdf] . identifier[triples] . identifier[ore] , literal[string] ):
identifier[related] =[ identifier[self] . identifier[repo] . identifier[parse_uri] ( identifier[uri] ) keyword[for] identifier[uri] keyword[in] identifier[self] . identifier[rdf] . identifier[triples] . identifier[ore] . identifier[aggregates] ]
keyword[return] identifier[related]
keyword[else] :
keyword[return] [] | def get_related(self):
"""
get ore:aggregates for this resource, optionally retrieving resource payload
Args:
retrieve (bool): if True, issue .refresh() on resource thereby confirming existence and retrieving payload
"""
if self.exists and hasattr(self.rdf.triples, 'ore') and hasattr(self.rdf.triples.ore, 'aggregates'):
related = [self.repo.parse_uri(uri) for uri in self.rdf.triples.ore.aggregates] # return
return related # depends on [control=['if'], data=[]]
else:
return [] |
def __send_command(self, command, args=[]):
    """Serialize a raw command as JSON and push it over the websocket."""
    # NOTE(review): the mutable default ``args=[]`` is kept for interface
    # compatibility; it is only read here, never mutated.
    payload = {"op": command, "args": args}
    self.ws.send(json.dumps(payload))
constant[Send a raw command.]
call[name[self].ws.send, parameter[call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da20e954f70>, <ast.Constant object at 0x7da20e956bf0>], [<ast.Name object at 0x7da20e9575e0>, <ast.Name object at 0x7da20e9568c0>]]]]]] | keyword[def] identifier[__send_command] ( identifier[self] , identifier[command] , identifier[args] =[]):
literal[string]
identifier[self] . identifier[ws] . identifier[send] ( identifier[json] . identifier[dumps] ({ literal[string] : identifier[command] , literal[string] : identifier[args] })) | def __send_command(self, command, args=[]):
"""Send a raw command."""
self.ws.send(json.dumps({'op': command, 'args': args})) |
def parse_definite_clause(s):
    """Split the definite clause *s* into (antecedents, consequent)."""
    assert is_definite_clause(s)
    if not is_symbol(s.op):
        # An implication: the left side holds the conjoined antecedents.
        antecedent, consequent = s.args
        return conjuncts(antecedent), consequent
    # An atomic clause has no antecedents.
    return [], s
constant[Return the antecedents and the consequent of a definite clause.]
assert[call[name[is_definite_clause], parameter[name[s]]]]
if call[name[is_symbol], parameter[name[s].op]] begin[:]
return[tuple[[<ast.List object at 0x7da207f00190>, <ast.Name object at 0x7da207f01c90>]]] | keyword[def] identifier[parse_definite_clause] ( identifier[s] ):
literal[string]
keyword[assert] identifier[is_definite_clause] ( identifier[s] )
keyword[if] identifier[is_symbol] ( identifier[s] . identifier[op] ):
keyword[return] [], identifier[s]
keyword[else] :
identifier[antecedent] , identifier[consequent] = identifier[s] . identifier[args]
keyword[return] identifier[conjuncts] ( identifier[antecedent] ), identifier[consequent] | def parse_definite_clause(s):
"""Return the antecedents and the consequent of a definite clause."""
assert is_definite_clause(s)
if is_symbol(s.op):
return ([], s) # depends on [control=['if'], data=[]]
else:
(antecedent, consequent) = s.args
return (conjuncts(antecedent), consequent) |
def get_config_directory(appname):
    """
    Return the OS-specific configuration directory for an application.
    :type appname: str
    :arg appname: capitalized name of the application
    """
    system = platform.system().lower()
    if system == 'windows':
        base = os.getenv('APPDATA') or '~'
        path = os.path.join(base, appname, appname)
    elif system == 'darwin':
        path = os.path.join('~', 'Library', 'Application Support', appname)
    else:
        # XDG convention on Linux and other POSIX systems.
        base = os.getenv('XDG_CONFIG_HOME') or '~/.config'
        path = os.path.join(base, appname.lower())
    return os.path.expanduser(path)
constant[
Get OS-specific configuration directory.
:type appname: str
:arg appname: capitalized name of the application
]
if compare[call[call[name[platform].system, parameter[]].lower, parameter[]] equal[==] constant[windows]] begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[<ast.BoolOp object at 0x7da1b0b72860>, name[appname], name[appname]]]
return[call[name[os].path.expanduser, parameter[name[path]]]] | keyword[def] identifier[get_config_directory] ( identifier[appname] ):
literal[string]
keyword[if] identifier[platform] . identifier[system] (). identifier[lower] ()== literal[string] :
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getenv] ( literal[string] ) keyword[or] literal[string] , identifier[appname] , identifier[appname] )
keyword[elif] identifier[platform] . identifier[system] (). identifier[lower] ()== literal[string] :
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] , identifier[appname] )
keyword[else] :
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getenv] ( literal[string] ) keyword[or] literal[string] ,
identifier[appname] . identifier[lower] ())
keyword[return] identifier[os] . identifier[path] . identifier[expanduser] ( identifier[path] ) | def get_config_directory(appname):
"""
Get OS-specific configuration directory.
:type appname: str
:arg appname: capitalized name of the application
"""
if platform.system().lower() == 'windows':
path = os.path.join(os.getenv('APPDATA') or '~', appname, appname) # depends on [control=['if'], data=[]]
elif platform.system().lower() == 'darwin':
path = os.path.join('~', 'Library', 'Application Support', appname) # depends on [control=['if'], data=[]]
else:
path = os.path.join(os.getenv('XDG_CONFIG_HOME') or '~/.config', appname.lower())
return os.path.expanduser(path) |
def apply_T1(word):
    '''There is a syllable boundary in front of every CV-sequence.'''
    # split consonants and vowels: 'balloon' -> ['b', 'a', 'll', 'oo', 'n']
    # (re.split keeps the captured vowel groups; empty strings are dropped)
    WORD = [w for w in re.split('([ieAyOauo]+)', word) if w]

    # Number of word-internal consonant runs processed so far; its parity
    # steers where a cluster is split (see the is_cluster branch below).
    count = 0

    for i, v in enumerate(WORD):
        if i == 0 and is_consonant(v[0]):
            # Word-initial consonant run: no syllable boundary before it.
            continue

        elif is_consonant(v[0]) and i + 1 != len(WORD):
            # Word-internal consonant run (a word-final run is left intact).

            if is_cluster(v):  # WSP
                # Recognized consonant cluster: the split position
                # alternates with the parity of runs seen so far.
                if count % 2 == 0:
                    WORD[i] = v[0] + '.' + v[1:]  # CC > C.C, CCC > C.CC

                else:
                    WORD[i] = '.' + v  # CC > .CC, CCC > .CCC

            # elif is_sonorant(v[0]) and is_cluster(v[1:]):  # NEW
            #     if count % 2 == 0:
            #         WORD[i] = v[0:2] + '.' + v[2:]
            #     else:
            #         WORD[i] = v[0] + '.' + v[1:]

            else:
                # Default: boundary before the last consonant of the run.
                WORD[i] = v[:-1] + '.' + v[-1]  # CC > C.C, CCC > CC.C

            count += 1

    WORD = ''.join(WORD)
    # Tag the rule name only when the rule actually changed the word.
    RULE = ' T1' if word != WORD else ''

    return WORD, RULE
constant[There is a syllable boundary in front of every CV-sequence.]
variable[WORD] assign[=] <ast.ListComp object at 0x7da1b11a6320>
variable[count] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b11a7d00>, <ast.Name object at 0x7da1b11a4100>]]] in starred[call[name[enumerate], parameter[name[WORD]]]] begin[:]
if <ast.BoolOp object at 0x7da1b11a4d00> begin[:]
continue
variable[WORD] assign[=] call[constant[].join, parameter[name[WORD]]]
variable[RULE] assign[=] <ast.IfExp object at 0x7da1b11a49a0>
return[tuple[[<ast.Name object at 0x7da1b11a6200>, <ast.Name object at 0x7da1b11a5ff0>]]] | keyword[def] identifier[apply_T1] ( identifier[word] ):
literal[string]
identifier[WORD] =[ identifier[w] keyword[for] identifier[w] keyword[in] identifier[re] . identifier[split] ( literal[string] , identifier[word] ) keyword[if] identifier[w] ]
identifier[count] = literal[int]
keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[WORD] ):
keyword[if] identifier[i] == literal[int] keyword[and] identifier[is_consonant] ( identifier[v] [ literal[int] ]):
keyword[continue]
keyword[elif] identifier[is_consonant] ( identifier[v] [ literal[int] ]) keyword[and] identifier[i] + literal[int] != identifier[len] ( identifier[WORD] ):
keyword[if] identifier[is_cluster] ( identifier[v] ):
keyword[if] identifier[count] % literal[int] == literal[int] :
identifier[WORD] [ identifier[i] ]= identifier[v] [ literal[int] ]+ literal[string] + identifier[v] [ literal[int] :]
keyword[else] :
identifier[WORD] [ identifier[i] ]= literal[string] + identifier[v]
keyword[else] :
identifier[WORD] [ identifier[i] ]= identifier[v] [:- literal[int] ]+ literal[string] + identifier[v] [- literal[int] ]
identifier[count] += literal[int]
identifier[WORD] = literal[string] . identifier[join] ( identifier[WORD] )
identifier[RULE] = literal[string] keyword[if] identifier[word] != identifier[WORD] keyword[else] literal[string]
keyword[return] identifier[WORD] , identifier[RULE] | def apply_T1(word):
"""There is a syllable boundary in front of every CV-sequence."""
# split consonants and vowels: 'balloon' -> ['b', 'a', 'll', 'oo', 'n']
WORD = [w for w in re.split('([ieAyOauo]+)', word) if w]
count = 0
for (i, v) in enumerate(WORD):
if i == 0 and is_consonant(v[0]):
continue # depends on [control=['if'], data=[]]
elif is_consonant(v[0]) and i + 1 != len(WORD):
if is_cluster(v): # WSP
if count % 2 == 0:
WORD[i] = v[0] + '.' + v[1:] # CC > C.C, CCC > C.CC # depends on [control=['if'], data=[]]
else:
WORD[i] = '.' + v # CC > .CC, CCC > .CCC # depends on [control=['if'], data=[]]
else:
# elif is_sonorant(v[0]) and is_cluster(v[1:]): # NEW
# if count % 2 == 0:
# WORD[i] = v[0:2] + '.' + v[2:]
# else:
# WORD[i] = v[0] + '.' + v[1:]
WORD[i] = v[:-1] + '.' + v[-1] # CC > C.C, CCC > CC.C
count += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
WORD = ''.join(WORD)
RULE = ' T1' if word != WORD else ''
return (WORD, RULE) |
def _print_summary(case, summary):
    """ Show some statistics from the run """
    for dof, data in summary.items():
        # Unpack the per-DOF statistics gathered during the run.
        bit_matches = data["Bit for Bit"]
        config_matches = data["Configurations"]
        parsed_files = data["Std. Out Files"]
        print("    " + case + " " + str(dof))
        print("    --------------------")
        print("     Bit for bit matches   : " + str(bit_matches[0]) + " of " + str(bit_matches[1]))
        print("     Configuration matches : " + str(config_matches[0]) + " of " + str(config_matches[1]))
        print("     Std. Out files parsed : " + str(parsed_files))
        print("")
constant[ Show some statistics from the run ]
for taget[tuple[[<ast.Name object at 0x7da18f09f3d0>, <ast.Name object at 0x7da18f09c040>]]] in starred[call[name[summary].items, parameter[]]] begin[:]
variable[b4b] assign[=] call[name[data]][constant[Bit for Bit]]
variable[conf] assign[=] call[name[data]][constant[Configurations]]
variable[stdout] assign[=] call[name[data]][constant[Std. Out Files]]
call[name[print], parameter[binary_operation[binary_operation[binary_operation[constant[ ] + name[case]] + constant[ ]] + call[name[str], parameter[name[dof]]]]]]
call[name[print], parameter[constant[ --------------------]]]
call[name[print], parameter[binary_operation[binary_operation[binary_operation[constant[ Bit for bit matches : ] + call[name[str], parameter[call[name[b4b]][constant[0]]]]] + constant[ of ]] + call[name[str], parameter[call[name[b4b]][constant[1]]]]]]]
call[name[print], parameter[binary_operation[binary_operation[binary_operation[constant[ Configuration matches : ] + call[name[str], parameter[call[name[conf]][constant[0]]]]] + constant[ of ]] + call[name[str], parameter[call[name[conf]][constant[1]]]]]]]
call[name[print], parameter[binary_operation[constant[ Std. Out files parsed : ] + call[name[str], parameter[name[stdout]]]]]]
call[name[print], parameter[constant[]]] | keyword[def] identifier[_print_summary] ( identifier[case] , identifier[summary] ):
literal[string]
keyword[for] identifier[dof] , identifier[data] keyword[in] identifier[summary] . identifier[items] ():
identifier[b4b] = identifier[data] [ literal[string] ]
identifier[conf] = identifier[data] [ literal[string] ]
identifier[stdout] = identifier[data] [ literal[string] ]
identifier[print] ( literal[string] + identifier[case] + literal[string] + identifier[str] ( identifier[dof] ))
identifier[print] ( literal[string] )
identifier[print] ( literal[string] + identifier[str] ( identifier[b4b] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[b4b] [ literal[int] ]))
identifier[print] ( literal[string] + identifier[str] ( identifier[conf] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[conf] [ literal[int] ]))
identifier[print] ( literal[string] + identifier[str] ( identifier[stdout] ))
identifier[print] ( literal[string] ) | def _print_summary(case, summary):
""" Show some statistics from the run """
for (dof, data) in summary.items():
b4b = data['Bit for Bit']
conf = data['Configurations']
stdout = data['Std. Out Files']
print(' ' + case + ' ' + str(dof))
print(' --------------------')
print(' Bit for bit matches : ' + str(b4b[0]) + ' of ' + str(b4b[1]))
print(' Configuration matches : ' + str(conf[0]) + ' of ' + str(conf[1]))
print(' Std. Out files parsed : ' + str(stdout))
print('') # depends on [control=['for'], data=[]] |
def gelu(x):
    """Approximate the GELU activation.

    Uses the tanh-based approximation of Hendrycks & Gimpel.
    See: https://arxiv.org/pdf/1606.08415.pdf
    """
    inner = math.sqrt(2.0 / math.pi) * (x + 0.044715 * K.pow(x, 3))
    return x * 0.5 * (K.tanh(inner) + 1.0)
constant[An approximation of gelu.
See: https://arxiv.org/pdf/1606.08415.pdf
]
return[binary_operation[binary_operation[constant[0.5] * name[x]] * binary_operation[constant[1.0] + call[name[K].tanh, parameter[binary_operation[call[name[math].sqrt, parameter[binary_operation[constant[2.0] / name[math].pi]]] * binary_operation[name[x] + binary_operation[constant[0.044715] * call[name[K].pow, parameter[name[x], constant[3]]]]]]]]]]] | keyword[def] identifier[gelu] ( identifier[x] ):
literal[string]
keyword[return] literal[int] * identifier[x] *( literal[int] + identifier[K] . identifier[tanh] ( identifier[math] . identifier[sqrt] ( literal[int] / identifier[math] . identifier[pi] )*( identifier[x] + literal[int] * identifier[K] . identifier[pow] ( identifier[x] , literal[int] )))) | def gelu(x):
"""An approximation of gelu.
See: https://arxiv.org/pdf/1606.08415.pdf
"""
return 0.5 * x * (1.0 + K.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * K.pow(x, 3)))) |
def _sim_prediction(self, lmda, Y, scores, h, t_params, simulations):
    """ Simulates a h-step ahead mean prediction

    Monte-Carlo forecast: each simulation rolls the score-driven
    recursion forward h steps, bootstrapping future scores and
    observations from their historical arrays.

    Parameters
    ----------
    lmda : np.array
        The past predicted values (presumably log-scale, since
        np.exp(lmda/2) is used as a scale below — confirm)
    Y : np.array
        The past data
    scores : np.array
        The past scores
    h : int
        How many steps ahead for the prediction
    t_params : np.array
        A vector of (transformed) latent variables
    simulations : int
        How many simulations to perform

    Returns
    ----------
    np.array
        Matrix of simulations with shape (h, simulations)
    """
    sim_vector = np.zeros([simulations,h])
    for n in range(0,simulations):
        # Work on copies so each simulation extends its own history
        # without mutating the caller's arrays.
        lmda_exp = lmda.copy()
        scores_exp = scores.copy()
        Y_exp = Y.copy()
        # Roll the recursion forward over the h forecast periods
        for t in range(0,h):
            new_value = t_params[0]  # constant term
            if self.p != 0:
                # Autoregressive terms on past predicted values
                for j in range(1,self.p+1):
                    new_value += t_params[j]*lmda_exp[-j]
            if self.q != 0:
                # Score-driven (moving-average style) terms
                for k in range(1,self.q+1):
                    new_value += t_params[k+self.p]*scores_exp[-k]
            if self.leverage is True:
                # Leverage term: sign of the negated demeaned return
                # scales the most recent score contribution.
                new_value += t_params[1+self.p+self.q]*np.sign(-(Y_exp[-1]-t_params[-2]-t_params[-1]*np.exp(lmda_exp[-1]/2.0)))*(scores_exp[-1]+1)
            lmda_exp = np.append(lmda_exp,[new_value]) # For indexing consistency
            scores_exp = np.append(scores_exp,scores[np.random.randint(scores.shape[0])]) # expectation of score is zero
            Y_exp = np.append(Y_exp,Y[np.random.randint(Y.shape[0])]) # bootstrap returns
        sim_vector[n] = lmda_exp[-h:]  # keep only the h simulated steps
    return np.transpose(sim_vector)
constant[ Simulates a h-step ahead mean prediction
Parameters
----------
lmda : np.array
The past predicted values
Y : np.array
The past data
scores : np.array
The past scores
h : int
How many steps ahead for the prediction
t_params : np.array
A vector of (transformed) latent variables
simulations : int
How many simulations to perform
Returns
----------
Matrix of simulations
]
variable[sim_vector] assign[=] call[name[np].zeros, parameter[list[[<ast.Name object at 0x7da204347d00>, <ast.Name object at 0x7da204345150>]]]]
for taget[name[n]] in starred[call[name[range], parameter[constant[0], name[simulations]]]] begin[:]
variable[lmda_exp] assign[=] call[name[lmda].copy, parameter[]]
variable[scores_exp] assign[=] call[name[scores].copy, parameter[]]
variable[Y_exp] assign[=] call[name[Y].copy, parameter[]]
for taget[name[t]] in starred[call[name[range], parameter[constant[0], name[h]]]] begin[:]
variable[new_value] assign[=] call[name[t_params]][constant[0]]
if compare[name[self].p not_equal[!=] constant[0]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[constant[1], binary_operation[name[self].p + constant[1]]]]] begin[:]
<ast.AugAssign object at 0x7da204345960>
if compare[name[self].q not_equal[!=] constant[0]] begin[:]
for taget[name[k]] in starred[call[name[range], parameter[constant[1], binary_operation[name[self].q + constant[1]]]]] begin[:]
<ast.AugAssign object at 0x7da204345360>
if compare[name[self].leverage is constant[True]] begin[:]
<ast.AugAssign object at 0x7da204345b10>
variable[lmda_exp] assign[=] call[name[np].append, parameter[name[lmda_exp], list[[<ast.Name object at 0x7da18f58ed10>]]]]
variable[scores_exp] assign[=] call[name[np].append, parameter[name[scores_exp], call[name[scores]][call[name[np].random.randint, parameter[call[name[scores].shape][constant[0]]]]]]]
variable[Y_exp] assign[=] call[name[np].append, parameter[name[Y_exp], call[name[Y]][call[name[np].random.randint, parameter[call[name[Y].shape][constant[0]]]]]]]
call[name[sim_vector]][name[n]] assign[=] call[name[lmda_exp]][<ast.Slice object at 0x7da18f58c670>]
return[call[name[np].transpose, parameter[name[sim_vector]]]] | keyword[def] identifier[_sim_prediction] ( identifier[self] , identifier[lmda] , identifier[Y] , identifier[scores] , identifier[h] , identifier[t_params] , identifier[simulations] ):
literal[string]
identifier[sim_vector] = identifier[np] . identifier[zeros] ([ identifier[simulations] , identifier[h] ])
keyword[for] identifier[n] keyword[in] identifier[range] ( literal[int] , identifier[simulations] ):
identifier[lmda_exp] = identifier[lmda] . identifier[copy] ()
identifier[scores_exp] = identifier[scores] . identifier[copy] ()
identifier[Y_exp] = identifier[Y] . identifier[copy] ()
keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[h] ):
identifier[new_value] = identifier[t_params] [ literal[int] ]
keyword[if] identifier[self] . identifier[p] != literal[int] :
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[p] + literal[int] ):
identifier[new_value] += identifier[t_params] [ identifier[j] ]* identifier[lmda_exp] [- identifier[j] ]
keyword[if] identifier[self] . identifier[q] != literal[int] :
keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[q] + literal[int] ):
identifier[new_value] += identifier[t_params] [ identifier[k] + identifier[self] . identifier[p] ]* identifier[scores_exp] [- identifier[k] ]
keyword[if] identifier[self] . identifier[leverage] keyword[is] keyword[True] :
identifier[new_value] += identifier[t_params] [ literal[int] + identifier[self] . identifier[p] + identifier[self] . identifier[q] ]* identifier[np] . identifier[sign] (-( identifier[Y_exp] [- literal[int] ]- identifier[t_params] [- literal[int] ]- identifier[t_params] [- literal[int] ]* identifier[np] . identifier[exp] ( identifier[lmda_exp] [- literal[int] ]/ literal[int] )))*( identifier[scores_exp] [- literal[int] ]+ literal[int] )
identifier[lmda_exp] = identifier[np] . identifier[append] ( identifier[lmda_exp] ,[ identifier[new_value] ])
identifier[scores_exp] = identifier[np] . identifier[append] ( identifier[scores_exp] , identifier[scores] [ identifier[np] . identifier[random] . identifier[randint] ( identifier[scores] . identifier[shape] [ literal[int] ])])
identifier[Y_exp] = identifier[np] . identifier[append] ( identifier[Y_exp] , identifier[Y] [ identifier[np] . identifier[random] . identifier[randint] ( identifier[Y] . identifier[shape] [ literal[int] ])])
identifier[sim_vector] [ identifier[n] ]= identifier[lmda_exp] [- identifier[h] :]
keyword[return] identifier[np] . identifier[transpose] ( identifier[sim_vector] ) | def _sim_prediction(self, lmda, Y, scores, h, t_params, simulations):
""" Simulates a h-step ahead mean prediction
Parameters
----------
lmda : np.array
The past predicted values
Y : np.array
The past data
scores : np.array
The past scores
h : int
How many steps ahead for the prediction
t_params : np.array
A vector of (transformed) latent variables
simulations : int
How many simulations to perform
Returns
----------
Matrix of simulations
"""
sim_vector = np.zeros([simulations, h])
for n in range(0, simulations): # Create arrays to iteratre over
lmda_exp = lmda.copy()
scores_exp = scores.copy()
Y_exp = Y.copy() # Loop over h time periods
for t in range(0, h):
new_value = t_params[0]
if self.p != 0:
for j in range(1, self.p + 1):
new_value += t_params[j] * lmda_exp[-j] # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]]
if self.q != 0:
for k in range(1, self.q + 1):
new_value += t_params[k + self.p] * scores_exp[-k] # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
if self.leverage is True:
new_value += t_params[1 + self.p + self.q] * np.sign(-(Y_exp[-1] - t_params[-2] - t_params[-1] * np.exp(lmda_exp[-1] / 2.0))) * (scores_exp[-1] + 1) # depends on [control=['if'], data=[]]
lmda_exp = np.append(lmda_exp, [new_value]) # For indexing consistency
scores_exp = np.append(scores_exp, scores[np.random.randint(scores.shape[0])]) # expectation of score is zero
Y_exp = np.append(Y_exp, Y[np.random.randint(Y.shape[0])]) # bootstrap returns # depends on [control=['for'], data=[]]
sim_vector[n] = lmda_exp[-h:] # depends on [control=['for'], data=['n']]
return np.transpose(sim_vector) |
def get_stp_mst_detail_output_cist_port_edge_delay(self, **kwargs):
    """Build the get-stp-mst-detail request for cist/port/edge-delay.

    Assembles the XML element tree
    ``get_stp_mst_detail/output/cist/port/edge-delay`` and dispatches it
    through the callback.

    Keyword Args:
        edge_delay: Text value placed in the ``edge-delay`` leaf (required).
        callback: Callable invoked with the assembled root element;
            defaults to ``self._callback``.

    Returns:
        Whatever the callback returns for the generated element tree.
    """
    # The original auto-generated code created a throwaway
    # ET.Element("config") that was immediately overwritten; the root of
    # the request is the get_stp_mst_detail element itself.
    get_stp_mst_detail = ET.Element("get_stp_mst_detail")
    config = get_stp_mst_detail
    output = ET.SubElement(get_stp_mst_detail, "output")
    cist = ET.SubElement(output, "cist")
    port = ET.SubElement(cist, "port")
    edge_delay = ET.SubElement(port, "edge-delay")
    edge_delay.text = kwargs.pop('edge_delay')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_stp_mst_detail] assign[=] call[name[ET].Element, parameter[constant[get_stp_mst_detail]]]
variable[config] assign[=] name[get_stp_mst_detail]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_stp_mst_detail], constant[output]]]
variable[cist] assign[=] call[name[ET].SubElement, parameter[name[output], constant[cist]]]
variable[port] assign[=] call[name[ET].SubElement, parameter[name[cist], constant[port]]]
variable[edge_delay] assign[=] call[name[ET].SubElement, parameter[name[port], constant[edge-delay]]]
name[edge_delay].text assign[=] call[name[kwargs].pop, parameter[constant[edge_delay]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_stp_mst_detail_output_cist_port_edge_delay] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_stp_mst_detail] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_stp_mst_detail]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_stp_mst_detail] , literal[string] )
identifier[cist] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[port] = identifier[ET] . identifier[SubElement] ( identifier[cist] , literal[string] )
identifier[edge_delay] = identifier[ET] . identifier[SubElement] ( identifier[port] , literal[string] )
identifier[edge_delay] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_stp_mst_detail_output_cist_port_edge_delay(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_stp_mst_detail = ET.Element('get_stp_mst_detail')
config = get_stp_mst_detail
output = ET.SubElement(get_stp_mst_detail, 'output')
cist = ET.SubElement(output, 'cist')
port = ET.SubElement(cist, 'port')
edge_delay = ET.SubElement(port, 'edge-delay')
edge_delay.text = kwargs.pop('edge_delay')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def monthly(self):
    """
    Access the monthly usage records, building the list lazily.

    The MonthlyList is constructed on first access and cached on the
    instance for subsequent calls.

    :returns: twilio.rest.api.v2010.account.usage.record.monthly.MonthlyList
    :rtype: twilio.rest.api.v2010.account.usage.record.monthly.MonthlyList
    """
    cached = self._monthly
    if cached is None:
        cached = MonthlyList(self._version, account_sid=self._solution['account_sid'])
        self._monthly = cached
    return cached
constant[
Access the monthly
:returns: twilio.rest.api.v2010.account.usage.record.monthly.MonthlyList
:rtype: twilio.rest.api.v2010.account.usage.record.monthly.MonthlyList
]
if compare[name[self]._monthly is constant[None]] begin[:]
name[self]._monthly assign[=] call[name[MonthlyList], parameter[name[self]._version]]
return[name[self]._monthly] | keyword[def] identifier[monthly] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_monthly] keyword[is] keyword[None] :
identifier[self] . identifier[_monthly] = identifier[MonthlyList] ( identifier[self] . identifier[_version] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],)
keyword[return] identifier[self] . identifier[_monthly] | def monthly(self):
"""
Access the monthly
:returns: twilio.rest.api.v2010.account.usage.record.monthly.MonthlyList
:rtype: twilio.rest.api.v2010.account.usage.record.monthly.MonthlyList
"""
if self._monthly is None:
self._monthly = MonthlyList(self._version, account_sid=self._solution['account_sid']) # depends on [control=['if'], data=[]]
return self._monthly |
def _uncythonized_model(self, beta):
    """ Creates the structure of the model (pure-Python recursion)

    Parameters
    ----------
    beta : np.array
        Untransformed starting values for the latent variables

    Returns
    ----------
    theta : np.array
        Predicted values for the time series
    Y : np.array
        Length-adjusted time series (accounting for lags)
    scores : np.array
        Scores for the time series
    """
    # Map each latent variable back to its constrained space.
    transformed = np.array(
        [self.latent_variables.z_list[i].prior.transform(beta[i])
         for i in range(beta.shape[0])])
    n_obs = self.model_Y.shape[0]
    theta = np.zeros(n_obs)
    scale, shape, skewness = self._get_scale_and_shape(transformed)

    # Run the level recursion over the whole series.
    theta, self.model_scores = gas_llev_recursion(
        transformed, theta, self.model_scores, self.model_Y, n_obs,
        self.family.score_function, self.link, scale, shape, skewness,
        self.max_lag)

    return theta, self.model_Y, self.model_scores
constant[ Creates the structure of the model
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
theta : np.array
Contains the predicted values for the time series
Y : np.array
Contains the length-adjusted time series (accounting for lags)
scores : np.array
Contains the scores for the time series
]
variable[parm] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20cabd180>]]
variable[theta] assign[=] call[name[np].zeros, parameter[call[name[self].model_Y.shape][constant[0]]]]
<ast.Tuple object at 0x7da204620100> assign[=] call[name[self]._get_scale_and_shape, parameter[name[parm]]]
<ast.Tuple object at 0x7da2046236a0> assign[=] call[name[gas_llev_recursion], parameter[name[parm], name[theta], name[self].model_scores, name[self].model_Y, call[name[self].model_Y.shape][constant[0]], name[self].family.score_function, name[self].link, name[model_scale], name[model_shape], name[model_skewness], name[self].max_lag]]
return[tuple[[<ast.Name object at 0x7da18dc04d30>, <ast.Attribute object at 0x7da18dc05ab0>, <ast.Attribute object at 0x7da18dc064a0>]]] | keyword[def] identifier[_uncythonized_model] ( identifier[self] , identifier[beta] ):
literal[string]
identifier[parm] = identifier[np] . identifier[array] ([ identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[transform] ( identifier[beta] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[beta] . identifier[shape] [ literal[int] ])])
identifier[theta] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[model_Y] . identifier[shape] [ literal[int] ])
identifier[model_scale] , identifier[model_shape] , identifier[model_skewness] = identifier[self] . identifier[_get_scale_and_shape] ( identifier[parm] )
identifier[theta] , identifier[self] . identifier[model_scores] = identifier[gas_llev_recursion] ( identifier[parm] , identifier[theta] , identifier[self] . identifier[model_scores] , identifier[self] . identifier[model_Y] , identifier[self] . identifier[model_Y] . identifier[shape] [ literal[int] ],
identifier[self] . identifier[family] . identifier[score_function] , identifier[self] . identifier[link] , identifier[model_scale] , identifier[model_shape] , identifier[model_skewness] , identifier[self] . identifier[max_lag] )
keyword[return] identifier[theta] , identifier[self] . identifier[model_Y] , identifier[self] . identifier[model_scores] | def _uncythonized_model(self, beta):
""" Creates the structure of the model
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
theta : np.array
Contains the predicted values for the time series
Y : np.array
Contains the length-adjusted time series (accounting for lags)
scores : np.array
Contains the scores for the time series
"""
parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])])
theta = np.zeros(self.model_Y.shape[0])
(model_scale, model_shape, model_skewness) = self._get_scale_and_shape(parm)
# Loop over time series
(theta, self.model_scores) = gas_llev_recursion(parm, theta, self.model_scores, self.model_Y, self.model_Y.shape[0], self.family.score_function, self.link, model_scale, model_shape, model_skewness, self.max_lag)
return (theta, self.model_Y, self.model_scores) |
def link(self, source_key, target_key):
    """Create a symbolic link key so `target_key` resolves to `source_key`."""
    value = self._link_value_for_key(source_key)
    # Write straight into the child datastore so any link already stored
    # at `target_key` is overwritten rather than followed.
    self.child_datastore.put(target_key, value)
    # Resolve the new link once to make sure it introduces no cycle.
    self.get(target_key)
constant[Creates a symbolic link key pointing from `target_key` to `source_key`]
variable[link_value] assign[=] call[name[self]._link_value_for_key, parameter[name[source_key]]]
call[name[self].child_datastore.put, parameter[name[target_key], name[link_value]]]
call[name[self].get, parameter[name[target_key]]] | keyword[def] identifier[link] ( identifier[self] , identifier[source_key] , identifier[target_key] ):
literal[string]
identifier[link_value] = identifier[self] . identifier[_link_value_for_key] ( identifier[source_key] )
identifier[self] . identifier[child_datastore] . identifier[put] ( identifier[target_key] , identifier[link_value] )
identifier[self] . identifier[get] ( identifier[target_key] ) | def link(self, source_key, target_key):
"""Creates a symbolic link key pointing from `target_key` to `source_key`"""
link_value = self._link_value_for_key(source_key)
# put straight into the child, to avoid following previous links.
self.child_datastore.put(target_key, link_value)
# exercise the link. ensure there are no cycles.
self.get(target_key) |
def proc_line_coordinate(self, line):
    """Extracts data from columns in ATOM/HETATM record.

    `line` is a fixed-width PDB coordinate record; the slices below
    follow the wwPDB column layout. Unseen atom-name column strings are
    cached in the global column-format dict and added to the ampal data
    session.
    """
    pdb_atom_col_dict = global_settings['ampal']['pdb_atom_col_dict']
    at_type = line[0:6].strip()  # 0  record name (ATOM/HETATM)
    at_ser = int(line[6:11].strip())  # 1  atom serial number
    at_name = line[12:16].strip()  # 2  atom name
    alt_loc = line[16].strip()  # 3  alternate location indicator
    res_name = line[17:20].strip()  # 4  residue name
    chain_id = line[21].strip()  # 5  chain identifier
    res_seq = int(line[22:26].strip())  # 6  residue sequence number
    i_code = line[26].strip()  # 7  insertion code
    x = float(line[30:38].strip())  # 8  x coordinate
    y = float(line[38:46].strip())  # 9  y coordinate
    z = float(line[46:54].strip())  # 10 z coordinate
    occupancy = float(line[54:60].strip())  # 11 occupancy
    temp_factor = float(line[60:66].strip())  # 12 temperature (B) factor
    element = line[76:78].strip()  # 13 element symbol
    charge = line[78:80].strip()  # 14 atom charge
    if at_name not in pdb_atom_col_dict:
        # Cache the raw (unstripped) atom-name column text — presumably so
        # exact spacing can be reproduced when writing records; confirm.
        pdb_atom_col_dict[at_name] = line[12:16]
        pdb_col_e = PDBColFormat(atom_name=at_name, atom_col=line[12:16])
        ampal_data_session.add(pdb_col_e)
        self.new_labels = True  # flag that new column formats were recorded
    return (at_type, at_ser, at_name, alt_loc, res_name, chain_id, res_seq,
            i_code, x, y, z, occupancy, temp_factor, element, charge)
constant[Extracts data from columns in ATOM/HETATM record.]
variable[pdb_atom_col_dict] assign[=] call[call[name[global_settings]][constant[ampal]]][constant[pdb_atom_col_dict]]
variable[at_type] assign[=] call[call[name[line]][<ast.Slice object at 0x7da1b26091e0>].strip, parameter[]]
variable[at_ser] assign[=] call[name[int], parameter[call[call[name[line]][<ast.Slice object at 0x7da1b2609090>].strip, parameter[]]]]
variable[at_name] assign[=] call[call[name[line]][<ast.Slice object at 0x7da1b26087c0>].strip, parameter[]]
variable[alt_loc] assign[=] call[call[name[line]][constant[16]].strip, parameter[]]
variable[res_name] assign[=] call[call[name[line]][<ast.Slice object at 0x7da1b2608880>].strip, parameter[]]
variable[chain_id] assign[=] call[call[name[line]][constant[21]].strip, parameter[]]
variable[res_seq] assign[=] call[name[int], parameter[call[call[name[line]][<ast.Slice object at 0x7da1b2609180>].strip, parameter[]]]]
variable[i_code] assign[=] call[call[name[line]][constant[26]].strip, parameter[]]
variable[x] assign[=] call[name[float], parameter[call[call[name[line]][<ast.Slice object at 0x7da2041da5f0>].strip, parameter[]]]]
variable[y] assign[=] call[name[float], parameter[call[call[name[line]][<ast.Slice object at 0x7da2041dbbe0>].strip, parameter[]]]]
variable[z] assign[=] call[name[float], parameter[call[call[name[line]][<ast.Slice object at 0x7da2041d83a0>].strip, parameter[]]]]
variable[occupancy] assign[=] call[name[float], parameter[call[call[name[line]][<ast.Slice object at 0x7da1b28dde40>].strip, parameter[]]]]
variable[temp_factor] assign[=] call[name[float], parameter[call[call[name[line]][<ast.Slice object at 0x7da1b28dc2b0>].strip, parameter[]]]]
variable[element] assign[=] call[call[name[line]][<ast.Slice object at 0x7da1b2852140>].strip, parameter[]]
variable[charge] assign[=] call[call[name[line]][<ast.Slice object at 0x7da1b2850100>].strip, parameter[]]
if compare[name[at_name] <ast.NotIn object at 0x7da2590d7190> name[pdb_atom_col_dict]] begin[:]
call[name[pdb_atom_col_dict]][name[at_name]] assign[=] call[name[line]][<ast.Slice object at 0x7da1b2851300>]
variable[pdb_col_e] assign[=] call[name[PDBColFormat], parameter[]]
call[name[ampal_data_session].add, parameter[name[pdb_col_e]]]
name[self].new_labels assign[=] constant[True]
return[tuple[[<ast.Name object at 0x7da1b28521d0>, <ast.Name object at 0x7da1b2853730>, <ast.Name object at 0x7da1b28530d0>, <ast.Name object at 0x7da1b28506a0>, <ast.Name object at 0x7da1b2852590>, <ast.Name object at 0x7da1b2851f00>, <ast.Name object at 0x7da1b2851060>, <ast.Name object at 0x7da1b2853c40>, <ast.Name object at 0x7da1b28508e0>, <ast.Name object at 0x7da1b2851660>, <ast.Name object at 0x7da1b28506d0>, <ast.Name object at 0x7da1b26249a0>, <ast.Name object at 0x7da1b26273a0>, <ast.Name object at 0x7da1b26254e0>, <ast.Name object at 0x7da1b2626ec0>]]] | keyword[def] identifier[proc_line_coordinate] ( identifier[self] , identifier[line] ):
literal[string]
identifier[pdb_atom_col_dict] = identifier[global_settings] [ literal[string] ][ literal[string] ]
identifier[at_type] = identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ()
identifier[at_ser] = identifier[int] ( identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ())
identifier[at_name] = identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ()
identifier[alt_loc] = identifier[line] [ literal[int] ]. identifier[strip] ()
identifier[res_name] = identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ()
identifier[chain_id] = identifier[line] [ literal[int] ]. identifier[strip] ()
identifier[res_seq] = identifier[int] ( identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ())
identifier[i_code] = identifier[line] [ literal[int] ]. identifier[strip] ()
identifier[x] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ())
identifier[y] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ())
identifier[z] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ())
identifier[occupancy] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ())
identifier[temp_factor] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ())
identifier[element] = identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ()
identifier[charge] = identifier[line] [ literal[int] : literal[int] ]. identifier[strip] ()
keyword[if] identifier[at_name] keyword[not] keyword[in] identifier[pdb_atom_col_dict] :
identifier[pdb_atom_col_dict] [ identifier[at_name] ]= identifier[line] [ literal[int] : literal[int] ]
identifier[pdb_col_e] = identifier[PDBColFormat] ( identifier[atom_name] = identifier[at_name] , identifier[atom_col] = identifier[line] [ literal[int] : literal[int] ])
identifier[ampal_data_session] . identifier[add] ( identifier[pdb_col_e] )
identifier[self] . identifier[new_labels] = keyword[True]
keyword[return] ( identifier[at_type] , identifier[at_ser] , identifier[at_name] , identifier[alt_loc] , identifier[res_name] , identifier[chain_id] , identifier[res_seq] ,
identifier[i_code] , identifier[x] , identifier[y] , identifier[z] , identifier[occupancy] , identifier[temp_factor] , identifier[element] , identifier[charge] ) | def proc_line_coordinate(self, line):
"""Extracts data from columns in ATOM/HETATM record."""
pdb_atom_col_dict = global_settings['ampal']['pdb_atom_col_dict']
at_type = line[0:6].strip() # 0
at_ser = int(line[6:11].strip()) # 1
at_name = line[12:16].strip() # 2
alt_loc = line[16].strip() # 3
res_name = line[17:20].strip() # 4
chain_id = line[21].strip() # 5
res_seq = int(line[22:26].strip()) # 6
i_code = line[26].strip() # 7
x = float(line[30:38].strip()) # 8
y = float(line[38:46].strip()) # 9
z = float(line[46:54].strip()) # 10
occupancy = float(line[54:60].strip()) # 11
temp_factor = float(line[60:66].strip()) # 12
element = line[76:78].strip() # 13
charge = line[78:80].strip() # 14
if at_name not in pdb_atom_col_dict:
pdb_atom_col_dict[at_name] = line[12:16]
pdb_col_e = PDBColFormat(atom_name=at_name, atom_col=line[12:16])
ampal_data_session.add(pdb_col_e)
self.new_labels = True # depends on [control=['if'], data=['at_name', 'pdb_atom_col_dict']]
return (at_type, at_ser, at_name, alt_loc, res_name, chain_id, res_seq, i_code, x, y, z, occupancy, temp_factor, element, charge) |
def analysis_of_prot_lig_interactions(self):
    """
    The classes and function that deal with protein-ligand interaction analysis.

    Runs each analysis over the loaded topology (and trajectory, if any)
    and stores the results as instance attributes: hbonds, pistacking,
    sasa, lig_descr, salt_bridges, and — only when a trajectory is
    present — rmsf.
    """
    # distance=3 is the hydrogen-bond distance cutoff (presumably in
    # Angstroms — confirm against the HBonds implementation).
    self.hbonds = HBonds(self.topol_data,self.trajectory,self.start,self.end,self.skip,self.analysis_cutoff,distance=3)
    self.pistacking = PiStacking(self.topol_data,self.trajectory,self.start,self.end,self.skip, self.analysis_cutoff)
    self.sasa = SASA(self.topol_data,self.trajectory)
    self.lig_descr = LigDescr(self.topol_data)
    # RMSF needs multiple frames, so it is skipped for single-structure input.
    if self.trajectory!=[]:
        self.rmsf = RMSF_measurements(self.topol_data,self.topology,self.trajectory,self.ligand,self.start,self.end,self.skip)
    self.salt_bridges = SaltBridges(self.topol_data,self.trajectory,self.lig_descr,self.start,self.end,self.skip,self.analysis_cutoff)
constant[
The classes and function that deal with protein-ligand interaction analysis.
]
name[self].hbonds assign[=] call[name[HBonds], parameter[name[self].topol_data, name[self].trajectory, name[self].start, name[self].end, name[self].skip, name[self].analysis_cutoff]]
name[self].pistacking assign[=] call[name[PiStacking], parameter[name[self].topol_data, name[self].trajectory, name[self].start, name[self].end, name[self].skip, name[self].analysis_cutoff]]
name[self].sasa assign[=] call[name[SASA], parameter[name[self].topol_data, name[self].trajectory]]
name[self].lig_descr assign[=] call[name[LigDescr], parameter[name[self].topol_data]]
if compare[name[self].trajectory not_equal[!=] list[[]]] begin[:]
name[self].rmsf assign[=] call[name[RMSF_measurements], parameter[name[self].topol_data, name[self].topology, name[self].trajectory, name[self].ligand, name[self].start, name[self].end, name[self].skip]]
name[self].salt_bridges assign[=] call[name[SaltBridges], parameter[name[self].topol_data, name[self].trajectory, name[self].lig_descr, name[self].start, name[self].end, name[self].skip, name[self].analysis_cutoff]] | keyword[def] identifier[analysis_of_prot_lig_interactions] ( identifier[self] ):
literal[string]
identifier[self] . identifier[hbonds] = identifier[HBonds] ( identifier[self] . identifier[topol_data] , identifier[self] . identifier[trajectory] , identifier[self] . identifier[start] , identifier[self] . identifier[end] , identifier[self] . identifier[skip] , identifier[self] . identifier[analysis_cutoff] , identifier[distance] = literal[int] )
identifier[self] . identifier[pistacking] = identifier[PiStacking] ( identifier[self] . identifier[topol_data] , identifier[self] . identifier[trajectory] , identifier[self] . identifier[start] , identifier[self] . identifier[end] , identifier[self] . identifier[skip] , identifier[self] . identifier[analysis_cutoff] )
identifier[self] . identifier[sasa] = identifier[SASA] ( identifier[self] . identifier[topol_data] , identifier[self] . identifier[trajectory] )
identifier[self] . identifier[lig_descr] = identifier[LigDescr] ( identifier[self] . identifier[topol_data] )
keyword[if] identifier[self] . identifier[trajectory] !=[]:
identifier[self] . identifier[rmsf] = identifier[RMSF_measurements] ( identifier[self] . identifier[topol_data] , identifier[self] . identifier[topology] , identifier[self] . identifier[trajectory] , identifier[self] . identifier[ligand] , identifier[self] . identifier[start] , identifier[self] . identifier[end] , identifier[self] . identifier[skip] )
identifier[self] . identifier[salt_bridges] = identifier[SaltBridges] ( identifier[self] . identifier[topol_data] , identifier[self] . identifier[trajectory] , identifier[self] . identifier[lig_descr] , identifier[self] . identifier[start] , identifier[self] . identifier[end] , identifier[self] . identifier[skip] , identifier[self] . identifier[analysis_cutoff] ) | def analysis_of_prot_lig_interactions(self):
"""
The classes and function that deal with protein-ligand interaction analysis.
"""
self.hbonds = HBonds(self.topol_data, self.trajectory, self.start, self.end, self.skip, self.analysis_cutoff, distance=3)
self.pistacking = PiStacking(self.topol_data, self.trajectory, self.start, self.end, self.skip, self.analysis_cutoff)
self.sasa = SASA(self.topol_data, self.trajectory)
self.lig_descr = LigDescr(self.topol_data)
if self.trajectory != []:
self.rmsf = RMSF_measurements(self.topol_data, self.topology, self.trajectory, self.ligand, self.start, self.end, self.skip) # depends on [control=['if'], data=[]]
self.salt_bridges = SaltBridges(self.topol_data, self.trajectory, self.lig_descr, self.start, self.end, self.skip, self.analysis_cutoff) |
def run_simple(app, *, host="127.0.0.1", port=500,
debug=True, autoreload=True, **kwargs):
"""Start a WSGI application.
Optional features include a reloader, multithreading and fork support.
"""
kwargs.setdefault("use_evalex", debug)
return serving.run_simple(host, port, app,
use_debugger=debug,
use_reloader=autoreload,
**kwargs) | def function[run_simple, parameter[app]]:
constant[Start a WSGI application.
Optional features include a reloader, multithreading and fork support.
]
call[name[kwargs].setdefault, parameter[constant[use_evalex], name[debug]]]
return[call[name[serving].run_simple, parameter[name[host], name[port], name[app]]]] | keyword[def] identifier[run_simple] ( identifier[app] ,*, identifier[host] = literal[string] , identifier[port] = literal[int] ,
identifier[debug] = keyword[True] , identifier[autoreload] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[debug] )
keyword[return] identifier[serving] . identifier[run_simple] ( identifier[host] , identifier[port] , identifier[app] ,
identifier[use_debugger] = identifier[debug] ,
identifier[use_reloader] = identifier[autoreload] ,
** identifier[kwargs] ) | def run_simple(app, *, host='127.0.0.1', port=500, debug=True, autoreload=True, **kwargs):
"""Start a WSGI application.
Optional features include a reloader, multithreading and fork support.
"""
kwargs.setdefault('use_evalex', debug)
return serving.run_simple(host, port, app, use_debugger=debug, use_reloader=autoreload, **kwargs) |
def acquire(self):
'''
Get a new connection from the pool.
This will return an existing connection, if one is available in the
pool, or create a new connection.
.. warning:: If the pool was created with `maxsize` and `block=True`,
this method may block until a connection is available in the pool.
'''
self._condition.acquire()
try:
# Wait for a connection if there is an upper bound to the pool.
if self._maxsize is not None and self._block:
while not self._pool and self._nconnections == self._maxsize:
self._condition.wait(timeout=None) # block indefinitely
# Check the pool for a non-stale connection.
while self._pool:
pooledconn = self._pool.pop(0) # get least recently used connection
if self._idlettl is not None and (pooledconn.released + self._idlettl) < time.time():
pooledconn.connection.close()
self._nconnections -= 1
else:
return pooledconn.connection
connection = self._dbapi2.connect(*(), **self._connection_args.copy())
self._nconnections += 1
return connection
finally:
self._condition.release() | def function[acquire, parameter[self]]:
constant[
Get a new connection from the pool.
This will return an existing connection, if one is available in the
pool, or create a new connection.
.. warning:: If the pool was created with `maxsize` and `block=True`,
this method may block until a connection is available in the pool.
]
call[name[self]._condition.acquire, parameter[]]
<ast.Try object at 0x7da1b1baa3b0> | keyword[def] identifier[acquire] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_condition] . identifier[acquire] ()
keyword[try] :
keyword[if] identifier[self] . identifier[_maxsize] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[_block] :
keyword[while] keyword[not] identifier[self] . identifier[_pool] keyword[and] identifier[self] . identifier[_nconnections] == identifier[self] . identifier[_maxsize] :
identifier[self] . identifier[_condition] . identifier[wait] ( identifier[timeout] = keyword[None] )
keyword[while] identifier[self] . identifier[_pool] :
identifier[pooledconn] = identifier[self] . identifier[_pool] . identifier[pop] ( literal[int] )
keyword[if] identifier[self] . identifier[_idlettl] keyword[is] keyword[not] keyword[None] keyword[and] ( identifier[pooledconn] . identifier[released] + identifier[self] . identifier[_idlettl] )< identifier[time] . identifier[time] ():
identifier[pooledconn] . identifier[connection] . identifier[close] ()
identifier[self] . identifier[_nconnections] -= literal[int]
keyword[else] :
keyword[return] identifier[pooledconn] . identifier[connection]
identifier[connection] = identifier[self] . identifier[_dbapi2] . identifier[connect] (*(),** identifier[self] . identifier[_connection_args] . identifier[copy] ())
identifier[self] . identifier[_nconnections] += literal[int]
keyword[return] identifier[connection]
keyword[finally] :
identifier[self] . identifier[_condition] . identifier[release] () | def acquire(self):
"""
Get a new connection from the pool.
This will return an existing connection, if one is available in the
pool, or create a new connection.
.. warning:: If the pool was created with `maxsize` and `block=True`,
this method may block until a connection is available in the pool.
"""
self._condition.acquire()
try:
# Wait for a connection if there is an upper bound to the pool.
if self._maxsize is not None and self._block:
while not self._pool and self._nconnections == self._maxsize:
self._condition.wait(timeout=None) # block indefinitely # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
# Check the pool for a non-stale connection.
while self._pool:
pooledconn = self._pool.pop(0) # get least recently used connection
if self._idlettl is not None and pooledconn.released + self._idlettl < time.time():
pooledconn.connection.close()
self._nconnections -= 1 # depends on [control=['if'], data=[]]
else:
return pooledconn.connection # depends on [control=['while'], data=[]]
connection = self._dbapi2.connect(*(), **self._connection_args.copy())
self._nconnections += 1
return connection # depends on [control=['try'], data=[]]
finally:
self._condition.release() |
def aggregate(self, query):
"""
Issue an aggregation query
### Parameters
**query**: This can be either an `AggeregateRequest`, or a `Cursor`
An `AggregateResult` object is returned. You can access the rows from its
`rows` property, which will always yield the rows of the result
"""
if isinstance(query, AggregateRequest):
has_schema = query._with_schema
has_cursor = bool(query._cursor)
cmd = [self.AGGREGATE_CMD, self.index_name] + query.build_args()
elif isinstance(query, Cursor):
has_schema = False
has_cursor = True
cmd = [self.CURSOR_CMD, 'READ', self.index_name] + query.build_args()
else:
raise ValueError('Bad query', query)
raw = self.redis.execute_command(*cmd)
if has_cursor:
if isinstance(query, Cursor):
query.cid = raw[1]
cursor = query
else:
cursor = Cursor(raw[1])
raw = raw[0]
else:
cursor = None
if query._with_schema:
schema = raw[0]
rows = raw[2:]
else:
schema = None
rows = raw[1:]
res = AggregateResult(rows, cursor, schema)
return res | def function[aggregate, parameter[self, query]]:
constant[
Issue an aggregation query
### Parameters
**query**: This can be either an `AggeregateRequest`, or a `Cursor`
An `AggregateResult` object is returned. You can access the rows from its
`rows` property, which will always yield the rows of the result
]
if call[name[isinstance], parameter[name[query], name[AggregateRequest]]] begin[:]
variable[has_schema] assign[=] name[query]._with_schema
variable[has_cursor] assign[=] call[name[bool], parameter[name[query]._cursor]]
variable[cmd] assign[=] binary_operation[list[[<ast.Attribute object at 0x7da1b007f9a0>, <ast.Attribute object at 0x7da1b007f640>]] + call[name[query].build_args, parameter[]]]
variable[raw] assign[=] call[name[self].redis.execute_command, parameter[<ast.Starred object at 0x7da18f09c160>]]
if name[has_cursor] begin[:]
if call[name[isinstance], parameter[name[query], name[Cursor]]] begin[:]
name[query].cid assign[=] call[name[raw]][constant[1]]
variable[cursor] assign[=] name[query]
variable[raw] assign[=] call[name[raw]][constant[0]]
if name[query]._with_schema begin[:]
variable[schema] assign[=] call[name[raw]][constant[0]]
variable[rows] assign[=] call[name[raw]][<ast.Slice object at 0x7da20c990ca0>]
variable[res] assign[=] call[name[AggregateResult], parameter[name[rows], name[cursor], name[schema]]]
return[name[res]] | keyword[def] identifier[aggregate] ( identifier[self] , identifier[query] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[query] , identifier[AggregateRequest] ):
identifier[has_schema] = identifier[query] . identifier[_with_schema]
identifier[has_cursor] = identifier[bool] ( identifier[query] . identifier[_cursor] )
identifier[cmd] =[ identifier[self] . identifier[AGGREGATE_CMD] , identifier[self] . identifier[index_name] ]+ identifier[query] . identifier[build_args] ()
keyword[elif] identifier[isinstance] ( identifier[query] , identifier[Cursor] ):
identifier[has_schema] = keyword[False]
identifier[has_cursor] = keyword[True]
identifier[cmd] =[ identifier[self] . identifier[CURSOR_CMD] , literal[string] , identifier[self] . identifier[index_name] ]+ identifier[query] . identifier[build_args] ()
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] , identifier[query] )
identifier[raw] = identifier[self] . identifier[redis] . identifier[execute_command] (* identifier[cmd] )
keyword[if] identifier[has_cursor] :
keyword[if] identifier[isinstance] ( identifier[query] , identifier[Cursor] ):
identifier[query] . identifier[cid] = identifier[raw] [ literal[int] ]
identifier[cursor] = identifier[query]
keyword[else] :
identifier[cursor] = identifier[Cursor] ( identifier[raw] [ literal[int] ])
identifier[raw] = identifier[raw] [ literal[int] ]
keyword[else] :
identifier[cursor] = keyword[None]
keyword[if] identifier[query] . identifier[_with_schema] :
identifier[schema] = identifier[raw] [ literal[int] ]
identifier[rows] = identifier[raw] [ literal[int] :]
keyword[else] :
identifier[schema] = keyword[None]
identifier[rows] = identifier[raw] [ literal[int] :]
identifier[res] = identifier[AggregateResult] ( identifier[rows] , identifier[cursor] , identifier[schema] )
keyword[return] identifier[res] | def aggregate(self, query):
"""
Issue an aggregation query
### Parameters
**query**: This can be either an `AggeregateRequest`, or a `Cursor`
An `AggregateResult` object is returned. You can access the rows from its
`rows` property, which will always yield the rows of the result
"""
if isinstance(query, AggregateRequest):
has_schema = query._with_schema
has_cursor = bool(query._cursor)
cmd = [self.AGGREGATE_CMD, self.index_name] + query.build_args() # depends on [control=['if'], data=[]]
elif isinstance(query, Cursor):
has_schema = False
has_cursor = True
cmd = [self.CURSOR_CMD, 'READ', self.index_name] + query.build_args() # depends on [control=['if'], data=[]]
else:
raise ValueError('Bad query', query)
raw = self.redis.execute_command(*cmd)
if has_cursor:
if isinstance(query, Cursor):
query.cid = raw[1]
cursor = query # depends on [control=['if'], data=[]]
else:
cursor = Cursor(raw[1])
raw = raw[0] # depends on [control=['if'], data=[]]
else:
cursor = None
if query._with_schema:
schema = raw[0]
rows = raw[2:] # depends on [control=['if'], data=[]]
else:
schema = None
rows = raw[1:]
res = AggregateResult(rows, cursor, schema)
return res |
def main(_):
"""Convert a file to examples."""
if FLAGS.subword_text_encoder_filename:
encoder = text_encoder.SubwordTextEncoder(
FLAGS.subword_text_encoder_filename)
elif FLAGS.token_text_encoder_filename:
encoder = text_encoder.TokenTextEncoder(FLAGS.token_text_encoder_filename)
elif FLAGS.byte_text_encoder:
encoder = text_encoder.ByteTextEncoder()
else:
encoder = None
reader = tf.python_io.tf_record_iterator(FLAGS.input_filename)
total_sequences = 0
total_input_tokens = 0
total_target_tokens = 0
nonpadding_input_tokens = 0
nonpadding_target_tokens = 0
max_input_length = 0
max_target_length = 0
for record in reader:
x = tf.train.Example()
x.ParseFromString(record)
inputs = [int(i) for i in x.features.feature["inputs"].int64_list.value]
targets = [int(i) for i in x.features.feature["targets"].int64_list.value]
if FLAGS.print_inputs:
print("INPUTS:\n" + encoder.decode(inputs) if encoder else inputs)
if FLAGS.print_targets:
print("TARGETS:\n" + encoder.decode(targets) if encoder else targets)
nonpadding_input_tokens += len(inputs) - inputs.count(0)
nonpadding_target_tokens += len(targets) - targets.count(0)
total_input_tokens += len(inputs)
total_target_tokens += len(targets)
total_sequences += 1
max_input_length = max(max_input_length, len(inputs))
max_target_length = max(max_target_length, len(targets))
if FLAGS.print_all:
for k, v in six.iteritems(x.features.feature):
print("%s: %s" % (k, v.int64_list.value))
print("total_sequences: %d" % total_sequences)
print("total_input_tokens: %d" % total_input_tokens)
print("total_target_tokens: %d" % total_target_tokens)
print("nonpadding_input_tokens: %d" % nonpadding_input_tokens)
print("nonpadding_target_tokens: %d" % nonpadding_target_tokens)
print("max_input_length: %d" % max_input_length)
print("max_target_length: %d" % max_target_length) | def function[main, parameter[_]]:
constant[Convert a file to examples.]
if name[FLAGS].subword_text_encoder_filename begin[:]
variable[encoder] assign[=] call[name[text_encoder].SubwordTextEncoder, parameter[name[FLAGS].subword_text_encoder_filename]]
variable[reader] assign[=] call[name[tf].python_io.tf_record_iterator, parameter[name[FLAGS].input_filename]]
variable[total_sequences] assign[=] constant[0]
variable[total_input_tokens] assign[=] constant[0]
variable[total_target_tokens] assign[=] constant[0]
variable[nonpadding_input_tokens] assign[=] constant[0]
variable[nonpadding_target_tokens] assign[=] constant[0]
variable[max_input_length] assign[=] constant[0]
variable[max_target_length] assign[=] constant[0]
for taget[name[record]] in starred[name[reader]] begin[:]
variable[x] assign[=] call[name[tf].train.Example, parameter[]]
call[name[x].ParseFromString, parameter[name[record]]]
variable[inputs] assign[=] <ast.ListComp object at 0x7da20e956560>
variable[targets] assign[=] <ast.ListComp object at 0x7da20e9576d0>
if name[FLAGS].print_inputs begin[:]
call[name[print], parameter[<ast.IfExp object at 0x7da20e957df0>]]
if name[FLAGS].print_targets begin[:]
call[name[print], parameter[<ast.IfExp object at 0x7da20e9575e0>]]
<ast.AugAssign object at 0x7da20e957430>
<ast.AugAssign object at 0x7da20e957b80>
<ast.AugAssign object at 0x7da20e956f50>
<ast.AugAssign object at 0x7da20e957700>
<ast.AugAssign object at 0x7da20e957f70>
variable[max_input_length] assign[=] call[name[max], parameter[name[max_input_length], call[name[len], parameter[name[inputs]]]]]
variable[max_target_length] assign[=] call[name[max], parameter[name[max_target_length], call[name[len], parameter[name[targets]]]]]
if name[FLAGS].print_all begin[:]
for taget[tuple[[<ast.Name object at 0x7da20e955570>, <ast.Name object at 0x7da20e957160>]]] in starred[call[name[six].iteritems, parameter[name[x].features.feature]]] begin[:]
call[name[print], parameter[binary_operation[constant[%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e955330>, <ast.Attribute object at 0x7da20e956950>]]]]]
call[name[print], parameter[binary_operation[constant[total_sequences: %d] <ast.Mod object at 0x7da2590d6920> name[total_sequences]]]]
call[name[print], parameter[binary_operation[constant[total_input_tokens: %d] <ast.Mod object at 0x7da2590d6920> name[total_input_tokens]]]]
call[name[print], parameter[binary_operation[constant[total_target_tokens: %d] <ast.Mod object at 0x7da2590d6920> name[total_target_tokens]]]]
call[name[print], parameter[binary_operation[constant[nonpadding_input_tokens: %d] <ast.Mod object at 0x7da2590d6920> name[nonpadding_input_tokens]]]]
call[name[print], parameter[binary_operation[constant[nonpadding_target_tokens: %d] <ast.Mod object at 0x7da2590d6920> name[nonpadding_target_tokens]]]]
call[name[print], parameter[binary_operation[constant[max_input_length: %d] <ast.Mod object at 0x7da2590d6920> name[max_input_length]]]]
call[name[print], parameter[binary_operation[constant[max_target_length: %d] <ast.Mod object at 0x7da2590d6920> name[max_target_length]]]] | keyword[def] identifier[main] ( identifier[_] ):
literal[string]
keyword[if] identifier[FLAGS] . identifier[subword_text_encoder_filename] :
identifier[encoder] = identifier[text_encoder] . identifier[SubwordTextEncoder] (
identifier[FLAGS] . identifier[subword_text_encoder_filename] )
keyword[elif] identifier[FLAGS] . identifier[token_text_encoder_filename] :
identifier[encoder] = identifier[text_encoder] . identifier[TokenTextEncoder] ( identifier[FLAGS] . identifier[token_text_encoder_filename] )
keyword[elif] identifier[FLAGS] . identifier[byte_text_encoder] :
identifier[encoder] = identifier[text_encoder] . identifier[ByteTextEncoder] ()
keyword[else] :
identifier[encoder] = keyword[None]
identifier[reader] = identifier[tf] . identifier[python_io] . identifier[tf_record_iterator] ( identifier[FLAGS] . identifier[input_filename] )
identifier[total_sequences] = literal[int]
identifier[total_input_tokens] = literal[int]
identifier[total_target_tokens] = literal[int]
identifier[nonpadding_input_tokens] = literal[int]
identifier[nonpadding_target_tokens] = literal[int]
identifier[max_input_length] = literal[int]
identifier[max_target_length] = literal[int]
keyword[for] identifier[record] keyword[in] identifier[reader] :
identifier[x] = identifier[tf] . identifier[train] . identifier[Example] ()
identifier[x] . identifier[ParseFromString] ( identifier[record] )
identifier[inputs] =[ identifier[int] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[x] . identifier[features] . identifier[feature] [ literal[string] ]. identifier[int64_list] . identifier[value] ]
identifier[targets] =[ identifier[int] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[x] . identifier[features] . identifier[feature] [ literal[string] ]. identifier[int64_list] . identifier[value] ]
keyword[if] identifier[FLAGS] . identifier[print_inputs] :
identifier[print] ( literal[string] + identifier[encoder] . identifier[decode] ( identifier[inputs] ) keyword[if] identifier[encoder] keyword[else] identifier[inputs] )
keyword[if] identifier[FLAGS] . identifier[print_targets] :
identifier[print] ( literal[string] + identifier[encoder] . identifier[decode] ( identifier[targets] ) keyword[if] identifier[encoder] keyword[else] identifier[targets] )
identifier[nonpadding_input_tokens] += identifier[len] ( identifier[inputs] )- identifier[inputs] . identifier[count] ( literal[int] )
identifier[nonpadding_target_tokens] += identifier[len] ( identifier[targets] )- identifier[targets] . identifier[count] ( literal[int] )
identifier[total_input_tokens] += identifier[len] ( identifier[inputs] )
identifier[total_target_tokens] += identifier[len] ( identifier[targets] )
identifier[total_sequences] += literal[int]
identifier[max_input_length] = identifier[max] ( identifier[max_input_length] , identifier[len] ( identifier[inputs] ))
identifier[max_target_length] = identifier[max] ( identifier[max_target_length] , identifier[len] ( identifier[targets] ))
keyword[if] identifier[FLAGS] . identifier[print_all] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[x] . identifier[features] . identifier[feature] ):
identifier[print] ( literal[string] %( identifier[k] , identifier[v] . identifier[int64_list] . identifier[value] ))
identifier[print] ( literal[string] % identifier[total_sequences] )
identifier[print] ( literal[string] % identifier[total_input_tokens] )
identifier[print] ( literal[string] % identifier[total_target_tokens] )
identifier[print] ( literal[string] % identifier[nonpadding_input_tokens] )
identifier[print] ( literal[string] % identifier[nonpadding_target_tokens] )
identifier[print] ( literal[string] % identifier[max_input_length] )
identifier[print] ( literal[string] % identifier[max_target_length] ) | def main(_):
"""Convert a file to examples."""
if FLAGS.subword_text_encoder_filename:
encoder = text_encoder.SubwordTextEncoder(FLAGS.subword_text_encoder_filename) # depends on [control=['if'], data=[]]
elif FLAGS.token_text_encoder_filename:
encoder = text_encoder.TokenTextEncoder(FLAGS.token_text_encoder_filename) # depends on [control=['if'], data=[]]
elif FLAGS.byte_text_encoder:
encoder = text_encoder.ByteTextEncoder() # depends on [control=['if'], data=[]]
else:
encoder = None
reader = tf.python_io.tf_record_iterator(FLAGS.input_filename)
total_sequences = 0
total_input_tokens = 0
total_target_tokens = 0
nonpadding_input_tokens = 0
nonpadding_target_tokens = 0
max_input_length = 0
max_target_length = 0
for record in reader:
x = tf.train.Example()
x.ParseFromString(record)
inputs = [int(i) for i in x.features.feature['inputs'].int64_list.value]
targets = [int(i) for i in x.features.feature['targets'].int64_list.value]
if FLAGS.print_inputs:
print('INPUTS:\n' + encoder.decode(inputs) if encoder else inputs) # depends on [control=['if'], data=[]]
if FLAGS.print_targets:
print('TARGETS:\n' + encoder.decode(targets) if encoder else targets) # depends on [control=['if'], data=[]]
nonpadding_input_tokens += len(inputs) - inputs.count(0)
nonpadding_target_tokens += len(targets) - targets.count(0)
total_input_tokens += len(inputs)
total_target_tokens += len(targets)
total_sequences += 1
max_input_length = max(max_input_length, len(inputs))
max_target_length = max(max_target_length, len(targets))
if FLAGS.print_all:
for (k, v) in six.iteritems(x.features.feature):
print('%s: %s' % (k, v.int64_list.value)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['record']]
print('total_sequences: %d' % total_sequences)
print('total_input_tokens: %d' % total_input_tokens)
print('total_target_tokens: %d' % total_target_tokens)
print('nonpadding_input_tokens: %d' % nonpadding_input_tokens)
print('nonpadding_target_tokens: %d' % nonpadding_target_tokens)
print('max_input_length: %d' % max_input_length)
print('max_target_length: %d' % max_target_length) |
def validate_fields(func):
"""A decorator to automatically detect missing required fields from
json data."""
@functools.wraps(func)
def decorated(instance, *args, **kwargs):
"""The decorator function."""
data = request.get_json(force=True, silent=True)
if not data:
raise BadRequestException('No data received from request')
for key in data:
if key not in (
instance.__model__.required() +
instance.__model__.optional()):
raise BadRequestException('Unknown field [{}]'.format(key))
missing = set(instance.__model__.required()) - set(data)
if missing:
message = 'The following required fields are missing: ' + ', '.join(missing)
raise BadRequestException(message)
return func(instance, *args, **kwargs)
return decorated | def function[validate_fields, parameter[func]]:
constant[A decorator to automatically detect missing required fields from
json data.]
def function[decorated, parameter[instance]]:
constant[The decorator function.]
variable[data] assign[=] call[name[request].get_json, parameter[]]
if <ast.UnaryOp object at 0x7da20c6e5840> begin[:]
<ast.Raise object at 0x7da20c6e7be0>
for taget[name[key]] in starred[name[data]] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> binary_operation[call[name[instance].__model__.required, parameter[]] + call[name[instance].__model__.optional, parameter[]]]] begin[:]
<ast.Raise object at 0x7da20c6e56f0>
variable[missing] assign[=] binary_operation[call[name[set], parameter[call[name[instance].__model__.required, parameter[]]]] - call[name[set], parameter[name[data]]]]
if name[missing] begin[:]
variable[message] assign[=] binary_operation[constant[The following required fields are missing: ] + call[constant[, ].join, parameter[name[missing]]]]
<ast.Raise object at 0x7da20c76dea0>
return[call[name[func], parameter[name[instance], <ast.Starred object at 0x7da20c76d540>]]]
return[name[decorated]] | keyword[def] identifier[validate_fields] ( identifier[func] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[decorated] ( identifier[instance] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[data] = identifier[request] . identifier[get_json] ( identifier[force] = keyword[True] , identifier[silent] = keyword[True] )
keyword[if] keyword[not] identifier[data] :
keyword[raise] identifier[BadRequestException] ( literal[string] )
keyword[for] identifier[key] keyword[in] identifier[data] :
keyword[if] identifier[key] keyword[not] keyword[in] (
identifier[instance] . identifier[__model__] . identifier[required] ()+
identifier[instance] . identifier[__model__] . identifier[optional] ()):
keyword[raise] identifier[BadRequestException] ( literal[string] . identifier[format] ( identifier[key] ))
identifier[missing] = identifier[set] ( identifier[instance] . identifier[__model__] . identifier[required] ())- identifier[set] ( identifier[data] )
keyword[if] identifier[missing] :
identifier[message] = literal[string] + literal[string] . identifier[join] ( identifier[missing] )
keyword[raise] identifier[BadRequestException] ( identifier[message] )
keyword[return] identifier[func] ( identifier[instance] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[decorated] | def validate_fields(func):
"""A decorator to automatically detect missing required fields from
json data."""
@functools.wraps(func)
def decorated(instance, *args, **kwargs):
"""The decorator function."""
data = request.get_json(force=True, silent=True)
if not data:
raise BadRequestException('No data received from request') # depends on [control=['if'], data=[]]
for key in data:
if key not in instance.__model__.required() + instance.__model__.optional():
raise BadRequestException('Unknown field [{}]'.format(key)) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']]
missing = set(instance.__model__.required()) - set(data)
if missing:
message = 'The following required fields are missing: ' + ', '.join(missing)
raise BadRequestException(message) # depends on [control=['if'], data=[]]
return func(instance, *args, **kwargs)
return decorated |
def data(self, column, role):
"""Return the data for the specified column and role
Column 0: The name of the action
Column 1: The description of the action
Column 2: The status value
Column 3: The status message
Column 4: The traceback
:param column: the data column
:type column: int
:param role: the data role
:type role: QtCore.Qt.ItemDataRole
:returns: data depending on the role, or None if the column is out of range
:rtype: depending on the role or None
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
if column == 0:
return self._au.name
if column == 1:
return self._au.description
if column == 2:
return self._au.status.value
if column == 3:
return self._au.status.message
if column == 4:
return self._au.status.traceback
if role == QtCore.Qt.ForegroundRole:
if column == 2:
return self.colormapping.get(self._au.status.value) | def function[data, parameter[self, column, role]]:
constant[Return the data for the specified column and role
Column 0: The name of the action
Column 1: The description of the action
Column 2: The status value
Column 3: The status message
Column 4: The traceback
:param column: the data column
:type column: int
:param role: the data role
:type role: QtCore.Qt.ItemDataRole
:returns: data depending on the role, or None if the column is out of range
:rtype: depending on the role or None
:raises: None
]
if compare[name[role] equal[==] name[QtCore].Qt.DisplayRole] begin[:]
if compare[name[column] equal[==] constant[0]] begin[:]
return[name[self]._au.name]
if compare[name[column] equal[==] constant[1]] begin[:]
return[name[self]._au.description]
if compare[name[column] equal[==] constant[2]] begin[:]
return[name[self]._au.status.value]
if compare[name[column] equal[==] constant[3]] begin[:]
return[name[self]._au.status.message]
if compare[name[column] equal[==] constant[4]] begin[:]
return[name[self]._au.status.traceback]
if compare[name[role] equal[==] name[QtCore].Qt.ForegroundRole] begin[:]
if compare[name[column] equal[==] constant[2]] begin[:]
return[call[name[self].colormapping.get, parameter[name[self]._au.status.value]]] | keyword[def] identifier[data] ( identifier[self] , identifier[column] , identifier[role] ):
literal[string]
keyword[if] identifier[role] == identifier[QtCore] . identifier[Qt] . identifier[DisplayRole] :
keyword[if] identifier[column] == literal[int] :
keyword[return] identifier[self] . identifier[_au] . identifier[name]
keyword[if] identifier[column] == literal[int] :
keyword[return] identifier[self] . identifier[_au] . identifier[description]
keyword[if] identifier[column] == literal[int] :
keyword[return] identifier[self] . identifier[_au] . identifier[status] . identifier[value]
keyword[if] identifier[column] == literal[int] :
keyword[return] identifier[self] . identifier[_au] . identifier[status] . identifier[message]
keyword[if] identifier[column] == literal[int] :
keyword[return] identifier[self] . identifier[_au] . identifier[status] . identifier[traceback]
keyword[if] identifier[role] == identifier[QtCore] . identifier[Qt] . identifier[ForegroundRole] :
keyword[if] identifier[column] == literal[int] :
keyword[return] identifier[self] . identifier[colormapping] . identifier[get] ( identifier[self] . identifier[_au] . identifier[status] . identifier[value] ) | def data(self, column, role):
"""Return the data for the specified column and role
Column 0: The name of the action
Column 1: The description of the action
Column 2: The status value
Column 3: The status message
Column 4: The traceback
:param column: the data column
:type column: int
:param role: the data role
:type role: QtCore.Qt.ItemDataRole
:returns: data depending on the role, or None if the column is out of range
:rtype: depending on the role or None
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
if column == 0:
return self._au.name # depends on [control=['if'], data=[]]
if column == 1:
return self._au.description # depends on [control=['if'], data=[]]
if column == 2:
return self._au.status.value # depends on [control=['if'], data=[]]
if column == 3:
return self._au.status.message # depends on [control=['if'], data=[]]
if column == 4:
return self._au.status.traceback # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if role == QtCore.Qt.ForegroundRole:
if column == 2:
return self.colormapping.get(self._au.status.value) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def save_sequence_rule_enabler(self, sequence_rule_enabler_form, *args, **kwargs):
"""Pass through to provider SequenceRuleEnablerAdminSession.update_sequence_rule_enabler"""
# Implemented from kitosid template for -
# osid.resource.ResourceAdminSession.update_resource
if sequence_rule_enabler_form.is_for_update():
return self.update_sequence_rule_enabler(sequence_rule_enabler_form, *args, **kwargs)
else:
return self.create_sequence_rule_enabler(sequence_rule_enabler_form, *args, **kwargs) | def function[save_sequence_rule_enabler, parameter[self, sequence_rule_enabler_form]]:
constant[Pass through to provider SequenceRuleEnablerAdminSession.update_sequence_rule_enabler]
if call[name[sequence_rule_enabler_form].is_for_update, parameter[]] begin[:]
return[call[name[self].update_sequence_rule_enabler, parameter[name[sequence_rule_enabler_form], <ast.Starred object at 0x7da204960e50>]]] | keyword[def] identifier[save_sequence_rule_enabler] ( identifier[self] , identifier[sequence_rule_enabler_form] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[sequence_rule_enabler_form] . identifier[is_for_update] ():
keyword[return] identifier[self] . identifier[update_sequence_rule_enabler] ( identifier[sequence_rule_enabler_form] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
keyword[return] identifier[self] . identifier[create_sequence_rule_enabler] ( identifier[sequence_rule_enabler_form] ,* identifier[args] ,** identifier[kwargs] ) | def save_sequence_rule_enabler(self, sequence_rule_enabler_form, *args, **kwargs):
"""Pass through to provider SequenceRuleEnablerAdminSession.update_sequence_rule_enabler"""
# Implemented from kitosid template for -
# osid.resource.ResourceAdminSession.update_resource
if sequence_rule_enabler_form.is_for_update():
return self.update_sequence_rule_enabler(sequence_rule_enabler_form, *args, **kwargs) # depends on [control=['if'], data=[]]
else:
return self.create_sequence_rule_enabler(sequence_rule_enabler_form, *args, **kwargs) |
def _program_files_from_executable(self, executable, required_paths, parent_dir=False):
        """
        Resolve a list of path patterns relative to a tool's executable.

        Helper for implementing program_files(): every pattern in
        required_paths is expanded relative to the directory containing
        ``executable`` (or that directory's parent when ``parent_dir``
        is True).  Unlike the default implementation of
        program_files(), the executable itself is not added explicitly;
        required_paths is expected to contain a pattern covering it.
        @param executable: the path to the executable of the tool (typically the result of executable())
        @param required_paths: a list of required path patterns
        @param parent_dir: whether required_paths are relative to the directory of executable or the parent directory
        @return a list of paths as strings, suitable for result of program_files()
        """
        search_root = os.path.dirname(executable)
        if parent_dir:
            search_root = os.path.join(search_root, os.path.pardir)
        expanded = (util.expand_filename_pattern(pattern, search_root)
                    for pattern in required_paths)
        return util.flatten(expanded)
constant[
Get a list of program files by expanding a list of path patterns
and interpreting it as relative to the executable.
This method can be used as helper for implementing the method program_files().
Contrary to the default implementation of program_files(), this method does not explicitly
add the executable to the list of returned files, it assumes that required_paths
contains a path that covers the executable.
@param executable: the path to the executable of the tool (typically the result of executable())
@param required_paths: a list of required path patterns
@param parent_dir: whether required_paths are relative to the directory of executable or the parent directory
@return a list of paths as strings, suitable for result of program_files()
]
variable[base_dir] assign[=] call[name[os].path.dirname, parameter[name[executable]]]
if name[parent_dir] begin[:]
variable[base_dir] assign[=] call[name[os].path.join, parameter[name[base_dir], name[os].path.pardir]]
return[call[name[util].flatten, parameter[<ast.GeneratorExp object at 0x7da20c6c6140>]]] | keyword[def] identifier[_program_files_from_executable] ( identifier[self] , identifier[executable] , identifier[required_paths] , identifier[parent_dir] = keyword[False] ):
literal[string]
identifier[base_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[executable] )
keyword[if] identifier[parent_dir] :
identifier[base_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[base_dir] , identifier[os] . identifier[path] . identifier[pardir] )
keyword[return] identifier[util] . identifier[flatten] (
identifier[util] . identifier[expand_filename_pattern] ( identifier[path] , identifier[base_dir] ) keyword[for] identifier[path] keyword[in] identifier[required_paths] ) | def _program_files_from_executable(self, executable, required_paths, parent_dir=False):
"""
Get a list of program files by expanding a list of path patterns
and interpreting it as relative to the executable.
This method can be used as helper for implementing the method program_files().
Contrary to the default implementation of program_files(), this method does not explicitly
add the executable to the list of returned files, it assumes that required_paths
contains a path that covers the executable.
@param executable: the path to the executable of the tool (typically the result of executable())
@param required_paths: a list of required path patterns
@param parent_dir: whether required_paths are relative to the directory of executable or the parent directory
@return a list of paths as strings, suitable for result of program_files()
"""
base_dir = os.path.dirname(executable)
if parent_dir:
base_dir = os.path.join(base_dir, os.path.pardir) # depends on [control=['if'], data=[]]
return util.flatten((util.expand_filename_pattern(path, base_dir) for path in required_paths)) |
def find_locales(self) -> Dict[str, gettext.GNUTranslations]:
        """
        Load all compiled locales from path.

        Scans every subdirectory of ``self.path``; a subdirectory whose
        ``LC_MESSAGES/<domain>.mo`` file exists is loaded as a locale.

        :return: dict mapping locale name to its loaded translations
        :raise RuntimeError: if a locale directory has an uncompiled
            ``.po`` catalog but no matching ``.mo`` file
        """
        translations = {}
        for name in os.listdir(self.path):
            # Skip plain files in the locales directory; only
            # subdirectories can hold a locale.
            if not os.path.isdir(os.path.join(self.path, name)):
                continue
            mo_path = os.path.join(self.path, name, 'LC_MESSAGES', self.domain + '.mo')
            if os.path.exists(mo_path):
                with open(mo_path, 'rb') as fp:
                    translations[name] = gettext.GNUTranslations(fp)
            elif os.path.exists(mo_path[:-2] + 'po'):
                # Bug fix: the original message was missing the closing
                # quote after the locale name.
                raise RuntimeError(f"Found locale '{name}' but this language is not compiled!")
        return translations
constant[
Load all compiled locales from path
:return: dict with locales
]
variable[translations] assign[=] dictionary[[], []]
for taget[name[name]] in starred[call[name[os].listdir, parameter[name[self].path]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1845bd0> begin[:]
continue
variable[mo_path] assign[=] call[name[os].path.join, parameter[name[self].path, name[name], constant[LC_MESSAGES], binary_operation[name[self].domain + constant[.mo]]]]
if call[name[os].path.exists, parameter[name[mo_path]]] begin[:]
with call[name[open], parameter[name[mo_path], constant[rb]]] begin[:]
call[name[translations]][name[name]] assign[=] call[name[gettext].GNUTranslations, parameter[name[fp]]]
return[name[translations]] | keyword[def] identifier[find_locales] ( identifier[self] )-> identifier[Dict] [ identifier[str] , identifier[gettext] . identifier[GNUTranslations] ]:
literal[string]
identifier[translations] ={}
keyword[for] identifier[name] keyword[in] identifier[os] . identifier[listdir] ( identifier[self] . identifier[path] ):
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , identifier[name] )):
keyword[continue]
identifier[mo_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , identifier[name] , literal[string] , identifier[self] . identifier[domain] + literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[mo_path] ):
keyword[with] identifier[open] ( identifier[mo_path] , literal[string] ) keyword[as] identifier[fp] :
identifier[translations] [ identifier[name] ]= identifier[gettext] . identifier[GNUTranslations] ( identifier[fp] )
keyword[elif] identifier[os] . identifier[path] . identifier[exists] ( identifier[mo_path] [:- literal[int] ]+ literal[string] ):
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[return] identifier[translations] | def find_locales(self) -> Dict[str, gettext.GNUTranslations]:
"""
Load all compiled locales from path
:return: dict with locales
"""
translations = {}
for name in os.listdir(self.path):
if not os.path.isdir(os.path.join(self.path, name)):
continue # depends on [control=['if'], data=[]]
mo_path = os.path.join(self.path, name, 'LC_MESSAGES', self.domain + '.mo')
if os.path.exists(mo_path):
with open(mo_path, 'rb') as fp:
translations[name] = gettext.GNUTranslations(fp) # depends on [control=['with'], data=['fp']] # depends on [control=['if'], data=[]]
elif os.path.exists(mo_path[:-2] + 'po'):
raise RuntimeError(f"Found locale '{name} but this language is not compiled!") # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
return translations |
def run_sync(self, func, timeout=None):
        """Starts the `IOLoop`, runs the given function, and stops the loop.
        The function must return either a yieldable object or
        ``None``. If the function returns a yieldable object, the
        `IOLoop` will run until the yieldable is resolved (and
        `run_sync()` will return the yieldable's result). If it raises
        an exception, the `IOLoop` will stop and the exception will be
        re-raised to the caller.
        The keyword-only argument ``timeout`` may be used to set
        a maximum duration for the function. If the timeout expires,
        a `tornado.util.TimeoutError` is raised.
        This method is useful in conjunction with `tornado.gen.coroutine`
        to allow asynchronous calls in a ``main()`` function::
        @gen.coroutine
        def main():
            # do stuff...
        if __name__ == '__main__':
            IOLoop.current().run_sync(main)
        .. versionchanged:: 4.3
        Returning a non-``None``, non-yieldable value is now an error.
        """
        # One-element list used as a mutable cell so the nested ``run``
        # closure can hand its future back to this frame without ``nonlocal``.
        future_cell = [None]
        def run():
            try:
                result = func()
                if result is not None:
                    # Imported lazily — presumably to avoid a circular import
                    # with the .gen module (TODO confirm).
                    from .gen import convert_yielded
                    # Converts any yieldable into a future; a non-None,
                    # non-yieldable result makes this raise, which lands in
                    # the except branch below (the 4.3 behavior change).
                    result = convert_yielded(result)
            except Exception:
                # func() (or the conversion) raised: capture the exception in
                # a future so result() re-raises it to the run_sync caller.
                future_cell[0] = TracebackFuture()
                future_cell[0].set_exc_info(sys.exc_info())
            else:
                if is_future(result):
                    future_cell[0] = result
                else:
                    # ``None`` result: wrap it in an already-resolved future
                    # so the code below can treat all outcomes uniformly.
                    future_cell[0] = TracebackFuture()
                    future_cell[0].set_result(result)
            # Stop the loop as soon as the future resolves.
            self.add_future(future_cell[0], lambda future: self.stop())
        self.add_callback(run)
        if timeout is not None:
            # Stop the loop after ``timeout`` seconds even if the future is
            # still pending; that case is detected below via done().
            timeout_handle = self.add_timeout(self.time() + timeout, self.stop)
        # Blocks here until self.stop() is called (by completion or timeout).
        self.start()
        if timeout is not None:
            self.remove_timeout(timeout_handle)
        if not future_cell[0].done():
            # The loop was stopped by the timeout callback, not by completion.
            raise TimeoutError('Operation timed out after %s seconds' % timeout)
        # Returns the result, or re-raises the captured exception.
        return future_cell[0].result()
constant[Starts the `IOLoop`, runs the given function, and stops the loop.
The function must return either a yieldable object or
``None``. If the function returns a yieldable object, the
`IOLoop` will run until the yieldable is resolved (and
`run_sync()` will return the yieldable's result). If it raises
an exception, the `IOLoop` will stop and the exception will be
re-raised to the caller.
The keyword-only argument ``timeout`` may be used to set
a maximum duration for the function. If the timeout expires,
a `tornado.util.TimeoutError` is raised.
This method is useful in conjunction with `tornado.gen.coroutine`
to allow asynchronous calls in a ``main()`` function::
@gen.coroutine
def main():
# do stuff...
if __name__ == '__main__':
IOLoop.current().run_sync(main)
.. versionchanged:: 4.3
Returning a non-``None``, non-yieldable value is now an error.
]
variable[future_cell] assign[=] list[[<ast.Constant object at 0x7da1b1c601c0>]]
def function[run, parameter[]]:
<ast.Try object at 0x7da1b1c61990>
call[name[self].add_future, parameter[call[name[future_cell]][constant[0]], <ast.Lambda object at 0x7da1b1c61750>]]
call[name[self].add_callback, parameter[name[run]]]
if compare[name[timeout] is_not constant[None]] begin[:]
variable[timeout_handle] assign[=] call[name[self].add_timeout, parameter[binary_operation[call[name[self].time, parameter[]] + name[timeout]], name[self].stop]]
call[name[self].start, parameter[]]
if compare[name[timeout] is_not constant[None]] begin[:]
call[name[self].remove_timeout, parameter[name[timeout_handle]]]
if <ast.UnaryOp object at 0x7da1b1b7e7a0> begin[:]
<ast.Raise object at 0x7da1b1b7d2d0>
return[call[call[name[future_cell]][constant[0]].result, parameter[]]] | keyword[def] identifier[run_sync] ( identifier[self] , identifier[func] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[future_cell] =[ keyword[None] ]
keyword[def] identifier[run] ():
keyword[try] :
identifier[result] = identifier[func] ()
keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] :
keyword[from] . identifier[gen] keyword[import] identifier[convert_yielded]
identifier[result] = identifier[convert_yielded] ( identifier[result] )
keyword[except] identifier[Exception] :
identifier[future_cell] [ literal[int] ]= identifier[TracebackFuture] ()
identifier[future_cell] [ literal[int] ]. identifier[set_exc_info] ( identifier[sys] . identifier[exc_info] ())
keyword[else] :
keyword[if] identifier[is_future] ( identifier[result] ):
identifier[future_cell] [ literal[int] ]= identifier[result]
keyword[else] :
identifier[future_cell] [ literal[int] ]= identifier[TracebackFuture] ()
identifier[future_cell] [ literal[int] ]. identifier[set_result] ( identifier[result] )
identifier[self] . identifier[add_future] ( identifier[future_cell] [ literal[int] ], keyword[lambda] identifier[future] : identifier[self] . identifier[stop] ())
identifier[self] . identifier[add_callback] ( identifier[run] )
keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] :
identifier[timeout_handle] = identifier[self] . identifier[add_timeout] ( identifier[self] . identifier[time] ()+ identifier[timeout] , identifier[self] . identifier[stop] )
identifier[self] . identifier[start] ()
keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[remove_timeout] ( identifier[timeout_handle] )
keyword[if] keyword[not] identifier[future_cell] [ literal[int] ]. identifier[done] ():
keyword[raise] identifier[TimeoutError] ( literal[string] % identifier[timeout] )
keyword[return] identifier[future_cell] [ literal[int] ]. identifier[result] () | def run_sync(self, func, timeout=None):
"""Starts the `IOLoop`, runs the given function, and stops the loop.
The function must return either a yieldable object or
``None``. If the function returns a yieldable object, the
`IOLoop` will run until the yieldable is resolved (and
`run_sync()` will return the yieldable's result). If it raises
an exception, the `IOLoop` will stop and the exception will be
re-raised to the caller.
The keyword-only argument ``timeout`` may be used to set
a maximum duration for the function. If the timeout expires,
a `tornado.util.TimeoutError` is raised.
This method is useful in conjunction with `tornado.gen.coroutine`
to allow asynchronous calls in a ``main()`` function::
@gen.coroutine
def main():
# do stuff...
if __name__ == '__main__':
IOLoop.current().run_sync(main)
.. versionchanged:: 4.3
Returning a non-``None``, non-yieldable value is now an error.
"""
future_cell = [None]
def run():
try:
result = func()
if result is not None:
from .gen import convert_yielded
result = convert_yielded(result) # depends on [control=['if'], data=['result']] # depends on [control=['try'], data=[]]
except Exception:
future_cell[0] = TracebackFuture()
future_cell[0].set_exc_info(sys.exc_info()) # depends on [control=['except'], data=[]]
else:
if is_future(result):
future_cell[0] = result # depends on [control=['if'], data=[]]
else:
future_cell[0] = TracebackFuture()
future_cell[0].set_result(result)
self.add_future(future_cell[0], lambda future: self.stop())
self.add_callback(run)
if timeout is not None:
timeout_handle = self.add_timeout(self.time() + timeout, self.stop) # depends on [control=['if'], data=['timeout']]
self.start()
if timeout is not None:
self.remove_timeout(timeout_handle) # depends on [control=['if'], data=[]]
if not future_cell[0].done():
raise TimeoutError('Operation timed out after %s seconds' % timeout) # depends on [control=['if'], data=[]]
return future_cell[0].result() |
def add_maxjobs_category(self,categoryName,maxJobsNum):
        """
        Add a category to this DAG called categoryName with a maxjobs of maxJobsNum.
        @param node: Add (categoryName,maxJobsNum) tuple to CondorDAG.__maxjobs_categories.
        """
        # Both values are stored in string form, matching how the DAG
        # file is later written out.
        entry = (str(categoryName), str(maxJobsNum))
        self.__maxjobs_categories.append(entry)
constant[
Add a category to this DAG called categoryName with a maxjobs of maxJobsNum.
@param node: Add (categoryName,maxJobsNum) tuple to CondorDAG.__maxjobs_categories.
]
call[name[self].__maxjobs_categories.append, parameter[tuple[[<ast.Call object at 0x7da1b0b10bb0>, <ast.Call object at 0x7da1b0b118a0>]]]] | keyword[def] identifier[add_maxjobs_category] ( identifier[self] , identifier[categoryName] , identifier[maxJobsNum] ):
literal[string]
identifier[self] . identifier[__maxjobs_categories] . identifier[append] (( identifier[str] ( identifier[categoryName] ), identifier[str] ( identifier[maxJobsNum] ))) | def add_maxjobs_category(self, categoryName, maxJobsNum):
"""
Add a category to this DAG called categoryName with a maxjobs of maxJobsNum.
@param node: Add (categoryName,maxJobsNum) tuple to CondorDAG.__maxjobs_categories.
"""
self.__maxjobs_categories.append((str(categoryName), str(maxJobsNum))) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.