code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _fuzzy_custom_query(issn, titles):
"""
Este metodo constroi a lista de filtros por título de periódico que
será aplicada na pesquisa boleana como match por similaridade "should".
A lista de filtros é coletada do template de pesquisa customizada
do periódico, quanto este template existir.
"""
custom_queries = journal_titles.load(issn).get('should', [])
titles = [{'title': i} for i in titles if i not in [x['title'] for x in custom_queries]]
titles.extend(custom_queries)
for item in titles:
if len(item['title'].strip()) == 0:
continue
query = {
"fuzzy": {
"reference_source_cleaned": {
"value": utils.cleanup_string(item['title']),
"fuzziness": item.get('fuzziness', 3),
"max_expansions": 50
}
}
}
yield query | def function[_fuzzy_custom_query, parameter[issn, titles]]:
constant[
Este metodo constroi a lista de filtros por título de periódico que
será aplicada na pesquisa boleana como match por similaridade "should".
A lista de filtros é coletada do template de pesquisa customizada
do periódico, quanto este template existir.
]
variable[custom_queries] assign[=] call[call[name[journal_titles].load, parameter[name[issn]]].get, parameter[constant[should], list[[]]]]
variable[titles] assign[=] <ast.ListComp object at 0x7da204566a40>
call[name[titles].extend, parameter[name[custom_queries]]]
for taget[name[item]] in starred[name[titles]] begin[:]
if compare[call[name[len], parameter[call[call[name[item]][constant[title]].strip, parameter[]]]] equal[==] constant[0]] begin[:]
continue
variable[query] assign[=] dictionary[[<ast.Constant object at 0x7da2045655a0>], [<ast.Dict object at 0x7da204565ba0>]]
<ast.Yield object at 0x7da204566470> | keyword[def] identifier[_fuzzy_custom_query] ( identifier[issn] , identifier[titles] ):
literal[string]
identifier[custom_queries] = identifier[journal_titles] . identifier[load] ( identifier[issn] ). identifier[get] ( literal[string] ,[])
identifier[titles] =[{ literal[string] : identifier[i] } keyword[for] identifier[i] keyword[in] identifier[titles] keyword[if] identifier[i] keyword[not] keyword[in] [ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[custom_queries] ]]
identifier[titles] . identifier[extend] ( identifier[custom_queries] )
keyword[for] identifier[item] keyword[in] identifier[titles] :
keyword[if] identifier[len] ( identifier[item] [ literal[string] ]. identifier[strip] ())== literal[int] :
keyword[continue]
identifier[query] ={
literal[string] :{
literal[string] :{
literal[string] : identifier[utils] . identifier[cleanup_string] ( identifier[item] [ literal[string] ]),
literal[string] : identifier[item] . identifier[get] ( literal[string] , literal[int] ),
literal[string] : literal[int]
}
}
}
keyword[yield] identifier[query] | def _fuzzy_custom_query(issn, titles):
"""
Este metodo constroi a lista de filtros por título de periódico que
será aplicada na pesquisa boleana como match por similaridade "should".
A lista de filtros é coletada do template de pesquisa customizada
do periódico, quanto este template existir.
"""
custom_queries = journal_titles.load(issn).get('should', [])
titles = [{'title': i} for i in titles if i not in [x['title'] for x in custom_queries]]
titles.extend(custom_queries)
for item in titles:
if len(item['title'].strip()) == 0:
continue # depends on [control=['if'], data=[]]
query = {'fuzzy': {'reference_source_cleaned': {'value': utils.cleanup_string(item['title']), 'fuzziness': item.get('fuzziness', 3), 'max_expansions': 50}}}
yield query # depends on [control=['for'], data=['item']] |
def _MakePackagePages(self, package, showprivate=False, nested=False, showinh=False):
"""An internal helper to generate all of the pages for a given package
Args:
package (module): The top-level package to document
showprivate (bool): A flag for whether or not to display private members
nested (bool): Foor internal use ONLY
Returns:
str: The file names ready to be appended to a top-level toctree
"""
def checkNoNested(mod):
try:
all = mod.__all__
except AttributeError:
return False
mems = inspect.getmembers(mod, inspect.ismodule)
mems = [m for m in mems if m[0] in mod.__all__]
if len(mems) > 0:
return False
return True
# Get package module members
mods = inspect.getmembers(package, inspect.ismodule)
# Split into modules and sub-packages
nmods, pvt, npkgs = [], [], []
for mod in mods:
# Deal with private modules
if checkNoNested(mod[1]):
if mod[0][0] == '_': pvt.append(mod)
else: nmods.append(mod)
else: npkgs.append(mod)
if showprivate: nmods += pvt
# for each member that has a nested module
# recurse and keep track of index files for that package
files = []
ignore = []
for pkg in npkgs:
pt = '%s/%s/%s' % (self.path, package.__name__.replace('.', '/'), pkg[1].__name__.split('.')[-1])
if os.path.exists(pt): shutil.rmtree(pt)
os.makedirs(pt)
ignore += inspect.getmembers(pkg[1])
f = self._MakePackagePages(pkg[1], showprivate=showprivate, nested=True, showinh=showinh)
files.append(f.split(package.__name__.replace('.', '/')+'/')[1])
if nested:
try:
name = package.__displayname__
except AttributeError:
name = package.__name__
# Create index file here
index = r'''
%s
%s
.. toctree::
:maxdepth: 5
''' % (name, '*' * len(name))
# include sub packages first
index += '\n '.join(files)
# then include modules
index += '\n ' + self._ProduceContent(nmods, showprivate=showprivate, showinh=showinh)
findex = 'content/%s/index.rst' % (package.__name__.replace('.', '/'))
# Write the file
with open(findex, 'w') as f:
if package.__doc__: f.write(package.__doc__)
f.write(index)
# return filename for index file at package level
return '\n ' + findex
# Not nested: return all files
names = '\n %s/%s/' % ( self.path, package.__name__.replace('.', '/'))
nmods = [m for m in nmods if m not in ignore]
return names.join(self._ProduceContent(nmods, showprivate=showprivate, showinh=showinh).split('\n ')+files) | def function[_MakePackagePages, parameter[self, package, showprivate, nested, showinh]]:
constant[An internal helper to generate all of the pages for a given package
Args:
package (module): The top-level package to document
showprivate (bool): A flag for whether or not to display private members
nested (bool): Foor internal use ONLY
Returns:
str: The file names ready to be appended to a top-level toctree
]
def function[checkNoNested, parameter[mod]]:
<ast.Try object at 0x7da2054a5720>
variable[mems] assign[=] call[name[inspect].getmembers, parameter[name[mod], name[inspect].ismodule]]
variable[mems] assign[=] <ast.ListComp object at 0x7da2054a7790>
if compare[call[name[len], parameter[name[mems]]] greater[>] constant[0]] begin[:]
return[constant[False]]
return[constant[True]]
variable[mods] assign[=] call[name[inspect].getmembers, parameter[name[package], name[inspect].ismodule]]
<ast.Tuple object at 0x7da2044c1660> assign[=] tuple[[<ast.List object at 0x7da2044c0760>, <ast.List object at 0x7da2044c2aa0>, <ast.List object at 0x7da2044c1ab0>]]
for taget[name[mod]] in starred[name[mods]] begin[:]
if call[name[checkNoNested], parameter[call[name[mod]][constant[1]]]] begin[:]
if compare[call[call[name[mod]][constant[0]]][constant[0]] equal[==] constant[_]] begin[:]
call[name[pvt].append, parameter[name[mod]]]
if name[showprivate] begin[:]
<ast.AugAssign object at 0x7da2044c1e10>
variable[files] assign[=] list[[]]
variable[ignore] assign[=] list[[]]
for taget[name[pkg]] in starred[name[npkgs]] begin[:]
variable[pt] assign[=] binary_operation[constant[%s/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da2044c1ae0>, <ast.Call object at 0x7da2044c1ed0>, <ast.Subscript object at 0x7da2044c1d80>]]]
if call[name[os].path.exists, parameter[name[pt]]] begin[:]
call[name[shutil].rmtree, parameter[name[pt]]]
call[name[os].makedirs, parameter[name[pt]]]
<ast.AugAssign object at 0x7da2044c2980>
variable[f] assign[=] call[name[self]._MakePackagePages, parameter[call[name[pkg]][constant[1]]]]
call[name[files].append, parameter[call[call[name[f].split, parameter[binary_operation[call[name[package].__name__.replace, parameter[constant[.], constant[/]]] + constant[/]]]]][constant[1]]]]
if name[nested] begin[:]
<ast.Try object at 0x7da18fe934c0>
variable[index] assign[=] binary_operation[constant[
%s
%s
.. toctree::
:maxdepth: 5
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18fe91570>, <ast.BinOp object at 0x7da18fe92e60>]]]
<ast.AugAssign object at 0x7da18fe93d90>
<ast.AugAssign object at 0x7da18fe91930>
variable[findex] assign[=] binary_operation[constant[content/%s/index.rst] <ast.Mod object at 0x7da2590d6920> call[name[package].__name__.replace, parameter[constant[.], constant[/]]]]
with call[name[open], parameter[name[findex], constant[w]]] begin[:]
if name[package].__doc__ begin[:]
call[name[f].write, parameter[name[package].__doc__]]
call[name[f].write, parameter[name[index]]]
return[binary_operation[constant[
] + name[findex]]]
variable[names] assign[=] binary_operation[constant[
%s/%s/] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b20f95a0>, <ast.Call object at 0x7da1b20f9c90>]]]
variable[nmods] assign[=] <ast.ListComp object at 0x7da1b20fbd90>
return[call[name[names].join, parameter[binary_operation[call[call[name[self]._ProduceContent, parameter[name[nmods]]].split, parameter[constant[
]]] + name[files]]]]] | keyword[def] identifier[_MakePackagePages] ( identifier[self] , identifier[package] , identifier[showprivate] = keyword[False] , identifier[nested] = keyword[False] , identifier[showinh] = keyword[False] ):
literal[string]
keyword[def] identifier[checkNoNested] ( identifier[mod] ):
keyword[try] :
identifier[all] = identifier[mod] . identifier[__all__]
keyword[except] identifier[AttributeError] :
keyword[return] keyword[False]
identifier[mems] = identifier[inspect] . identifier[getmembers] ( identifier[mod] , identifier[inspect] . identifier[ismodule] )
identifier[mems] =[ identifier[m] keyword[for] identifier[m] keyword[in] identifier[mems] keyword[if] identifier[m] [ literal[int] ] keyword[in] identifier[mod] . identifier[__all__] ]
keyword[if] identifier[len] ( identifier[mems] )> literal[int] :
keyword[return] keyword[False]
keyword[return] keyword[True]
identifier[mods] = identifier[inspect] . identifier[getmembers] ( identifier[package] , identifier[inspect] . identifier[ismodule] )
identifier[nmods] , identifier[pvt] , identifier[npkgs] =[],[],[]
keyword[for] identifier[mod] keyword[in] identifier[mods] :
keyword[if] identifier[checkNoNested] ( identifier[mod] [ literal[int] ]):
keyword[if] identifier[mod] [ literal[int] ][ literal[int] ]== literal[string] : identifier[pvt] . identifier[append] ( identifier[mod] )
keyword[else] : identifier[nmods] . identifier[append] ( identifier[mod] )
keyword[else] : identifier[npkgs] . identifier[append] ( identifier[mod] )
keyword[if] identifier[showprivate] : identifier[nmods] += identifier[pvt]
identifier[files] =[]
identifier[ignore] =[]
keyword[for] identifier[pkg] keyword[in] identifier[npkgs] :
identifier[pt] = literal[string] %( identifier[self] . identifier[path] , identifier[package] . identifier[__name__] . identifier[replace] ( literal[string] , literal[string] ), identifier[pkg] [ literal[int] ]. identifier[__name__] . identifier[split] ( literal[string] )[- literal[int] ])
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[pt] ): identifier[shutil] . identifier[rmtree] ( identifier[pt] )
identifier[os] . identifier[makedirs] ( identifier[pt] )
identifier[ignore] += identifier[inspect] . identifier[getmembers] ( identifier[pkg] [ literal[int] ])
identifier[f] = identifier[self] . identifier[_MakePackagePages] ( identifier[pkg] [ literal[int] ], identifier[showprivate] = identifier[showprivate] , identifier[nested] = keyword[True] , identifier[showinh] = identifier[showinh] )
identifier[files] . identifier[append] ( identifier[f] . identifier[split] ( identifier[package] . identifier[__name__] . identifier[replace] ( literal[string] , literal[string] )+ literal[string] )[ literal[int] ])
keyword[if] identifier[nested] :
keyword[try] :
identifier[name] = identifier[package] . identifier[__displayname__]
keyword[except] identifier[AttributeError] :
identifier[name] = identifier[package] . identifier[__name__]
identifier[index] = literal[string] %( identifier[name] , literal[string] * identifier[len] ( identifier[name] ))
identifier[index] += literal[string] . identifier[join] ( identifier[files] )
identifier[index] += literal[string] + identifier[self] . identifier[_ProduceContent] ( identifier[nmods] , identifier[showprivate] = identifier[showprivate] , identifier[showinh] = identifier[showinh] )
identifier[findex] = literal[string] %( identifier[package] . identifier[__name__] . identifier[replace] ( literal[string] , literal[string] ))
keyword[with] identifier[open] ( identifier[findex] , literal[string] ) keyword[as] identifier[f] :
keyword[if] identifier[package] . identifier[__doc__] : identifier[f] . identifier[write] ( identifier[package] . identifier[__doc__] )
identifier[f] . identifier[write] ( identifier[index] )
keyword[return] literal[string] + identifier[findex]
identifier[names] = literal[string] %( identifier[self] . identifier[path] , identifier[package] . identifier[__name__] . identifier[replace] ( literal[string] , literal[string] ))
identifier[nmods] =[ identifier[m] keyword[for] identifier[m] keyword[in] identifier[nmods] keyword[if] identifier[m] keyword[not] keyword[in] identifier[ignore] ]
keyword[return] identifier[names] . identifier[join] ( identifier[self] . identifier[_ProduceContent] ( identifier[nmods] , identifier[showprivate] = identifier[showprivate] , identifier[showinh] = identifier[showinh] ). identifier[split] ( literal[string] )+ identifier[files] ) | def _MakePackagePages(self, package, showprivate=False, nested=False, showinh=False):
"""An internal helper to generate all of the pages for a given package
Args:
package (module): The top-level package to document
showprivate (bool): A flag for whether or not to display private members
nested (bool): Foor internal use ONLY
Returns:
str: The file names ready to be appended to a top-level toctree
"""
def checkNoNested(mod):
try:
all = mod.__all__ # depends on [control=['try'], data=[]]
except AttributeError:
return False # depends on [control=['except'], data=[]]
mems = inspect.getmembers(mod, inspect.ismodule)
mems = [m for m in mems if m[0] in mod.__all__]
if len(mems) > 0:
return False # depends on [control=['if'], data=[]]
return True
# Get package module members
mods = inspect.getmembers(package, inspect.ismodule)
# Split into modules and sub-packages
(nmods, pvt, npkgs) = ([], [], [])
for mod in mods:
# Deal with private modules
if checkNoNested(mod[1]):
if mod[0][0] == '_':
pvt.append(mod) # depends on [control=['if'], data=[]]
else:
nmods.append(mod) # depends on [control=['if'], data=[]]
else:
npkgs.append(mod) # depends on [control=['for'], data=['mod']]
if showprivate:
nmods += pvt # depends on [control=['if'], data=[]]
# for each member that has a nested module
# recurse and keep track of index files for that package
files = []
ignore = []
for pkg in npkgs:
pt = '%s/%s/%s' % (self.path, package.__name__.replace('.', '/'), pkg[1].__name__.split('.')[-1])
if os.path.exists(pt):
shutil.rmtree(pt) # depends on [control=['if'], data=[]]
os.makedirs(pt)
ignore += inspect.getmembers(pkg[1])
f = self._MakePackagePages(pkg[1], showprivate=showprivate, nested=True, showinh=showinh)
files.append(f.split(package.__name__.replace('.', '/') + '/')[1]) # depends on [control=['for'], data=['pkg']]
if nested:
try:
name = package.__displayname__ # depends on [control=['try'], data=[]]
except AttributeError:
name = package.__name__ # depends on [control=['except'], data=[]]
# Create index file here
index = '\n%s\n%s\n\n.. toctree::\n :maxdepth: 5\n\n ' % (name, '*' * len(name))
# include sub packages first
index += '\n '.join(files)
# then include modules
index += '\n ' + self._ProduceContent(nmods, showprivate=showprivate, showinh=showinh)
findex = 'content/%s/index.rst' % package.__name__.replace('.', '/')
# Write the file
with open(findex, 'w') as f:
if package.__doc__:
f.write(package.__doc__) # depends on [control=['if'], data=[]]
f.write(index) # depends on [control=['with'], data=['f']]
# return filename for index file at package level
return '\n ' + findex # depends on [control=['if'], data=[]]
# Not nested: return all files
names = '\n %s/%s/' % (self.path, package.__name__.replace('.', '/'))
nmods = [m for m in nmods if m not in ignore]
return names.join(self._ProduceContent(nmods, showprivate=showprivate, showinh=showinh).split('\n ') + files) |
def Authenticate(self, app_id, challenge_data,
print_callback=sys.stderr.write):
"""See base class."""
# If authenticator is not plugged in, prompt
try:
device = u2f.GetLocalU2FInterface(origin=self.origin)
except errors.NoDeviceFoundError:
print_callback('Please insert your security key and press enter...')
six.moves.input()
device = u2f.GetLocalU2FInterface(origin=self.origin)
print_callback('Please touch your security key.\n')
for challenge_item in challenge_data:
raw_challenge = challenge_item['challenge']
key = challenge_item['key']
try:
result = device.Authenticate(app_id, raw_challenge, [key])
except errors.U2FError as e:
if e.code == errors.U2FError.DEVICE_INELIGIBLE:
continue
else:
raise
client_data = self._base64encode(result.client_data.GetJson().encode())
signature_data = self._base64encode(result.signature_data)
key_handle = self._base64encode(result.key_handle)
return {
'clientData': client_data,
'signatureData': signature_data,
'applicationId': app_id,
'keyHandle': key_handle,
}
raise errors.U2FError(errors.U2FError.DEVICE_INELIGIBLE) | def function[Authenticate, parameter[self, app_id, challenge_data, print_callback]]:
constant[See base class.]
<ast.Try object at 0x7da20c76c130>
call[name[print_callback], parameter[constant[Please touch your security key.
]]]
for taget[name[challenge_item]] in starred[name[challenge_data]] begin[:]
variable[raw_challenge] assign[=] call[name[challenge_item]][constant[challenge]]
variable[key] assign[=] call[name[challenge_item]][constant[key]]
<ast.Try object at 0x7da1b19104f0>
variable[client_data] assign[=] call[name[self]._base64encode, parameter[call[call[name[result].client_data.GetJson, parameter[]].encode, parameter[]]]]
variable[signature_data] assign[=] call[name[self]._base64encode, parameter[name[result].signature_data]]
variable[key_handle] assign[=] call[name[self]._base64encode, parameter[name[result].key_handle]]
return[dictionary[[<ast.Constant object at 0x7da1b1913040>, <ast.Constant object at 0x7da1b1911000>, <ast.Constant object at 0x7da1b1910670>, <ast.Constant object at 0x7da1b19134c0>], [<ast.Name object at 0x7da1b1912380>, <ast.Name object at 0x7da1b1910100>, <ast.Name object at 0x7da1b19131c0>, <ast.Name object at 0x7da1b1912f50>]]]
<ast.Raise object at 0x7da1b1913250> | keyword[def] identifier[Authenticate] ( identifier[self] , identifier[app_id] , identifier[challenge_data] ,
identifier[print_callback] = identifier[sys] . identifier[stderr] . identifier[write] ):
literal[string]
keyword[try] :
identifier[device] = identifier[u2f] . identifier[GetLocalU2FInterface] ( identifier[origin] = identifier[self] . identifier[origin] )
keyword[except] identifier[errors] . identifier[NoDeviceFoundError] :
identifier[print_callback] ( literal[string] )
identifier[six] . identifier[moves] . identifier[input] ()
identifier[device] = identifier[u2f] . identifier[GetLocalU2FInterface] ( identifier[origin] = identifier[self] . identifier[origin] )
identifier[print_callback] ( literal[string] )
keyword[for] identifier[challenge_item] keyword[in] identifier[challenge_data] :
identifier[raw_challenge] = identifier[challenge_item] [ literal[string] ]
identifier[key] = identifier[challenge_item] [ literal[string] ]
keyword[try] :
identifier[result] = identifier[device] . identifier[Authenticate] ( identifier[app_id] , identifier[raw_challenge] ,[ identifier[key] ])
keyword[except] identifier[errors] . identifier[U2FError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[code] == identifier[errors] . identifier[U2FError] . identifier[DEVICE_INELIGIBLE] :
keyword[continue]
keyword[else] :
keyword[raise]
identifier[client_data] = identifier[self] . identifier[_base64encode] ( identifier[result] . identifier[client_data] . identifier[GetJson] (). identifier[encode] ())
identifier[signature_data] = identifier[self] . identifier[_base64encode] ( identifier[result] . identifier[signature_data] )
identifier[key_handle] = identifier[self] . identifier[_base64encode] ( identifier[result] . identifier[key_handle] )
keyword[return] {
literal[string] : identifier[client_data] ,
literal[string] : identifier[signature_data] ,
literal[string] : identifier[app_id] ,
literal[string] : identifier[key_handle] ,
}
keyword[raise] identifier[errors] . identifier[U2FError] ( identifier[errors] . identifier[U2FError] . identifier[DEVICE_INELIGIBLE] ) | def Authenticate(self, app_id, challenge_data, print_callback=sys.stderr.write):
"""See base class."""
# If authenticator is not plugged in, prompt
try:
device = u2f.GetLocalU2FInterface(origin=self.origin) # depends on [control=['try'], data=[]]
except errors.NoDeviceFoundError:
print_callback('Please insert your security key and press enter...')
six.moves.input()
device = u2f.GetLocalU2FInterface(origin=self.origin) # depends on [control=['except'], data=[]]
print_callback('Please touch your security key.\n')
for challenge_item in challenge_data:
raw_challenge = challenge_item['challenge']
key = challenge_item['key']
try:
result = device.Authenticate(app_id, raw_challenge, [key]) # depends on [control=['try'], data=[]]
except errors.U2FError as e:
if e.code == errors.U2FError.DEVICE_INELIGIBLE:
continue # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']]
client_data = self._base64encode(result.client_data.GetJson().encode())
signature_data = self._base64encode(result.signature_data)
key_handle = self._base64encode(result.key_handle)
return {'clientData': client_data, 'signatureData': signature_data, 'applicationId': app_id, 'keyHandle': key_handle} # depends on [control=['for'], data=['challenge_item']]
raise errors.U2FError(errors.U2FError.DEVICE_INELIGIBLE) |
def addMonths(date, months):
"""
Returns the new date based on the inputted months.
:param date | <datetime.date>
months | <int>
:return <datetime.date>
"""
# map from Qt information
if type(date).__name__ in ('QDate', 'QDateTime', 'QTime'):
date = date.toPython()
mult = months / abs(months)
years = mult * (abs(months) / 12)
months = mult * (abs(months) % 12)
# calculate the new month
month = date.month + months
if month < 1:
years -= 1
month = 12 - month
elif 12 < month:
years += 1
month %= 12
# calculate the new year
year = date.year + years
# calculate the new day
check = datetime.date(year, month, 1)
days = daysInMonth(check)
return datetime.date(year, month, min(date.day, days)) | def function[addMonths, parameter[date, months]]:
constant[
Returns the new date based on the inputted months.
:param date | <datetime.date>
months | <int>
:return <datetime.date>
]
if compare[call[name[type], parameter[name[date]]].__name__ in tuple[[<ast.Constant object at 0x7da1b28ff6d0>, <ast.Constant object at 0x7da1b28ff6a0>, <ast.Constant object at 0x7da1b28ff670>]]] begin[:]
variable[date] assign[=] call[name[date].toPython, parameter[]]
variable[mult] assign[=] binary_operation[name[months] / call[name[abs], parameter[name[months]]]]
variable[years] assign[=] binary_operation[name[mult] * binary_operation[call[name[abs], parameter[name[months]]] / constant[12]]]
variable[months] assign[=] binary_operation[name[mult] * binary_operation[call[name[abs], parameter[name[months]]] <ast.Mod object at 0x7da2590d6920> constant[12]]]
variable[month] assign[=] binary_operation[name[date].month + name[months]]
if compare[name[month] less[<] constant[1]] begin[:]
<ast.AugAssign object at 0x7da18dc98dc0>
variable[month] assign[=] binary_operation[constant[12] - name[month]]
variable[year] assign[=] binary_operation[name[date].year + name[years]]
variable[check] assign[=] call[name[datetime].date, parameter[name[year], name[month], constant[1]]]
variable[days] assign[=] call[name[daysInMonth], parameter[name[check]]]
return[call[name[datetime].date, parameter[name[year], name[month], call[name[min], parameter[name[date].day, name[days]]]]]] | keyword[def] identifier[addMonths] ( identifier[date] , identifier[months] ):
literal[string]
keyword[if] identifier[type] ( identifier[date] ). identifier[__name__] keyword[in] ( literal[string] , literal[string] , literal[string] ):
identifier[date] = identifier[date] . identifier[toPython] ()
identifier[mult] = identifier[months] / identifier[abs] ( identifier[months] )
identifier[years] = identifier[mult] *( identifier[abs] ( identifier[months] )/ literal[int] )
identifier[months] = identifier[mult] *( identifier[abs] ( identifier[months] )% literal[int] )
identifier[month] = identifier[date] . identifier[month] + identifier[months]
keyword[if] identifier[month] < literal[int] :
identifier[years] -= literal[int]
identifier[month] = literal[int] - identifier[month]
keyword[elif] literal[int] < identifier[month] :
identifier[years] += literal[int]
identifier[month] %= literal[int]
identifier[year] = identifier[date] . identifier[year] + identifier[years]
identifier[check] = identifier[datetime] . identifier[date] ( identifier[year] , identifier[month] , literal[int] )
identifier[days] = identifier[daysInMonth] ( identifier[check] )
keyword[return] identifier[datetime] . identifier[date] ( identifier[year] , identifier[month] , identifier[min] ( identifier[date] . identifier[day] , identifier[days] )) | def addMonths(date, months):
"""
Returns the new date based on the inputted months.
:param date | <datetime.date>
months | <int>
:return <datetime.date>
"""
# map from Qt information
if type(date).__name__ in ('QDate', 'QDateTime', 'QTime'):
date = date.toPython() # depends on [control=['if'], data=[]]
mult = months / abs(months)
years = mult * (abs(months) / 12)
months = mult * (abs(months) % 12)
# calculate the new month
month = date.month + months
if month < 1:
years -= 1
month = 12 - month # depends on [control=['if'], data=['month']]
elif 12 < month:
years += 1
month %= 12 # depends on [control=['if'], data=['month']]
# calculate the new year
year = date.year + years
# calculate the new day
check = datetime.date(year, month, 1)
days = daysInMonth(check)
return datetime.date(year, month, min(date.day, days)) |
def update_company(
self,
company,
update_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates specified company.
Example:
>>> from google.cloud import talent_v4beta1
>>>
>>> client = talent_v4beta1.CompanyServiceClient()
>>>
>>> # TODO: Initialize `company`:
>>> company = {}
>>>
>>> response = client.update_company(company)
Args:
company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required.
The company resource to replace the current resource in the system.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.talent_v4beta1.types.Company`
update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience.
If ``update_mask`` is provided, only the specified fields in ``company``
are updated. Otherwise all the fields are updated.
A field mask to specify the company fields to be updated. Only top level
fields of ``Company`` are supported.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.talent_v4beta1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.talent_v4beta1.types.Company` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "update_company" not in self._inner_api_calls:
self._inner_api_calls[
"update_company"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_company,
default_retry=self._method_configs["UpdateCompany"].retry,
default_timeout=self._method_configs["UpdateCompany"].timeout,
client_info=self._client_info,
)
request = company_service_pb2.UpdateCompanyRequest(
company=company, update_mask=update_mask
)
return self._inner_api_calls["update_company"](
request, retry=retry, timeout=timeout, metadata=metadata
) | def function[update_company, parameter[self, company, update_mask, retry, timeout, metadata]]:
constant[
Updates specified company.
Example:
>>> from google.cloud import talent_v4beta1
>>>
>>> client = talent_v4beta1.CompanyServiceClient()
>>>
>>> # TODO: Initialize `company`:
>>> company = {}
>>>
>>> response = client.update_company(company)
Args:
company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required.
The company resource to replace the current resource in the system.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.talent_v4beta1.types.Company`
update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience.
If ``update_mask`` is provided, only the specified fields in ``company``
are updated. Otherwise all the fields are updated.
A field mask to specify the company fields to be updated. Only top level
fields of ``Company`` are supported.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.talent_v4beta1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.talent_v4beta1.types.Company` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
]
if compare[constant[update_company] <ast.NotIn object at 0x7da2590d7190> name[self]._inner_api_calls] begin[:]
call[name[self]._inner_api_calls][constant[update_company]] assign[=] call[name[google].api_core.gapic_v1.method.wrap_method, parameter[name[self].transport.update_company]]
variable[request] assign[=] call[name[company_service_pb2].UpdateCompanyRequest, parameter[]]
return[call[call[name[self]._inner_api_calls][constant[update_company]], parameter[name[request]]]] | keyword[def] identifier[update_company] (
identifier[self] ,
identifier[company] ,
identifier[update_mask] = keyword[None] ,
identifier[retry] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[timeout] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[metadata] = keyword[None] ,
):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[_inner_api_calls] :
identifier[self] . identifier[_inner_api_calls] [
literal[string]
]= identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[wrap_method] (
identifier[self] . identifier[transport] . identifier[update_company] ,
identifier[default_retry] = identifier[self] . identifier[_method_configs] [ literal[string] ]. identifier[retry] ,
identifier[default_timeout] = identifier[self] . identifier[_method_configs] [ literal[string] ]. identifier[timeout] ,
identifier[client_info] = identifier[self] . identifier[_client_info] ,
)
identifier[request] = identifier[company_service_pb2] . identifier[UpdateCompanyRequest] (
identifier[company] = identifier[company] , identifier[update_mask] = identifier[update_mask]
)
keyword[return] identifier[self] . identifier[_inner_api_calls] [ literal[string] ](
identifier[request] , identifier[retry] = identifier[retry] , identifier[timeout] = identifier[timeout] , identifier[metadata] = identifier[metadata]
) | def update_company(self, company, update_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None):
"""
Updates specified company.
Example:
>>> from google.cloud import talent_v4beta1
>>>
>>> client = talent_v4beta1.CompanyServiceClient()
>>>
>>> # TODO: Initialize `company`:
>>> company = {}
>>>
>>> response = client.update_company(company)
Args:
company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required.
The company resource to replace the current resource in the system.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.talent_v4beta1.types.Company`
update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience.
If ``update_mask`` is provided, only the specified fields in ``company``
are updated. Otherwise all the fields are updated.
A field mask to specify the company fields to be updated. Only top level
fields of ``Company`` are supported.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.talent_v4beta1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.talent_v4beta1.types.Company` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'update_company' not in self._inner_api_calls:
self._inner_api_calls['update_company'] = google.api_core.gapic_v1.method.wrap_method(self.transport.update_company, default_retry=self._method_configs['UpdateCompany'].retry, default_timeout=self._method_configs['UpdateCompany'].timeout, client_info=self._client_info) # depends on [control=['if'], data=[]]
request = company_service_pb2.UpdateCompanyRequest(company=company, update_mask=update_mask)
return self._inner_api_calls['update_company'](request, retry=retry, timeout=timeout, metadata=metadata) |
def ls(*topic, **kwargs):
    """List topic from external datastore

    Arguments:
        topic (str): One or more topics, e.g. ("project", "item", "task")
        root (str, optional): Absolute path to where projects reside,
            defaults to os.getcwd()
        backend (callable, optional): Function to call with absolute path as
            argument to retrieve children. Defaults to os.listdir
        absolute (bool, optional): Whether to return relative or absolute paths

    Example:
        >> ls()
        /projects/thedeal
        /projects/hulk
        >> ls("thedeal")
        /projects/thedeal/assets/ben
        /projects/thedeal/assets/table
        >> ls("thedeal", "ben")
        /projects/thedeal/assets/ben/rigging
        /projects/thedeal/assets/ben/modeling

    """
    context = dump()
    root = kwargs.get("root") or context.get("cwd") or os.getcwd()
    backend = kwargs.get("backend", os.listdir)
    absolute = kwargs.get("absolute", True)

    # 0 topics -> projects, 1 -> inventory, 2 or more -> template
    depth = min(2, len(topic))

    if depth == 0:
        # List projects
        projects = lib.list_projects(root=root, backend=backend)
        if not absolute:
            return projects
        return map(lambda name: os.path.join(root, name), projects)

    project = topic[0]
    be = _extern.load(project, "be", root=root)

    if depth == 1:
        # List items
        inventory = lib.invert_inventory(
            _extern.load(project, "inventory", root=root))
        templates = _extern.load(project, "templates", root=root)

        if not absolute:
            return inventory.keys()

        paths = list()
        for item, binding in inventory.iteritems():
            # Resolve the template bound to this item, truncated to the
            # number of topics given, then fill it in with the context.
            pattern = lib.slice(len(topic), templates.get(binding))
            paths.append(pattern.format(*(topic + (item,)), **context))
        return paths

    # List template
    templates = _extern.load(project, "templates", root=root)
    inventory = _extern.load(project, "inventory", root=root)
    return lib.list_template(root=root,
                             topics=topic,
                             templates=templates,
                             inventory=inventory,
                             be=be,
                             absolute=absolute)
constant[List topic from external datastore
Arguments:
topic (str): One or more topics, e.g. ("project", "item", "task")
root (str, optional): Absolute path to where projects reside,
defaults to os.getcwd()
backend (callable, optional): Function to call with absolute path as
argument to retrieve children. Defaults to os.listdir
absolute (bool, optional): Whether to return relative or absolute paths
Example:
>> ls()
/projects/thedeal
/projects/hulk
>> ls("thedeal")
/projects/thedeal/assets/ben
/projects/thedeal/assets/table
>> ls("thedeal", "ben")
/projects/thedeal/assets/ben/rigging
/projects/thedeal/assets/ben/modeling
]
variable[context] assign[=] call[name[dump], parameter[]]
variable[root] assign[=] <ast.BoolOp object at 0x7da1b11ee950>
variable[backend] assign[=] call[name[kwargs].get, parameter[constant[backend], name[os].listdir]]
variable[absolute] assign[=] call[name[kwargs].get, parameter[constant[absolute], constant[True]]]
variable[content] assign[=] call[dictionary[[<ast.Constant object at 0x7da1b11ed960>, <ast.Constant object at 0x7da1b11effd0>, <ast.Constant object at 0x7da1b11ec0a0>], [<ast.Constant object at 0x7da1b11ed690>, <ast.Constant object at 0x7da1b11ec2b0>, <ast.Constant object at 0x7da1b11efcd0>]]][call[name[min], parameter[constant[2], call[name[len], parameter[name[topic]]]]]]
if compare[name[content] equal[==] constant[projects]] begin[:]
variable[projects] assign[=] call[name[lib].list_projects, parameter[]]
if name[absolute] begin[:]
return[call[name[map], parameter[<ast.Lambda object at 0x7da1b11ec370>, name[projects]]]]
if compare[name[content] equal[==] constant[inventory]] begin[:]
variable[project] assign[=] call[name[topic]][constant[0]]
variable[be] assign[=] call[name[_extern].load, parameter[name[project], constant[be]]]
variable[inventory] assign[=] call[name[_extern].load, parameter[name[project], constant[inventory]]]
variable[inventory] assign[=] call[name[lib].invert_inventory, parameter[name[inventory]]]
variable[templates] assign[=] call[name[_extern].load, parameter[name[project], constant[templates]]]
if name[absolute] begin[:]
variable[paths] assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b11ec790>, <ast.Name object at 0x7da1b11ee9b0>]]] in starred[call[name[inventory].iteritems, parameter[]]] begin[:]
variable[template] assign[=] call[name[templates].get, parameter[name[binding]]]
variable[index] assign[=] call[name[len], parameter[name[topic]]]
variable[sliced] assign[=] call[name[lib].slice, parameter[name[index], name[template]]]
call[name[paths].append, parameter[call[name[sliced].format, parameter[<ast.Starred object at 0x7da1b11ef6d0>]]]]
return[name[paths]]
if compare[name[content] equal[==] constant[template]] begin[:]
variable[project] assign[=] call[name[topic]][constant[0]]
variable[be] assign[=] call[name[_extern].load, parameter[name[project], constant[be]]]
variable[templates] assign[=] call[name[_extern].load, parameter[name[project], constant[templates]]]
variable[inventory] assign[=] call[name[_extern].load, parameter[name[project], constant[inventory]]]
return[call[name[lib].list_template, parameter[]]] | keyword[def] identifier[ls] (* identifier[topic] ,** identifier[kwargs] ):
literal[string]
identifier[context] = identifier[dump] ()
identifier[root] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[or] identifier[context] . identifier[get] ( literal[string] ) keyword[or] identifier[os] . identifier[getcwd] ()
identifier[backend] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[os] . identifier[listdir] )
identifier[absolute] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] )
identifier[content] ={
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string]
}[ identifier[min] ( literal[int] , identifier[len] ( identifier[topic] ))]
keyword[if] identifier[content] == literal[string] :
identifier[projects] = identifier[lib] . identifier[list_projects] ( identifier[root] = identifier[root] , identifier[backend] = identifier[backend] )
keyword[if] identifier[absolute] :
keyword[return] identifier[map] ( keyword[lambda] identifier[p] : identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[p] ), identifier[projects] )
keyword[else] :
keyword[return] identifier[projects]
keyword[if] identifier[content] == literal[string] :
identifier[project] = identifier[topic] [ literal[int] ]
identifier[be] = identifier[_extern] . identifier[load] ( identifier[project] , literal[string] , identifier[root] = identifier[root] )
identifier[inventory] = identifier[_extern] . identifier[load] ( identifier[project] , literal[string] , identifier[root] = identifier[root] )
identifier[inventory] = identifier[lib] . identifier[invert_inventory] ( identifier[inventory] )
identifier[templates] = identifier[_extern] . identifier[load] ( identifier[project] , literal[string] , identifier[root] = identifier[root] )
keyword[if] identifier[absolute] :
identifier[paths] = identifier[list] ()
keyword[for] identifier[item] , identifier[binding] keyword[in] identifier[inventory] . identifier[iteritems] ():
identifier[template] = identifier[templates] . identifier[get] ( identifier[binding] )
identifier[index] = identifier[len] ( identifier[topic] )
identifier[sliced] = identifier[lib] . identifier[slice] ( identifier[index] , identifier[template] )
identifier[paths] . identifier[append] ( identifier[sliced] . identifier[format] (*( identifier[topic] +( identifier[item] ,)),** identifier[context] ))
keyword[return] identifier[paths]
keyword[else] :
keyword[return] identifier[inventory] . identifier[keys] ()
keyword[if] identifier[content] == literal[string] :
identifier[project] = identifier[topic] [ literal[int] ]
identifier[be] = identifier[_extern] . identifier[load] ( identifier[project] , literal[string] , identifier[root] = identifier[root] )
identifier[templates] = identifier[_extern] . identifier[load] ( identifier[project] , literal[string] , identifier[root] = identifier[root] )
identifier[inventory] = identifier[_extern] . identifier[load] ( identifier[project] , literal[string] , identifier[root] = identifier[root] )
keyword[return] identifier[lib] . identifier[list_template] ( identifier[root] = identifier[root] ,
identifier[topics] = identifier[topic] ,
identifier[templates] = identifier[templates] ,
identifier[inventory] = identifier[inventory] ,
identifier[be] = identifier[be] ,
identifier[absolute] = identifier[absolute] ) | def ls(*topic, **kwargs):
"""List topic from external datastore
Arguments:
topic (str): One or more topics, e.g. ("project", "item", "task")
root (str, optional): Absolute path to where projects reside,
defaults to os.getcwd()
backend (callable, optional): Function to call with absolute path as
argument to retrieve children. Defaults to os.listdir
absolute (bool, optional): Whether to return relative or absolute paths
Example:
>> ls()
/projects/thedeal
/projects/hulk
>> ls("thedeal")
/projects/thedeal/assets/ben
/projects/thedeal/assets/table
>> ls("thedeal", "ben")
/projects/thedeal/assets/ben/rigging
/projects/thedeal/assets/ben/modeling
"""
context = dump()
root = kwargs.get('root') or context.get('cwd') or os.getcwd()
backend = kwargs.get('backend', os.listdir)
absolute = kwargs.get('absolute', True)
content = {0: 'projects', 1: 'inventory', 2: 'template'}[min(2, len(topic))]
# List projects
if content == 'projects':
projects = lib.list_projects(root=root, backend=backend)
if absolute:
return map(lambda p: os.path.join(root, p), projects) # depends on [control=['if'], data=[]]
else:
return projects # depends on [control=['if'], data=[]]
# List items
if content == 'inventory':
project = topic[0]
be = _extern.load(project, 'be', root=root)
inventory = _extern.load(project, 'inventory', root=root)
inventory = lib.invert_inventory(inventory)
templates = _extern.load(project, 'templates', root=root)
if absolute:
paths = list()
for (item, binding) in inventory.iteritems():
template = templates.get(binding)
index = len(topic)
sliced = lib.slice(index, template)
paths.append(sliced.format(*topic + (item,), **context)) # depends on [control=['for'], data=[]]
return paths # depends on [control=['if'], data=[]]
else:
return inventory.keys() # depends on [control=['if'], data=[]]
# List template
if content == 'template':
project = topic[0]
be = _extern.load(project, 'be', root=root)
templates = _extern.load(project, 'templates', root=root)
inventory = _extern.load(project, 'inventory', root=root)
return lib.list_template(root=root, topics=topic, templates=templates, inventory=inventory, be=be, absolute=absolute) # depends on [control=['if'], data=[]] |
def _filter_types(types_dict):
    # type: (Dict[FunctionKey, T]) -> Dict[FunctionKey, T]
    """Filter type info before dumping it to the file."""

    def keep(key):
        # type: (FunctionKey) -> bool
        """Keep filter: drop synthetic paths and module-level entries."""
        return not (key.path.startswith('<') or key.func_name == '<module>')

    return {key: value for key, value in iteritems(types_dict) if keep(key)}
constant[Filter type info before dumping it to the file.]
def function[exclude, parameter[k]]:
constant[Exclude filter]
return[<ast.BoolOp object at 0x7da20c991fc0>]
return[<ast.DictComp object at 0x7da20c990be0>] | keyword[def] identifier[_filter_types] ( identifier[types_dict] ):
literal[string]
keyword[def] identifier[exclude] ( identifier[k] ):
literal[string]
keyword[return] identifier[k] . identifier[path] . identifier[startswith] ( literal[string] ) keyword[or] identifier[k] . identifier[func_name] == literal[string]
keyword[return] { identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[types_dict] ) keyword[if] keyword[not] identifier[exclude] ( identifier[k] )} | def _filter_types(types_dict):
# type: (Dict[FunctionKey, T]) -> Dict[FunctionKey, T]
'Filter type info before dumping it to the file.'
def exclude(k):
# type: (FunctionKey) -> bool
'Exclude filter'
return k.path.startswith('<') or k.func_name == '<module>'
return {k: v for (k, v) in iteritems(types_dict) if not exclude(k)} |
def _on_prop_changed(self, instance, meth_name, res, args, kwargs):
    """Called by the observation code, when a modifying method
    is called.

    The call details (``instance``, ``meth_name``, ``res``, ``args``,
    ``kwargs``) are accepted to satisfy the observer callback signature,
    but they are not used here; the notification is simply forwarded.
    """
    # Delegate to the base Adapter's property-changed handler.
    # NOTE(review): presumably Adapter._on_prop_changed takes no arguments
    # beyond self -- confirm against the Adapter base class.
    Adapter._on_prop_changed(self)
constant[Called by the observation code, when a modifying method
is called]
call[name[Adapter]._on_prop_changed, parameter[name[self]]] | keyword[def] identifier[_on_prop_changed] ( identifier[self] , identifier[instance] , identifier[meth_name] , identifier[res] , identifier[args] , identifier[kwargs] ):
literal[string]
identifier[Adapter] . identifier[_on_prop_changed] ( identifier[self] ) | def _on_prop_changed(self, instance, meth_name, res, args, kwargs):
"""Called by the observation code, when a modifying method
is called"""
Adapter._on_prop_changed(self) |
def string_to_scopes(scopes):
    """Converts stringifed scope value to a list.

    If scopes is a list then it is simply passed through. If scopes is an
    string then a list of each individual scope is returned.

    Args:
        scopes: a string or iterable of strings, the scopes.

    Returns:
        The scopes in a list.
    """
    if isinstance(scopes, six.string_types):
        # A non-empty string holds space-separated scopes; an empty
        # string means no scopes at all.
        return scopes.split(' ') if scopes else []
    # Pass iterables through unchanged; map any falsy value to [].
    return scopes or []
constant[Converts stringifed scope value to a list.
If scopes is a list then it is simply passed through. If scopes is an
string then a list of each individual scope is returned.
Args:
scopes: a string or iterable of strings, the scopes.
Returns:
The scopes in a list.
]
if <ast.UnaryOp object at 0x7da1b01505b0> begin[:]
return[list[[]]] | keyword[def] identifier[string_to_scopes] ( identifier[scopes] ):
literal[string]
keyword[if] keyword[not] identifier[scopes] :
keyword[return] []
keyword[elif] identifier[isinstance] ( identifier[scopes] , identifier[six] . identifier[string_types] ):
keyword[return] identifier[scopes] . identifier[split] ( literal[string] )
keyword[else] :
keyword[return] identifier[scopes] | def string_to_scopes(scopes):
"""Converts stringifed scope value to a list.
If scopes is a list then it is simply passed through. If scopes is an
string then a list of each individual scope is returned.
Args:
scopes: a string or iterable of strings, the scopes.
Returns:
The scopes in a list.
"""
if not scopes:
return [] # depends on [control=['if'], data=[]]
elif isinstance(scopes, six.string_types):
return scopes.split(' ') # depends on [control=['if'], data=[]]
else:
return scopes |
def adsSyncWriteReqEx(port, address, index_group, index_offset, value, plc_data_type):
    # type: (int, AmsAddr, int, int, Any, Type) -> None
    """Send data synchronous to an ADS-device.

    :param int port: local AMS port as returned by adsPortOpenEx()
    :param pyads.structs.AmsAddr address: local or remote AmsAddr
    :param int index_group: PLC storage area, according to the INDEXGROUP
        constants
    :param int index_offset: PLC storage address
    :param value: value to write to the storage address of the PLC
    :param int plc_data_type: type of the data given to the PLC,
        according to PLCTYPE constants
    :raises ADSError: if the underlying ADS call reports an error code

    """
    ams_address_pointer = ctypes.pointer(address.amsAddrStruct())
    group = ctypes.c_ulong(index_group)
    offset = ctypes.c_ulong(index_offset)

    if plc_data_type == PLCTYPE_STRING:
        # Strings travel as a NUL-terminated byte buffer; +1 accounts
        # for the terminator.
        payload = ctypes.c_char_p(value.encode("utf-8"))
        payload_pointer = payload  # type: Union[ctypes.c_char_p, ctypes.pointer]
        payload_length = len(payload_pointer.value) + 1  # type: ignore
    elif type(plc_data_type).__name__ == "PyCArrayType":
        # ctypes array types are constructed from the unpacked sequence.
        payload = plc_data_type(*value)
        payload_pointer = ctypes.pointer(payload)
        payload_length = ctypes.sizeof(payload)
    else:
        payload = plc_data_type(value)
        payload_pointer = ctypes.pointer(payload)
        payload_length = ctypes.sizeof(payload)

    error_code = _adsDLL.AdsSyncWriteReqEx(
        port,
        ams_address_pointer,
        group,
        offset,
        payload_length,
        payload_pointer,
    )

    if error_code:
        raise ADSError(error_code)
constant[Send data synchronous to an ADS-device.
:param int port: local AMS port as returned by adsPortOpenEx()
:param pyads.structs.AmsAddr address: local or remote AmsAddr
:param int indexGroup: PLC storage area, according to the INDEXGROUP
constants
:param int index_offset: PLC storage address
:param value: value to write to the storage address of the PLC
:param int plc_data_type: type of the data given to the PLC,
according to PLCTYPE constants
]
variable[sync_write_request] assign[=] name[_adsDLL].AdsSyncWriteReqEx
variable[ams_address_pointer] assign[=] call[name[ctypes].pointer, parameter[call[name[address].amsAddrStruct, parameter[]]]]
variable[index_group_c] assign[=] call[name[ctypes].c_ulong, parameter[name[index_group]]]
variable[index_offset_c] assign[=] call[name[ctypes].c_ulong, parameter[name[index_offset]]]
if compare[name[plc_data_type] equal[==] name[PLCTYPE_STRING]] begin[:]
variable[data] assign[=] call[name[ctypes].c_char_p, parameter[call[name[value].encode, parameter[constant[utf-8]]]]]
variable[data_pointer] assign[=] name[data]
variable[data_length] assign[=] binary_operation[call[name[len], parameter[name[data_pointer].value]] + constant[1]]
variable[error_code] assign[=] call[name[sync_write_request], parameter[name[port], name[ams_address_pointer], name[index_group_c], name[index_offset_c], name[data_length], name[data_pointer]]]
if name[error_code] begin[:]
<ast.Raise object at 0x7da1b015bf70> | keyword[def] identifier[adsSyncWriteReqEx] ( identifier[port] , identifier[address] , identifier[index_group] , identifier[index_offset] , identifier[value] , identifier[plc_data_type] ):
literal[string]
identifier[sync_write_request] = identifier[_adsDLL] . identifier[AdsSyncWriteReqEx]
identifier[ams_address_pointer] = identifier[ctypes] . identifier[pointer] ( identifier[address] . identifier[amsAddrStruct] ())
identifier[index_group_c] = identifier[ctypes] . identifier[c_ulong] ( identifier[index_group] )
identifier[index_offset_c] = identifier[ctypes] . identifier[c_ulong] ( identifier[index_offset] )
keyword[if] identifier[plc_data_type] == identifier[PLCTYPE_STRING] :
identifier[data] = identifier[ctypes] . identifier[c_char_p] ( identifier[value] . identifier[encode] ( literal[string] ))
identifier[data_pointer] = identifier[data]
identifier[data_length] = identifier[len] ( identifier[data_pointer] . identifier[value] )+ literal[int]
keyword[else] :
keyword[if] identifier[type] ( identifier[plc_data_type] ). identifier[__name__] == literal[string] :
identifier[data] = identifier[plc_data_type] (* identifier[value] )
keyword[else] :
identifier[data] = identifier[plc_data_type] ( identifier[value] )
identifier[data_pointer] = identifier[ctypes] . identifier[pointer] ( identifier[data] )
identifier[data_length] = identifier[ctypes] . identifier[sizeof] ( identifier[data] )
identifier[error_code] = identifier[sync_write_request] (
identifier[port] ,
identifier[ams_address_pointer] ,
identifier[index_group_c] ,
identifier[index_offset_c] ,
identifier[data_length] ,
identifier[data_pointer] ,
)
keyword[if] identifier[error_code] :
keyword[raise] identifier[ADSError] ( identifier[error_code] ) | def adsSyncWriteReqEx(port, address, index_group, index_offset, value, plc_data_type):
# type: (int, AmsAddr, int, int, Any, Type) -> None
'Send data synchronous to an ADS-device.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param int indexGroup: PLC storage area, according to the INDEXGROUP\n constants\n :param int index_offset: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param int plc_data_type: type of the data given to the PLC,\n according to PLCTYPE constants\n\n '
sync_write_request = _adsDLL.AdsSyncWriteReqEx
ams_address_pointer = ctypes.pointer(address.amsAddrStruct())
index_group_c = ctypes.c_ulong(index_group)
index_offset_c = ctypes.c_ulong(index_offset)
if plc_data_type == PLCTYPE_STRING:
data = ctypes.c_char_p(value.encode('utf-8'))
data_pointer = data # type: Union[ctypes.c_char_p, ctypes.pointer]
data_length = len(data_pointer.value) + 1 # type: ignore # depends on [control=['if'], data=[]]
else:
if type(plc_data_type).__name__ == 'PyCArrayType':
data = plc_data_type(*value) # depends on [control=['if'], data=[]]
else:
data = plc_data_type(value)
data_pointer = ctypes.pointer(data)
data_length = ctypes.sizeof(data)
error_code = sync_write_request(port, ams_address_pointer, index_group_c, index_offset_c, data_length, data_pointer)
if error_code:
raise ADSError(error_code) # depends on [control=['if'], data=[]] |
def get_conv(bits, bin_point, signed=False, scaling=1.0):
    """
    Creates a I{conversion structure} implemented as a dictionary containing
    all parameters needed to switch between number representations.

    @param bits: the number of bits; must be 8, 16 or 32
    @param bin_point: binary point position (number of fractional bits,
        assumed non-negative)
    @param signed: True if Fix, False if UFix
    @param scaling: optional scaling to be applied after the conversion
    @return: a conversion structure that can be applied in both directions of
        conversion for the given specs.
    @raise ConversionError: if C{bits} is not one of the supported sizes.
    """
    # struct format codes for the supported (unsigned) word sizes.
    formats = {8: "B", 16: "H", 32: "I"}
    if bits not in formats:
        raise ConversionError("number of bits not supported: " + str(bits))

    conversion_t = {
        "bits": bits,
        "bin_point": bin_point,
        "signed": signed,
        "scaling": scaling,
        # Smallest representable increment.
        "dec_step": 1.0 / (2 ** bin_point),
        # Mask selecting the bin_point fractional bits
        # (equivalent to sum of 2**i for i in range(bin_point)).
        "dec_mask": (1 << bin_point) - 1,
        "fmt": formats[bits],
    }

    # Fill in the sign-dependent range/offset parameters in place.
    if signed:
        _get_signed_params(conversion_t)
    else:
        _get_unsigned_params(conversion_t)
    return conversion_t
constant[
Creates a I{conversion structure} implented as a dictionary containing all parameters
needed to switch between number representations.
@param bits: the number of bits
@param bin_point: binary point position
@param signed: True if Fix, False if UFix
@param scaling: optional scaling to be applied after the conversion
@return: a conversion structure that can be applied in both directions of
conversion for the given specs.
]
variable[conversion_t] assign[=] dictionary[[], []]
call[name[conversion_t]][constant[bits]] assign[=] name[bits]
call[name[conversion_t]][constant[bin_point]] assign[=] name[bin_point]
call[name[conversion_t]][constant[signed]] assign[=] name[signed]
call[name[conversion_t]][constant[scaling]] assign[=] name[scaling]
call[name[conversion_t]][constant[dec_step]] assign[=] binary_operation[constant[1.0] / binary_operation[constant[2] ** name[bin_point]]]
call[name[conversion_t]][constant[dec_mask]] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b09c4850>]]
if compare[name[bits] equal[==] constant[8]] begin[:]
call[name[conversion_t]][constant[fmt]] assign[=] constant[B]
if name[signed] begin[:]
call[name[_get_signed_params], parameter[name[conversion_t]]]
return[name[conversion_t]] | keyword[def] identifier[get_conv] ( identifier[bits] , identifier[bin_point] , identifier[signed] = keyword[False] , identifier[scaling] = literal[int] ):
literal[string]
identifier[conversion_t] ={}
identifier[conversion_t] [ literal[string] ]= identifier[bits]
identifier[conversion_t] [ literal[string] ]= identifier[bin_point]
identifier[conversion_t] [ literal[string] ]= identifier[signed]
identifier[conversion_t] [ literal[string] ]= identifier[scaling]
identifier[conversion_t] [ literal[string] ]= literal[int] /( literal[int] ** identifier[bin_point] )
identifier[conversion_t] [ literal[string] ]= identifier[sum] ([ literal[int] ** identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[bin_point] )])
keyword[if] identifier[bits] == literal[int] :
identifier[conversion_t] [ literal[string] ]= literal[string]
keyword[elif] identifier[bits] == literal[int] :
identifier[conversion_t] [ literal[string] ]= literal[string]
keyword[elif] identifier[bits] == literal[int] :
identifier[conversion_t] [ literal[string] ]= literal[string]
keyword[else] :
keyword[raise] identifier[ConversionError] ( literal[string] + identifier[str] ( identifier[bits] ))
keyword[if] identifier[signed] :
identifier[_get_signed_params] ( identifier[conversion_t] )
keyword[else] :
identifier[_get_unsigned_params] ( identifier[conversion_t] )
keyword[return] identifier[conversion_t] | def get_conv(bits, bin_point, signed=False, scaling=1.0):
"""
Creates a I{conversion structure} implented as a dictionary containing all parameters
needed to switch between number representations.
@param bits: the number of bits
@param bin_point: binary point position
@param signed: True if Fix, False if UFix
@param scaling: optional scaling to be applied after the conversion
@return: a conversion structure that can be applied in both directions of
conversion for the given specs.
"""
conversion_t = {}
conversion_t['bits'] = bits
conversion_t['bin_point'] = bin_point
conversion_t['signed'] = signed
conversion_t['scaling'] = scaling
conversion_t['dec_step'] = 1.0 / 2 ** bin_point
#dec_max = dec_mask * dec_step
conversion_t['dec_mask'] = sum([2 ** i for i in range(bin_point)])
if bits == 8:
conversion_t['fmt'] = 'B' # depends on [control=['if'], data=[]]
elif bits == 16:
conversion_t['fmt'] = 'H' # depends on [control=['if'], data=[]]
elif bits == 32:
conversion_t['fmt'] = 'I' # depends on [control=['if'], data=[]]
else:
raise ConversionError('numer of bits not supported: ' + str(bits))
if signed:
_get_signed_params(conversion_t) # depends on [control=['if'], data=[]]
else:
_get_unsigned_params(conversion_t)
return conversion_t |
def compute(self, x_arr, y_arr):
    '''
    Compute distance.

    Args:
        x_arr:    `np.ndarray` of vectors.
        y_arr:    `np.ndarray` of vectors.

    Returns:
        `np.ndarray` of distances (Euclidean norm over the last axis).
    '''
    diff_arr = x_arr - y_arr
    return np.linalg.norm(diff_arr, axis=-1)
constant[
Compute distance.
Args:
x_arr: `np.ndarray` of vectors.
y_arr: `np.ndarray` of vectors.
Retruns:
`np.ndarray` of distances.
]
return[call[name[np].linalg.norm, parameter[binary_operation[name[x_arr] - name[y_arr]]]]] | keyword[def] identifier[compute] ( identifier[self] , identifier[x_arr] , identifier[y_arr] ):
literal[string]
keyword[return] identifier[np] . identifier[linalg] . identifier[norm] ( identifier[x_arr] - identifier[y_arr] , identifier[axis] =- literal[int] ) | def compute(self, x_arr, y_arr):
"""
Compute distance.
Args:
x_arr: `np.ndarray` of vectors.
y_arr: `np.ndarray` of vectors.
Retruns:
`np.ndarray` of distances.
"""
return np.linalg.norm(x_arr - y_arr, axis=-1) |
def delete_cookie(self, key: str, path: str='/', domain: Optional[str]=None) -> None:
"""Delete a cookie (set to expire immediately)."""
self.set_cookie(key, expires=datetime.utcnow(), max_age=0, path=path, domain=domain) | def function[delete_cookie, parameter[self, key, path, domain]]:
constant[Delete a cookie (set to expire immediately).]
call[name[self].set_cookie, parameter[name[key]]] | keyword[def] identifier[delete_cookie] ( identifier[self] , identifier[key] : identifier[str] , identifier[path] : identifier[str] = literal[string] , identifier[domain] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> keyword[None] :
literal[string]
identifier[self] . identifier[set_cookie] ( identifier[key] , identifier[expires] = identifier[datetime] . identifier[utcnow] (), identifier[max_age] = literal[int] , identifier[path] = identifier[path] , identifier[domain] = identifier[domain] ) | def delete_cookie(self, key: str, path: str='/', domain: Optional[str]=None) -> None:
"""Delete a cookie (set to expire immediately)."""
self.set_cookie(key, expires=datetime.utcnow(), max_age=0, path=path, domain=domain) |
def get_rgba(self):
    """Returns the solid pattern’s color.

    :returns: a ``(red, green, blue, alpha)`` tuple of floats.

    """
    # Out-parameter buffer for the four color components.
    components = ffi.new('double[4]')
    status = cairo.cairo_pattern_get_rgba(
        self._pointer,
        components + 0, components + 1, components + 2, components + 3)
    _check_status(status)
    return tuple(components)
constant[Returns the solid pattern’s color.
:returns: a ``(red, green, blue, alpha)`` tuple of floats.
]
variable[rgba] assign[=] call[name[ffi].new, parameter[constant[double[4]]]]
call[name[_check_status], parameter[call[name[cairo].cairo_pattern_get_rgba, parameter[name[self]._pointer, binary_operation[name[rgba] + constant[0]], binary_operation[name[rgba] + constant[1]], binary_operation[name[rgba] + constant[2]], binary_operation[name[rgba] + constant[3]]]]]]
return[call[name[tuple], parameter[name[rgba]]]] | keyword[def] identifier[get_rgba] ( identifier[self] ):
literal[string]
identifier[rgba] = identifier[ffi] . identifier[new] ( literal[string] )
identifier[_check_status] ( identifier[cairo] . identifier[cairo_pattern_get_rgba] (
identifier[self] . identifier[_pointer] , identifier[rgba] + literal[int] , identifier[rgba] + literal[int] , identifier[rgba] + literal[int] , identifier[rgba] + literal[int] ))
keyword[return] identifier[tuple] ( identifier[rgba] ) | def get_rgba(self):
"""Returns the solid pattern’s color.
:returns: a ``(red, green, blue, alpha)`` tuple of floats.
"""
rgba = ffi.new('double[4]')
_check_status(cairo.cairo_pattern_get_rgba(self._pointer, rgba + 0, rgba + 1, rgba + 2, rgba + 3))
return tuple(rgba) |
def encode(self, input_str):
    """Input str to features dict, ready for inference."""
    token_ids = self.encoders["inputs"].encode(input_str) + [EOS_ID]
    # Reshape to the batched layout the model expects: [1, length, 1, 1].
    return np.reshape(token_ids, [1, -1, 1, 1])
constant[Input str to features dict, ready for inference.]
variable[inputs] assign[=] binary_operation[call[call[name[self].encoders][constant[inputs]].encode, parameter[name[input_str]]] + list[[<ast.Name object at 0x7da1b209be20>]]]
variable[batch_inputs] assign[=] call[name[np].reshape, parameter[name[inputs], list[[<ast.Constant object at 0x7da1b20988e0>, <ast.UnaryOp object at 0x7da1b2099930>, <ast.Constant object at 0x7da1b2098880>, <ast.Constant object at 0x7da1b2099300>]]]]
return[name[batch_inputs]] | keyword[def] identifier[encode] ( identifier[self] , identifier[input_str] ):
literal[string]
identifier[inputs] = identifier[self] . identifier[encoders] [ literal[string] ]. identifier[encode] ( identifier[input_str] )+[ identifier[EOS_ID] ]
identifier[batch_inputs] = identifier[np] . identifier[reshape] ( identifier[inputs] ,[ literal[int] ,- literal[int] , literal[int] , literal[int] ])
keyword[return] identifier[batch_inputs] | def encode(self, input_str):
"""Input str to features dict, ready for inference."""
inputs = self.encoders['inputs'].encode(input_str) + [EOS_ID]
batch_inputs = np.reshape(inputs, [1, -1, 1, 1]) # Make it 3D.
return batch_inputs |
async def _cleanup_old_connections(self):
"""Remove all active connections and query the maximum number of supported connections
"""
retval = await self._command_task.future_command(['_query_systemstate'])
for conn in retval['active_connections']:
self._logger.info("Forcible disconnecting connection %d", conn)
await self._command_task.future_command(['_disconnect', conn]) | <ast.AsyncFunctionDef object at 0x7da18f721750> | keyword[async] keyword[def] identifier[_cleanup_old_connections] ( identifier[self] ):
literal[string]
identifier[retval] = keyword[await] identifier[self] . identifier[_command_task] . identifier[future_command] ([ literal[string] ])
keyword[for] identifier[conn] keyword[in] identifier[retval] [ literal[string] ]:
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[conn] )
keyword[await] identifier[self] . identifier[_command_task] . identifier[future_command] ([ literal[string] , identifier[conn] ]) | async def _cleanup_old_connections(self):
"""Remove all active connections and query the maximum number of supported connections
"""
retval = await self._command_task.future_command(['_query_systemstate'])
for conn in retval['active_connections']:
self._logger.info('Forcible disconnecting connection %d', conn)
await self._command_task.future_command(['_disconnect', conn]) # depends on [control=['for'], data=['conn']] |
def set_unique_prompt(self):
'''This sets the remote prompt to something more unique than ``#`` or ``$``.
This makes it easier for the :meth:`prompt` method to match the shell prompt
unambiguously. This method is called automatically by the :meth:`login`
method, but you may want to call it manually if you somehow reset the
shell prompt. For example, if you 'su' to a different user then you
will need to manually reset the prompt. This sends shell commands to
the remote host to set the prompt, so this assumes the remote host is
ready to receive commands.
Alternatively, you may use your own prompt pattern. In this case you
should call :meth:`login` with ``auto_prompt_reset=False``; then set the
:attr:`PROMPT` attribute to a regular expression. After that, the
:meth:`prompt` method will try to match your prompt pattern.
'''
self.sendline("unset PROMPT_COMMAND")
self.sendline(self.PROMPT_SET_SH) # sh-style
i = self.expect ([TIMEOUT, self.PROMPT], timeout=10)
if i == 0: # csh-style
self.sendline(self.PROMPT_SET_CSH)
i = self.expect([TIMEOUT, self.PROMPT], timeout=10)
if i == 0:
return False
return True | def function[set_unique_prompt, parameter[self]]:
constant[This sets the remote prompt to something more unique than ``#`` or ``$``.
This makes it easier for the :meth:`prompt` method to match the shell prompt
unambiguously. This method is called automatically by the :meth:`login`
method, but you may want to call it manually if you somehow reset the
shell prompt. For example, if you 'su' to a different user then you
will need to manually reset the prompt. This sends shell commands to
the remote host to set the prompt, so this assumes the remote host is
ready to receive commands.
Alternatively, you may use your own prompt pattern. In this case you
should call :meth:`login` with ``auto_prompt_reset=False``; then set the
:attr:`PROMPT` attribute to a regular expression. After that, the
:meth:`prompt` method will try to match your prompt pattern.
]
call[name[self].sendline, parameter[constant[unset PROMPT_COMMAND]]]
call[name[self].sendline, parameter[name[self].PROMPT_SET_SH]]
variable[i] assign[=] call[name[self].expect, parameter[list[[<ast.Name object at 0x7da18dc045b0>, <ast.Attribute object at 0x7da18bc71180>]]]]
if compare[name[i] equal[==] constant[0]] begin[:]
call[name[self].sendline, parameter[name[self].PROMPT_SET_CSH]]
variable[i] assign[=] call[name[self].expect, parameter[list[[<ast.Name object at 0x7da18bc72a40>, <ast.Attribute object at 0x7da18bc72ce0>]]]]
if compare[name[i] equal[==] constant[0]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[set_unique_prompt] ( identifier[self] ):
literal[string]
identifier[self] . identifier[sendline] ( literal[string] )
identifier[self] . identifier[sendline] ( identifier[self] . identifier[PROMPT_SET_SH] )
identifier[i] = identifier[self] . identifier[expect] ([ identifier[TIMEOUT] , identifier[self] . identifier[PROMPT] ], identifier[timeout] = literal[int] )
keyword[if] identifier[i] == literal[int] :
identifier[self] . identifier[sendline] ( identifier[self] . identifier[PROMPT_SET_CSH] )
identifier[i] = identifier[self] . identifier[expect] ([ identifier[TIMEOUT] , identifier[self] . identifier[PROMPT] ], identifier[timeout] = literal[int] )
keyword[if] identifier[i] == literal[int] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def set_unique_prompt(self):
"""This sets the remote prompt to something more unique than ``#`` or ``$``.
This makes it easier for the :meth:`prompt` method to match the shell prompt
unambiguously. This method is called automatically by the :meth:`login`
method, but you may want to call it manually if you somehow reset the
shell prompt. For example, if you 'su' to a different user then you
will need to manually reset the prompt. This sends shell commands to
the remote host to set the prompt, so this assumes the remote host is
ready to receive commands.
Alternatively, you may use your own prompt pattern. In this case you
should call :meth:`login` with ``auto_prompt_reset=False``; then set the
:attr:`PROMPT` attribute to a regular expression. After that, the
:meth:`prompt` method will try to match your prompt pattern.
"""
self.sendline('unset PROMPT_COMMAND')
self.sendline(self.PROMPT_SET_SH) # sh-style
i = self.expect([TIMEOUT, self.PROMPT], timeout=10)
if i == 0: # csh-style
self.sendline(self.PROMPT_SET_CSH)
i = self.expect([TIMEOUT, self.PROMPT], timeout=10)
if i == 0:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['i']]
return True |
def _parse_dict(self, text, i):
"""Parse a dictionary from source text starting at i."""
old_current_type = self.current_type
new_type = self.current_type
if new_type is None:
# customparameter.value needs to be set from the found value
new_type = dict
elif type(new_type) == list:
new_type = new_type[0]
res = new_type()
i = self._parse_dict_into_object(res, text, i)
self.current_type = old_current_type
return res, i | def function[_parse_dict, parameter[self, text, i]]:
constant[Parse a dictionary from source text starting at i.]
variable[old_current_type] assign[=] name[self].current_type
variable[new_type] assign[=] name[self].current_type
if compare[name[new_type] is constant[None]] begin[:]
variable[new_type] assign[=] name[dict]
variable[res] assign[=] call[name[new_type], parameter[]]
variable[i] assign[=] call[name[self]._parse_dict_into_object, parameter[name[res], name[text], name[i]]]
name[self].current_type assign[=] name[old_current_type]
return[tuple[[<ast.Name object at 0x7da1b033d570>, <ast.Name object at 0x7da1b033d5a0>]]] | keyword[def] identifier[_parse_dict] ( identifier[self] , identifier[text] , identifier[i] ):
literal[string]
identifier[old_current_type] = identifier[self] . identifier[current_type]
identifier[new_type] = identifier[self] . identifier[current_type]
keyword[if] identifier[new_type] keyword[is] keyword[None] :
identifier[new_type] = identifier[dict]
keyword[elif] identifier[type] ( identifier[new_type] )== identifier[list] :
identifier[new_type] = identifier[new_type] [ literal[int] ]
identifier[res] = identifier[new_type] ()
identifier[i] = identifier[self] . identifier[_parse_dict_into_object] ( identifier[res] , identifier[text] , identifier[i] )
identifier[self] . identifier[current_type] = identifier[old_current_type]
keyword[return] identifier[res] , identifier[i] | def _parse_dict(self, text, i):
"""Parse a dictionary from source text starting at i."""
old_current_type = self.current_type
new_type = self.current_type
if new_type is None:
# customparameter.value needs to be set from the found value
new_type = dict # depends on [control=['if'], data=['new_type']]
elif type(new_type) == list:
new_type = new_type[0] # depends on [control=['if'], data=[]]
res = new_type()
i = self._parse_dict_into_object(res, text, i)
self.current_type = old_current_type
return (res, i) |
def update_gradebook(self, gradebook_form):
"""Updates an existing gradebook.
arg: gradebook_form (osid.grading.GradebookForm): the form
containing the elements to be updated
raise: IllegalState - ``gradebook_form`` already used in an
update transaction
raise: InvalidArgument - the form contains an invalid value
raise: NullArgument - ``gradebook_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``gradebook_form did not originate from
get_gradebook_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.update_bin_template
if self._catalog_session is not None:
return self._catalog_session.update_catalog(catalog_form=gradebook_form)
collection = JSONClientValidated('grading',
collection='Gradebook',
runtime=self._runtime)
if not isinstance(gradebook_form, ABCGradebookForm):
raise errors.InvalidArgument('argument type is not an GradebookForm')
if not gradebook_form.is_for_update():
raise errors.InvalidArgument('the GradebookForm is for update only, not create')
try:
if self._forms[gradebook_form.get_id().get_identifier()] == UPDATED:
raise errors.IllegalState('gradebook_form already used in an update transaction')
except KeyError:
raise errors.Unsupported('gradebook_form did not originate from this session')
if not gradebook_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
collection.save(gradebook_form._my_map) # save is deprecated - change to replace_one
self._forms[gradebook_form.get_id().get_identifier()] = UPDATED
# Note: this is out of spec. The OSIDs don't require an object to be returned
return objects.Gradebook(osid_object_map=gradebook_form._my_map, runtime=self._runtime, proxy=self._proxy) | def function[update_gradebook, parameter[self, gradebook_form]]:
constant[Updates an existing gradebook.
arg: gradebook_form (osid.grading.GradebookForm): the form
containing the elements to be updated
raise: IllegalState - ``gradebook_form`` already used in an
update transaction
raise: InvalidArgument - the form contains an invalid value
raise: NullArgument - ``gradebook_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``gradebook_form did not originate from
get_gradebook_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.update_catalog, parameter[]]]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[grading]]]
if <ast.UnaryOp object at 0x7da207f01d50> begin[:]
<ast.Raise object at 0x7da207f039a0>
if <ast.UnaryOp object at 0x7da207f02d70> begin[:]
<ast.Raise object at 0x7da207f01180>
<ast.Try object at 0x7da207f01540>
if <ast.UnaryOp object at 0x7da207f010f0> begin[:]
<ast.Raise object at 0x7da207f024d0>
call[name[collection].save, parameter[name[gradebook_form]._my_map]]
call[name[self]._forms][call[call[name[gradebook_form].get_id, parameter[]].get_identifier, parameter[]]] assign[=] name[UPDATED]
return[call[name[objects].Gradebook, parameter[]]] | keyword[def] identifier[update_gradebook] ( identifier[self] , identifier[gradebook_form] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[update_catalog] ( identifier[catalog_form] = identifier[gradebook_form] )
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[gradebook_form] , identifier[ABCGradebookForm] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
keyword[if] keyword[not] identifier[gradebook_form] . identifier[is_for_update] ():
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
keyword[try] :
keyword[if] identifier[self] . identifier[_forms] [ identifier[gradebook_form] . identifier[get_id] (). identifier[get_identifier] ()]== identifier[UPDATED] :
keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[errors] . identifier[Unsupported] ( literal[string] )
keyword[if] keyword[not] identifier[gradebook_form] . identifier[is_valid] ():
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
identifier[collection] . identifier[save] ( identifier[gradebook_form] . identifier[_my_map] )
identifier[self] . identifier[_forms] [ identifier[gradebook_form] . identifier[get_id] (). identifier[get_identifier] ()]= identifier[UPDATED]
keyword[return] identifier[objects] . identifier[Gradebook] ( identifier[osid_object_map] = identifier[gradebook_form] . identifier[_my_map] , identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] ) | def update_gradebook(self, gradebook_form):
"""Updates an existing gradebook.
arg: gradebook_form (osid.grading.GradebookForm): the form
containing the elements to be updated
raise: IllegalState - ``gradebook_form`` already used in an
update transaction
raise: InvalidArgument - the form contains an invalid value
raise: NullArgument - ``gradebook_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``gradebook_form did not originate from
get_gradebook_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.update_bin_template
if self._catalog_session is not None:
return self._catalog_session.update_catalog(catalog_form=gradebook_form) # depends on [control=['if'], data=[]]
collection = JSONClientValidated('grading', collection='Gradebook', runtime=self._runtime)
if not isinstance(gradebook_form, ABCGradebookForm):
raise errors.InvalidArgument('argument type is not an GradebookForm') # depends on [control=['if'], data=[]]
if not gradebook_form.is_for_update():
raise errors.InvalidArgument('the GradebookForm is for update only, not create') # depends on [control=['if'], data=[]]
try:
if self._forms[gradebook_form.get_id().get_identifier()] == UPDATED:
raise errors.IllegalState('gradebook_form already used in an update transaction') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
raise errors.Unsupported('gradebook_form did not originate from this session') # depends on [control=['except'], data=[]]
if not gradebook_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid') # depends on [control=['if'], data=[]]
collection.save(gradebook_form._my_map) # save is deprecated - change to replace_one
self._forms[gradebook_form.get_id().get_identifier()] = UPDATED
# Note: this is out of spec. The OSIDs don't require an object to be returned
return objects.Gradebook(osid_object_map=gradebook_form._my_map, runtime=self._runtime, proxy=self._proxy) |
def set_html5_canvas_format(self, fmt):
"""
Sets the format used for rendering to the HTML5 canvas.
'png' offers greater clarity, especially for small text, but
does not have as good of performance as 'jpeg'.
"""
fmt = fmt.lower()
if fmt not in ('jpeg', 'png'):
raise ValueError("Format must be one of {jpeg|png} not '%s'" % (
fmt))
settings = self.get_settings()
settings.set(html5_canvas_format=fmt) | def function[set_html5_canvas_format, parameter[self, fmt]]:
constant[
Sets the format used for rendering to the HTML5 canvas.
'png' offers greater clarity, especially for small text, but
does not have as good of performance as 'jpeg'.
]
variable[fmt] assign[=] call[name[fmt].lower, parameter[]]
if compare[name[fmt] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20e954be0>, <ast.Constant object at 0x7da20e955b10>]]] begin[:]
<ast.Raise object at 0x7da20e957190>
variable[settings] assign[=] call[name[self].get_settings, parameter[]]
call[name[settings].set, parameter[]] | keyword[def] identifier[set_html5_canvas_format] ( identifier[self] , identifier[fmt] ):
literal[string]
identifier[fmt] = identifier[fmt] . identifier[lower] ()
keyword[if] identifier[fmt] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] %(
identifier[fmt] ))
identifier[settings] = identifier[self] . identifier[get_settings] ()
identifier[settings] . identifier[set] ( identifier[html5_canvas_format] = identifier[fmt] ) | def set_html5_canvas_format(self, fmt):
"""
Sets the format used for rendering to the HTML5 canvas.
'png' offers greater clarity, especially for small text, but
does not have as good of performance as 'jpeg'.
"""
fmt = fmt.lower()
if fmt not in ('jpeg', 'png'):
raise ValueError("Format must be one of {jpeg|png} not '%s'" % fmt) # depends on [control=['if'], data=['fmt']]
settings = self.get_settings()
settings.set(html5_canvas_format=fmt) |
def values(cls, dataset, dim, expanded=True, flat=True, compute=True):
"""
Returns an array of the values along the supplied dimension.
"""
dim = dataset.get_dimension(dim, strict=True)
if dim in dataset.vdims:
coord_names = [c.name() for c in dataset.data.dim_coords]
data = dataset.data.copy().data
data = cls.canonicalize(dataset, data, coord_names)
return data.T.flatten() if flat else data
elif expanded:
data = cls.coords(dataset, dim.name, expanded=True)
return data.T.flatten() if flat else data
else:
return cls.coords(dataset, dim.name, ordered=True) | def function[values, parameter[cls, dataset, dim, expanded, flat, compute]]:
constant[
Returns an array of the values along the supplied dimension.
]
variable[dim] assign[=] call[name[dataset].get_dimension, parameter[name[dim]]]
if compare[name[dim] in name[dataset].vdims] begin[:]
variable[coord_names] assign[=] <ast.ListComp object at 0x7da1b0831690>
variable[data] assign[=] call[name[dataset].data.copy, parameter[]].data
variable[data] assign[=] call[name[cls].canonicalize, parameter[name[dataset], name[data], name[coord_names]]]
return[<ast.IfExp object at 0x7da1b07881c0>] | keyword[def] identifier[values] ( identifier[cls] , identifier[dataset] , identifier[dim] , identifier[expanded] = keyword[True] , identifier[flat] = keyword[True] , identifier[compute] = keyword[True] ):
literal[string]
identifier[dim] = identifier[dataset] . identifier[get_dimension] ( identifier[dim] , identifier[strict] = keyword[True] )
keyword[if] identifier[dim] keyword[in] identifier[dataset] . identifier[vdims] :
identifier[coord_names] =[ identifier[c] . identifier[name] () keyword[for] identifier[c] keyword[in] identifier[dataset] . identifier[data] . identifier[dim_coords] ]
identifier[data] = identifier[dataset] . identifier[data] . identifier[copy] (). identifier[data]
identifier[data] = identifier[cls] . identifier[canonicalize] ( identifier[dataset] , identifier[data] , identifier[coord_names] )
keyword[return] identifier[data] . identifier[T] . identifier[flatten] () keyword[if] identifier[flat] keyword[else] identifier[data]
keyword[elif] identifier[expanded] :
identifier[data] = identifier[cls] . identifier[coords] ( identifier[dataset] , identifier[dim] . identifier[name] , identifier[expanded] = keyword[True] )
keyword[return] identifier[data] . identifier[T] . identifier[flatten] () keyword[if] identifier[flat] keyword[else] identifier[data]
keyword[else] :
keyword[return] identifier[cls] . identifier[coords] ( identifier[dataset] , identifier[dim] . identifier[name] , identifier[ordered] = keyword[True] ) | def values(cls, dataset, dim, expanded=True, flat=True, compute=True):
"""
Returns an array of the values along the supplied dimension.
"""
dim = dataset.get_dimension(dim, strict=True)
if dim in dataset.vdims:
coord_names = [c.name() for c in dataset.data.dim_coords]
data = dataset.data.copy().data
data = cls.canonicalize(dataset, data, coord_names)
return data.T.flatten() if flat else data # depends on [control=['if'], data=[]]
elif expanded:
data = cls.coords(dataset, dim.name, expanded=True)
return data.T.flatten() if flat else data # depends on [control=['if'], data=[]]
else:
return cls.coords(dataset, dim.name, ordered=True) |
def events_for_onchain_secretreveal(
target_state: TargetTransferState,
channel_state: NettingChannelState,
block_number: BlockNumber,
block_hash: BlockHash,
) -> List[Event]:
""" Emits the event for revealing the secret on-chain if the transfer
can not be settled off-chain.
"""
transfer = target_state.transfer
expiration = transfer.lock.expiration
safe_to_wait, _ = is_safe_to_wait(
expiration,
channel_state.reveal_timeout,
block_number,
)
secret_known_offchain = channel.is_secret_known_offchain(
channel_state.partner_state,
transfer.lock.secrethash,
)
has_onchain_reveal_started = (
target_state.state == TargetTransferState.ONCHAIN_SECRET_REVEAL
)
if not safe_to_wait and secret_known_offchain and not has_onchain_reveal_started:
target_state.state = TargetTransferState.ONCHAIN_SECRET_REVEAL
secret = channel.get_secret(
channel_state.partner_state,
transfer.lock.secrethash,
)
assert secret, 'secret should be known at this point'
return secret_registry.events_for_onchain_secretreveal(
channel_state=channel_state,
secret=secret,
expiration=expiration,
block_hash=block_hash,
)
return list() | def function[events_for_onchain_secretreveal, parameter[target_state, channel_state, block_number, block_hash]]:
constant[ Emits the event for revealing the secret on-chain if the transfer
can not be settled off-chain.
]
variable[transfer] assign[=] name[target_state].transfer
variable[expiration] assign[=] name[transfer].lock.expiration
<ast.Tuple object at 0x7da1b19140a0> assign[=] call[name[is_safe_to_wait], parameter[name[expiration], name[channel_state].reveal_timeout, name[block_number]]]
variable[secret_known_offchain] assign[=] call[name[channel].is_secret_known_offchain, parameter[name[channel_state].partner_state, name[transfer].lock.secrethash]]
variable[has_onchain_reveal_started] assign[=] compare[name[target_state].state equal[==] name[TargetTransferState].ONCHAIN_SECRET_REVEAL]
if <ast.BoolOp object at 0x7da1b196dcf0> begin[:]
name[target_state].state assign[=] name[TargetTransferState].ONCHAIN_SECRET_REVEAL
variable[secret] assign[=] call[name[channel].get_secret, parameter[name[channel_state].partner_state, name[transfer].lock.secrethash]]
assert[name[secret]]
return[call[name[secret_registry].events_for_onchain_secretreveal, parameter[]]]
return[call[name[list], parameter[]]] | keyword[def] identifier[events_for_onchain_secretreveal] (
identifier[target_state] : identifier[TargetTransferState] ,
identifier[channel_state] : identifier[NettingChannelState] ,
identifier[block_number] : identifier[BlockNumber] ,
identifier[block_hash] : identifier[BlockHash] ,
)-> identifier[List] [ identifier[Event] ]:
literal[string]
identifier[transfer] = identifier[target_state] . identifier[transfer]
identifier[expiration] = identifier[transfer] . identifier[lock] . identifier[expiration]
identifier[safe_to_wait] , identifier[_] = identifier[is_safe_to_wait] (
identifier[expiration] ,
identifier[channel_state] . identifier[reveal_timeout] ,
identifier[block_number] ,
)
identifier[secret_known_offchain] = identifier[channel] . identifier[is_secret_known_offchain] (
identifier[channel_state] . identifier[partner_state] ,
identifier[transfer] . identifier[lock] . identifier[secrethash] ,
)
identifier[has_onchain_reveal_started] =(
identifier[target_state] . identifier[state] == identifier[TargetTransferState] . identifier[ONCHAIN_SECRET_REVEAL]
)
keyword[if] keyword[not] identifier[safe_to_wait] keyword[and] identifier[secret_known_offchain] keyword[and] keyword[not] identifier[has_onchain_reveal_started] :
identifier[target_state] . identifier[state] = identifier[TargetTransferState] . identifier[ONCHAIN_SECRET_REVEAL]
identifier[secret] = identifier[channel] . identifier[get_secret] (
identifier[channel_state] . identifier[partner_state] ,
identifier[transfer] . identifier[lock] . identifier[secrethash] ,
)
keyword[assert] identifier[secret] , literal[string]
keyword[return] identifier[secret_registry] . identifier[events_for_onchain_secretreveal] (
identifier[channel_state] = identifier[channel_state] ,
identifier[secret] = identifier[secret] ,
identifier[expiration] = identifier[expiration] ,
identifier[block_hash] = identifier[block_hash] ,
)
keyword[return] identifier[list] () | def events_for_onchain_secretreveal(target_state: TargetTransferState, channel_state: NettingChannelState, block_number: BlockNumber, block_hash: BlockHash) -> List[Event]:
""" Emits the event for revealing the secret on-chain if the transfer
can not be settled off-chain.
"""
transfer = target_state.transfer
expiration = transfer.lock.expiration
(safe_to_wait, _) = is_safe_to_wait(expiration, channel_state.reveal_timeout, block_number)
secret_known_offchain = channel.is_secret_known_offchain(channel_state.partner_state, transfer.lock.secrethash)
has_onchain_reveal_started = target_state.state == TargetTransferState.ONCHAIN_SECRET_REVEAL
if not safe_to_wait and secret_known_offchain and (not has_onchain_reveal_started):
target_state.state = TargetTransferState.ONCHAIN_SECRET_REVEAL
secret = channel.get_secret(channel_state.partner_state, transfer.lock.secrethash)
assert secret, 'secret should be known at this point'
return secret_registry.events_for_onchain_secretreveal(channel_state=channel_state, secret=secret, expiration=expiration, block_hash=block_hash) # depends on [control=['if'], data=[]]
return list() |
def count_unbalanced_brackets(line):
"""Return number of unmatched open/close brackets."""
count = 0
for opening, closing in ['()', '[]', '{}']:
count += abs(line.count(opening) - line.count(closing))
return count | def function[count_unbalanced_brackets, parameter[line]]:
constant[Return number of unmatched open/close brackets.]
variable[count] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b1b121a0>, <ast.Name object at 0x7da1b1b11810>]]] in starred[list[[<ast.Constant object at 0x7da1b1b11d80>, <ast.Constant object at 0x7da1b1b11990>, <ast.Constant object at 0x7da1b1b11cc0>]]] begin[:]
<ast.AugAssign object at 0x7da1b1b10130>
return[name[count]] | keyword[def] identifier[count_unbalanced_brackets] ( identifier[line] ):
literal[string]
identifier[count] = literal[int]
keyword[for] identifier[opening] , identifier[closing] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[count] += identifier[abs] ( identifier[line] . identifier[count] ( identifier[opening] )- identifier[line] . identifier[count] ( identifier[closing] ))
keyword[return] identifier[count] | def count_unbalanced_brackets(line):
"""Return number of unmatched open/close brackets."""
count = 0
for (opening, closing) in ['()', '[]', '{}']:
count += abs(line.count(opening) - line.count(closing)) # depends on [control=['for'], data=[]]
return count |
def iter(self, **kwargs):
"""Compute a range of orbits between two dates
Keyword Arguments:
dates (list of :py:class:`~beyond.dates.date.Date`): Dates from which iterate over
start (Date or None): Date of the first point
stop (Date, timedelta or None): Date of the last point
step (timedelta or None): Step to use during the computation. Use the same step as
`self` if `None`
listeners (list of:py:class:`~beyond.orbits.listeners.Listener`):
Yield:
:py:class:`Orbit`:
There is two ways to use the iter() method.
If *dates* is defined, it should be an iterable of dates. This could be
a generator as per :py:meth:`Date.range <beyond.dates.date.Date.range>`, or a list.
.. code-block:: python
# Create two successive ranges of dates, with different steps
dates = list(Date.range(Date(2019, 3, 23), Date(2019, 3, 24), timedelta(minutes=3)))
dates.extend(Date.range(Date(2019, 3, 24), Date(2019, 3, 25), timedelta(minutes=10), inclusive=True))
propag.iter(dates=dates)
The alternative, is the use of *start*, *stop* and *step* keyword arguments
which work exactly as :code:`Date.range(start, stop, step, inclusive=True)`
If one of *start*, *stop* or *step* arguments is set to ``None`` it will keep
the same property as the generating ephemeris.
.. code-block:: python
propag.iter(stop=stop) # If the iterator has a default step (e.g. numerical propagators)
propag.iter(stop=stop, step=step)
propag.iter(start=start, stop=stop, step=step)
"""
if 'dates' not in kwargs:
start = kwargs.setdefault('start', self.orbit.date)
stop = kwargs.get('stop')
step = kwargs.setdefault('step', getattr(self, 'step', None))
if 'stop' is None:
raise ValueError("The end of the propagation should be defined")
start = self.orbit.date if start is None else start
step = self.step if step is None else step
if isinstance(kwargs['stop'], timedelta):
kwargs['stop'] = start + kwargs['stop']
if start > kwargs['stop'] and step.total_seconds() > 0:
kwargs['step'] = -step
listeners = kwargs.pop('listeners', [])
for orb in self._iter(**kwargs):
for listen_orb in self.listen(orb, listeners):
yield listen_orb
yield orb | def function[iter, parameter[self]]:
constant[Compute a range of orbits between two dates
Keyword Arguments:
dates (list of :py:class:`~beyond.dates.date.Date`): Dates from which iterate over
start (Date or None): Date of the first point
stop (Date, timedelta or None): Date of the last point
step (timedelta or None): Step to use during the computation. Use the same step as
`self` if `None`
listeners (list of:py:class:`~beyond.orbits.listeners.Listener`):
Yield:
:py:class:`Orbit`:
There is two ways to use the iter() method.
If *dates* is defined, it should be an iterable of dates. This could be
a generator as per :py:meth:`Date.range <beyond.dates.date.Date.range>`, or a list.
.. code-block:: python
# Create two successive ranges of dates, with different steps
dates = list(Date.range(Date(2019, 3, 23), Date(2019, 3, 24), timedelta(minutes=3)))
dates.extend(Date.range(Date(2019, 3, 24), Date(2019, 3, 25), timedelta(minutes=10), inclusive=True))
propag.iter(dates=dates)
The alternative, is the use of *start*, *stop* and *step* keyword arguments
which work exactly as :code:`Date.range(start, stop, step, inclusive=True)`
If one of *start*, *stop* or *step* arguments is set to ``None`` it will keep
the same property as the generating ephemeris.
.. code-block:: python
propag.iter(stop=stop) # If the iterator has a default step (e.g. numerical propagators)
propag.iter(stop=stop, step=step)
propag.iter(start=start, stop=stop, step=step)
]
if compare[constant[dates] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
variable[start] assign[=] call[name[kwargs].setdefault, parameter[constant[start], name[self].orbit.date]]
variable[stop] assign[=] call[name[kwargs].get, parameter[constant[stop]]]
variable[step] assign[=] call[name[kwargs].setdefault, parameter[constant[step], call[name[getattr], parameter[name[self], constant[step], constant[None]]]]]
if compare[constant[stop] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0b7f9a0>
variable[start] assign[=] <ast.IfExp object at 0x7da1b0b7de10>
variable[step] assign[=] <ast.IfExp object at 0x7da1b0b7cdc0>
if call[name[isinstance], parameter[call[name[kwargs]][constant[stop]], name[timedelta]]] begin[:]
call[name[kwargs]][constant[stop]] assign[=] binary_operation[name[start] + call[name[kwargs]][constant[stop]]]
if <ast.BoolOp object at 0x7da1b0b7c3a0> begin[:]
call[name[kwargs]][constant[step]] assign[=] <ast.UnaryOp object at 0x7da18eb57d30>
variable[listeners] assign[=] call[name[kwargs].pop, parameter[constant[listeners], list[[]]]]
for taget[name[orb]] in starred[call[name[self]._iter, parameter[]]] begin[:]
for taget[name[listen_orb]] in starred[call[name[self].listen, parameter[name[orb], name[listeners]]]] begin[:]
<ast.Yield object at 0x7da18eb55ff0>
<ast.Yield object at 0x7da18eb57520> | keyword[def] identifier[iter] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[start] = identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[orbit] . identifier[date] )
identifier[stop] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[step] = identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[getattr] ( identifier[self] , literal[string] , keyword[None] ))
keyword[if] literal[string] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[start] = identifier[self] . identifier[orbit] . identifier[date] keyword[if] identifier[start] keyword[is] keyword[None] keyword[else] identifier[start]
identifier[step] = identifier[self] . identifier[step] keyword[if] identifier[step] keyword[is] keyword[None] keyword[else] identifier[step]
keyword[if] identifier[isinstance] ( identifier[kwargs] [ literal[string] ], identifier[timedelta] ):
identifier[kwargs] [ literal[string] ]= identifier[start] + identifier[kwargs] [ literal[string] ]
keyword[if] identifier[start] > identifier[kwargs] [ literal[string] ] keyword[and] identifier[step] . identifier[total_seconds] ()> literal[int] :
identifier[kwargs] [ literal[string] ]=- identifier[step]
identifier[listeners] = identifier[kwargs] . identifier[pop] ( literal[string] ,[])
keyword[for] identifier[orb] keyword[in] identifier[self] . identifier[_iter] (** identifier[kwargs] ):
keyword[for] identifier[listen_orb] keyword[in] identifier[self] . identifier[listen] ( identifier[orb] , identifier[listeners] ):
keyword[yield] identifier[listen_orb]
keyword[yield] identifier[orb] | def iter(self, **kwargs):
"""Compute a range of orbits between two dates
Keyword Arguments:
dates (list of :py:class:`~beyond.dates.date.Date`): Dates from which iterate over
start (Date or None): Date of the first point
stop (Date, timedelta or None): Date of the last point
step (timedelta or None): Step to use during the computation. Use the same step as
`self` if `None`
listeners (list of:py:class:`~beyond.orbits.listeners.Listener`):
Yield:
:py:class:`Orbit`:
There is two ways to use the iter() method.
If *dates* is defined, it should be an iterable of dates. This could be
a generator as per :py:meth:`Date.range <beyond.dates.date.Date.range>`, or a list.
.. code-block:: python
# Create two successive ranges of dates, with different steps
dates = list(Date.range(Date(2019, 3, 23), Date(2019, 3, 24), timedelta(minutes=3)))
dates.extend(Date.range(Date(2019, 3, 24), Date(2019, 3, 25), timedelta(minutes=10), inclusive=True))
propag.iter(dates=dates)
The alternative, is the use of *start*, *stop* and *step* keyword arguments
which work exactly as :code:`Date.range(start, stop, step, inclusive=True)`
If one of *start*, *stop* or *step* arguments is set to ``None`` it will keep
the same property as the generating ephemeris.
.. code-block:: python
propag.iter(stop=stop) # If the iterator has a default step (e.g. numerical propagators)
propag.iter(stop=stop, step=step)
propag.iter(start=start, stop=stop, step=step)
"""
if 'dates' not in kwargs:
start = kwargs.setdefault('start', self.orbit.date)
stop = kwargs.get('stop')
step = kwargs.setdefault('step', getattr(self, 'step', None))
if 'stop' is None:
raise ValueError('The end of the propagation should be defined') # depends on [control=['if'], data=[]]
start = self.orbit.date if start is None else start
step = self.step if step is None else step
if isinstance(kwargs['stop'], timedelta):
kwargs['stop'] = start + kwargs['stop'] # depends on [control=['if'], data=[]]
if start > kwargs['stop'] and step.total_seconds() > 0:
kwargs['step'] = -step # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kwargs']]
listeners = kwargs.pop('listeners', [])
for orb in self._iter(**kwargs):
for listen_orb in self.listen(orb, listeners):
yield listen_orb # depends on [control=['for'], data=['listen_orb']]
yield orb # depends on [control=['for'], data=['orb']] |
def validateDayOfMonth(value, year, month, blank=False, strip=None, allowlistRegexes=None, blocklistRegexes=None, excMsg=None):
    """Raises ValidationException if value is not a day of the month, from
    1 to 28, 29, 30, or 31 depending on the month and year.
    Returns value.
    * value (str): The value being validated as existing as a numbered day in the given year and month.
    * year (int): The given year.
    * month (int): The given month. 1 is January, 2 is February, and so on.
    * blank (bool): If True, a blank string will be accepted. Defaults to False.
    * strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped.
    * allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation, even if they aren't numbers.
    * blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation.
    * excMsg (str): A custom message to use in the raised ValidationException.
    >>> import pysimplevalidate as pysv
    >>> pysv.validateDayOfMonth('31', 2019, 10)
    31
    >>> pysv.validateDayOfMonth('32', 2019, 10)
    Traceback (most recent call last):
    ...
    pysimplevalidate.ValidationException: '32' is not a day in the month of October 2019
    >>> pysv.validateDayOfMonth('29', 2004, 2)
    29
    >>> pysv.validateDayOfMonth('29', 2005, 2)
    Traceback (most recent call last):
    ...
    pysimplevalidate.ValidationException: '29' is not a day in the month of February 2005
    """
    try:
        # monthrange() handles leap years, so February 29 is only valid in leap years.
        daysInMonth = calendar.monthrange(year, month)[1]
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # not swallowed; calendar.monthrange raises for bad month/year args.
        raise PySimpleValidateException('invalid arguments for year and/or month')
    try:
        return validateInt(value, blank=blank, strip=strip, allowlistRegexes=allowlistRegexes, blocklistRegexes=blocklistRegexes, min=1, max=daysInMonth)
    except Exception:
        # Replace the generic int-range exception message with one that
        # names the month and year being validated against.
        _raiseValidationException(_('%r is not a day in the month of %s %s') % (_errstr(value), ENGLISH_MONTH_NAMES[month - 1], year), excMsg)
constant[Raises ValidationException if value is not a day of the month, from
1 to 28, 29, 30, or 31 depending on the month and year.
Returns value.
* value (str): The value being validated as existing as a numbered day in the given year and month.
* year (int): The given year.
* month (int): The given month. 1 is January, 2 is February, and so on.
* blank (bool): If True, a blank string will be accepted. Defaults to False.
* strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped.
* allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation, even if they aren't numbers.
* blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation.
* excMsg (str): A custom message to use in the raised ValidationException.
>>> import pysimplevalidate as pysv
>>> pysv.validateDayOfMonth('31', 2019, 10)
31
>>> pysv.validateDayOfMonth('32', 2019, 10)
Traceback (most recent call last):
...
pysimplevalidate.ValidationException: '32' is not a day in the month of October 2019
>>> pysv.validateDayOfMonth('29', 2004, 2)
29
>>> pysv.validateDayOfMonth('29', 2005, 2)
Traceback (most recent call last):
...
pysimplevalidate.ValidationException: '29' is not a day in the month of February 2005
]
<ast.Try object at 0x7da18fe90580>
<ast.Try object at 0x7da18fe91090> | keyword[def] identifier[validateDayOfMonth] ( identifier[value] , identifier[year] , identifier[month] , identifier[blank] = keyword[False] , identifier[strip] = keyword[None] , identifier[allowlistRegexes] = keyword[None] , identifier[blocklistRegexes] = keyword[None] , identifier[excMsg] = keyword[None] ):
literal[string]
keyword[try] :
identifier[daysInMonth] = identifier[calendar] . identifier[monthrange] ( identifier[year] , identifier[month] )[ literal[int] ]
keyword[except] :
keyword[raise] identifier[PySimpleValidateException] ( literal[string] )
keyword[try] :
keyword[return] identifier[validateInt] ( identifier[value] , identifier[blank] = identifier[blank] , identifier[strip] = identifier[strip] , identifier[allowlistRegexes] = identifier[allowlistRegexes] , identifier[blocklistRegexes] = identifier[blocklistRegexes] , identifier[min] = literal[int] , identifier[max] = identifier[daysInMonth] )
keyword[except] :
identifier[_raiseValidationException] ( identifier[_] ( literal[string] )%( identifier[_errstr] ( identifier[value] ), identifier[ENGLISH_MONTH_NAMES] [ identifier[month] - literal[int] ], identifier[year] ), identifier[excMsg] ) | def validateDayOfMonth(value, year, month, blank=False, strip=None, allowlistRegexes=None, blocklistRegexes=None, excMsg=None):
"""Raises ValidationException if value is not a day of the month, from
1 to 28, 29, 30, or 31 depending on the month and year.
Returns value.
* value (str): The value being validated as existing as a numbered day in the given year and month.
* year (int): The given year.
* month (int): The given month. 1 is January, 2 is February, and so on.
* blank (bool): If True, a blank string will be accepted. Defaults to False.
* strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped.
* allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation, even if they aren't numbers.
* blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation.
* excMsg (str): A custom message to use in the raised ValidationException.
>>> import pysimplevalidate as pysv
>>> pysv.validateDayOfMonth('31', 2019, 10)
31
>>> pysv.validateDayOfMonth('32', 2019, 10)
Traceback (most recent call last):
...
pysimplevalidate.ValidationException: '32' is not a day in the month of October 2019
>>> pysv.validateDayOfMonth('29', 2004, 2)
29
>>> pysv.validateDayOfMonth('29', 2005, 2)
Traceback (most recent call last):
...
pysimplevalidate.ValidationException: '29' is not a day in the month of February 2005
"""
try:
daysInMonth = calendar.monthrange(year, month)[1] # depends on [control=['try'], data=[]]
except:
raise PySimpleValidateException('invalid arguments for year and/or month') # depends on [control=['except'], data=[]]
try:
return validateInt(value, blank=blank, strip=strip, allowlistRegexes=allowlistRegexes, blocklistRegexes=blocklistRegexes, min=1, max=daysInMonth) # depends on [control=['try'], data=[]]
except:
# Replace the exception message.
_raiseValidationException(_('%r is not a day in the month of %s %s') % (_errstr(value), ENGLISH_MONTH_NAMES[month - 1], year), excMsg) # depends on [control=['except'], data=[]] |
def update_module_page(mod, dest_path='.'):
    """Update the documentation notebook of a given module.

    Reads the module's existing notebook, links markdown cells to imported
    modules, refreshes cells for exported global variables, appends doc/empty
    cell pairs for any newly discovered functions, and writes the notebook back.

    * mod: the (imported) module whose documentation notebook is updated.
    * dest_path (str): directory in which the notebook lives. Defaults to '.'.
    Returns the path of the updated notebook.
    """
    doc_path = get_doc_path(mod, dest_path)
    strip_name = strip_fastai(mod.__name__)
    nb = read_nb(doc_path)
    cells = nb['cells']
    link_markdown_cells(cells, get_imported_modules(cells, mod.__name__))
    type_dict = read_nb_types(cells)
    gvar_map = get_global_vars(mod)
    # Refresh (or append) one markdown cell per exported global variable.
    for name in get_exports(mod):
        if name not in gvar_map: continue
        code = gvar_map[name]
        if name in type_dict: cells[type_dict[name]] = get_md_cell(code)
        else: cells.append(get_md_cell(code))
    pos_dict = read_nb_content(cells, strip_name)
    ft_names = get_ft_names(mod, include_inner=True)
    # Functions present in the module but not yet documented in the notebook.
    new_fts = list(set(ft_names) - set(pos_dict.keys()))
    # Fixed typo in the printed message: "fuctions" -> "functions".
    if new_fts: print(f'Found new functions for {mod}. Please document:\n{new_fts}')
    existing, undoc_cells, new_cells = parse_sections(cells)
    for ft_name in new_fts: new_cells.extend([get_doc_cell(ft_name), get_empty_cell()])
    if len(new_cells) > 1: nb['cells'] = existing + undoc_cells + new_cells
    write_nb(nb, doc_path)
    return doc_path
constant[Update the documentation notebook of a given module.]
variable[doc_path] assign[=] call[name[get_doc_path], parameter[name[mod], name[dest_path]]]
variable[strip_name] assign[=] call[name[strip_fastai], parameter[name[mod].__name__]]
variable[nb] assign[=] call[name[read_nb], parameter[name[doc_path]]]
variable[cells] assign[=] call[name[nb]][constant[cells]]
call[name[link_markdown_cells], parameter[name[cells], call[name[get_imported_modules], parameter[name[cells], name[mod].__name__]]]]
variable[type_dict] assign[=] call[name[read_nb_types], parameter[name[cells]]]
variable[gvar_map] assign[=] call[name[get_global_vars], parameter[name[mod]]]
for taget[name[name]] in starred[call[name[get_exports], parameter[name[mod]]]] begin[:]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[gvar_map]] begin[:]
continue
variable[code] assign[=] call[name[gvar_map]][name[name]]
if compare[name[name] in name[type_dict]] begin[:]
call[name[cells]][call[name[type_dict]][name[name]]] assign[=] call[name[get_md_cell], parameter[name[code]]]
variable[pos_dict] assign[=] call[name[read_nb_content], parameter[name[cells], name[strip_name]]]
variable[ft_names] assign[=] call[name[get_ft_names], parameter[name[mod]]]
variable[new_fts] assign[=] call[name[list], parameter[binary_operation[call[name[set], parameter[name[ft_names]]] - call[name[set], parameter[call[name[pos_dict].keys, parameter[]]]]]]]
if name[new_fts] begin[:]
call[name[print], parameter[<ast.JoinedStr object at 0x7da18dc072b0>]]
<ast.Tuple object at 0x7da18f00e2c0> assign[=] call[name[parse_sections], parameter[name[cells]]]
for taget[name[ft_name]] in starred[name[new_fts]] begin[:]
call[name[new_cells].extend, parameter[list[[<ast.Call object at 0x7da1b202b250>, <ast.Call object at 0x7da1b2028550>]]]]
if compare[call[name[len], parameter[name[new_cells]]] greater[>] constant[1]] begin[:]
call[name[nb]][constant[cells]] assign[=] binary_operation[binary_operation[name[existing] + name[undoc_cells]] + name[new_cells]]
call[name[write_nb], parameter[name[nb], name[doc_path]]]
return[name[doc_path]] | keyword[def] identifier[update_module_page] ( identifier[mod] , identifier[dest_path] = literal[string] ):
literal[string]
identifier[doc_path] = identifier[get_doc_path] ( identifier[mod] , identifier[dest_path] )
identifier[strip_name] = identifier[strip_fastai] ( identifier[mod] . identifier[__name__] )
identifier[nb] = identifier[read_nb] ( identifier[doc_path] )
identifier[cells] = identifier[nb] [ literal[string] ]
identifier[link_markdown_cells] ( identifier[cells] , identifier[get_imported_modules] ( identifier[cells] , identifier[mod] . identifier[__name__] ))
identifier[type_dict] = identifier[read_nb_types] ( identifier[cells] )
identifier[gvar_map] = identifier[get_global_vars] ( identifier[mod] )
keyword[for] identifier[name] keyword[in] identifier[get_exports] ( identifier[mod] ):
keyword[if] identifier[name] keyword[not] keyword[in] identifier[gvar_map] : keyword[continue]
identifier[code] = identifier[gvar_map] [ identifier[name] ]
keyword[if] identifier[name] keyword[in] identifier[type_dict] : identifier[cells] [ identifier[type_dict] [ identifier[name] ]]= identifier[get_md_cell] ( identifier[code] )
keyword[else] : identifier[cells] . identifier[append] ( identifier[get_md_cell] ( identifier[code] ))
identifier[pos_dict] = identifier[read_nb_content] ( identifier[cells] , identifier[strip_name] )
identifier[ft_names] = identifier[get_ft_names] ( identifier[mod] , identifier[include_inner] = keyword[True] )
identifier[new_fts] = identifier[list] ( identifier[set] ( identifier[ft_names] )- identifier[set] ( identifier[pos_dict] . identifier[keys] ()))
keyword[if] identifier[new_fts] : identifier[print] ( literal[string] )
identifier[existing] , identifier[undoc_cells] , identifier[new_cells] = identifier[parse_sections] ( identifier[cells] )
keyword[for] identifier[ft_name] keyword[in] identifier[new_fts] : identifier[new_cells] . identifier[extend] ([ identifier[get_doc_cell] ( identifier[ft_name] ), identifier[get_empty_cell] ()])
keyword[if] identifier[len] ( identifier[new_cells] )> literal[int] : identifier[nb] [ literal[string] ]= identifier[existing] + identifier[undoc_cells] + identifier[new_cells]
identifier[write_nb] ( identifier[nb] , identifier[doc_path] )
keyword[return] identifier[doc_path] | def update_module_page(mod, dest_path='.'):
"""Update the documentation notebook of a given module."""
doc_path = get_doc_path(mod, dest_path)
strip_name = strip_fastai(mod.__name__)
nb = read_nb(doc_path)
cells = nb['cells']
link_markdown_cells(cells, get_imported_modules(cells, mod.__name__))
type_dict = read_nb_types(cells)
gvar_map = get_global_vars(mod)
for name in get_exports(mod):
if name not in gvar_map:
continue # depends on [control=['if'], data=[]]
code = gvar_map[name]
if name in type_dict:
cells[type_dict[name]] = get_md_cell(code) # depends on [control=['if'], data=['name', 'type_dict']]
else:
cells.append(get_md_cell(code)) # depends on [control=['for'], data=['name']]
pos_dict = read_nb_content(cells, strip_name)
ft_names = get_ft_names(mod, include_inner=True)
new_fts = list(set(ft_names) - set(pos_dict.keys()))
if new_fts:
print(f'Found new fuctions for {mod}. Please document:\n{new_fts}') # depends on [control=['if'], data=[]]
(existing, undoc_cells, new_cells) = parse_sections(cells)
for ft_name in new_fts:
new_cells.extend([get_doc_cell(ft_name), get_empty_cell()]) # depends on [control=['for'], data=['ft_name']]
if len(new_cells) > 1:
nb['cells'] = existing + undoc_cells + new_cells # depends on [control=['if'], data=[]]
write_nb(nb, doc_path)
return doc_path |
def _internal_request(self, request_obj, url, method, **kwargs):
        """ Internal handling of requests. Handles Exceptions.

        Sends the request through ``request_obj``, defaulting payloads to JSON
        for write methods, retrying once after refreshing an expired OAuth
        token, and classifying HTTP errors before re-raising or returning.

        :param request_obj: a requests session.
        :param str url: url to send request to
        :param str method: type of request (get/put/post/patch/delete)
        :param kwargs: extra params to send to the request api
        :return: Response of the request
        :rtype: requests.Response
        :raises ValueError: if ``method`` is not one of the allowed verbs
        :raises RuntimeError: if a token refresh did not cure TokenExpiredError
        :raises HTTPError: on 4XX/5XX when ``self.raise_http_errors`` is true
        """
        method = method.lower()
        if method not in self._allowed_methods:
            raise ValueError('Method must be one of the allowed ones')
        if method == 'get':
            kwargs.setdefault('allow_redirects', True)
        elif method in ['post', 'put', 'patch']:
            # Default the Content-type to JSON for write methods, unless the
            # caller already set one or explicitly passed headers=None to opt out.
            if 'headers' not in kwargs:
                kwargs['headers'] = {}
            if kwargs.get('headers') is not None and kwargs['headers'].get(
                    'Content-type') is None:
                kwargs['headers']['Content-type'] = 'application/json'
            if 'data' in kwargs and kwargs['data'] is not None and kwargs['headers'].get(
                    'Content-type') == 'application/json':
                kwargs['data'] = json.dumps(kwargs['data'])  # convert to json
        request_done = False
        # Guards against an infinite refresh loop: we only refresh the token once.
        token_refreshed = False
        while not request_done:
            self._check_delay()  # sleeps if needed
            try:
                log.info('Requesting ({}) URL: {}'.format(method.upper(), url))
                log.info('Request parameters: {}'.format(kwargs))
                # auto_retry will occur inside this function call if enabled
                response = request_obj.request(method, url,
                                               **kwargs)
                response.raise_for_status()  # raise 4XX and 5XX error codes.
                log.info('Received response ({}) from URL {}'.format(
                    response.status_code, response.url))
                request_done = True
                return response
            except TokenExpiredError as e:
                # Token has expired, try to refresh the token and try again on the next loop
                if not self.token_backend.token.is_long_lived:
                    raise e
                if token_refreshed:
                    # Refresh token done but still TokenExpiredError raise
                    raise RuntimeError('Token Refresh Operation not working')
                log.info('Oauth Token is expired, fetching a new token')
                self.refresh_token()
                log.info('New oauth token fetched')
                token_refreshed = True
            except (ConnectionError, ProxyError, SSLError, Timeout) as e:
                # We couldn't connect to the target url, raise error
                log.debug('Connection Error calling: {}.{}'
                          ''.format(url, ('Using proxy: {}'.format(self.proxy)
                                          if self.proxy else '')))
                raise e  # re-raise exception
            except HTTPError as e:
                # Server response with 4XX or 5XX error status codes
                # try to extract the error message:
                try:
                    error = response.json()
                    error_message = error.get('error', {}).get('message', '')
                except ValueError:
                    # Body was not JSON; fall back to an empty message.
                    error_message = ''
                # First digit of the status code: 4 -> client error, 5 -> server error.
                status_code = int(e.response.status_code / 100)
                if status_code == 4:
                    # Client Error
                    # Logged as error. Could be a library error or Api changes
                    log.error('Client Error: {} | Error Message: {}'.format(str(e), error_message))
                else:
                    # Server Error
                    log.debug('Server Error: {}'.format(str(e)))
                if self.raise_http_errors:
                    if error_message:
                        # `from None` suppresses the original traceback chain.
                        raise HTTPError('{} | Error Message: {}'.format(e.args[0], error_message), response=response) from None
                    else:
                        raise e
                else:
                    # Caller opted out of raising; hand back the error response.
                    return e.response
            except RequestException as e:
                # catch any other exception raised by requests
                log.debug('Request Exception: {}'.format(str(e)))
                raise e
constant[ Internal handling of requests. Handles Exceptions.
:param request_obj: a requests session.
:param str url: url to send request to
:param str method: type of request (get/put/post/patch/delete)
:param kwargs: extra params to send to the request api
:return: Response of the request
:rtype: requests.Response
]
variable[method] assign[=] call[name[method].lower, parameter[]]
if compare[name[method] <ast.NotIn object at 0x7da2590d7190> name[self]._allowed_methods] begin[:]
<ast.Raise object at 0x7da1b1a57be0>
if compare[name[method] equal[==] constant[get]] begin[:]
call[name[kwargs].setdefault, parameter[constant[allow_redirects], constant[True]]]
variable[request_done] assign[=] constant[False]
variable[token_refreshed] assign[=] constant[False]
while <ast.UnaryOp object at 0x7da1b1a55a20> begin[:]
call[name[self]._check_delay, parameter[]]
<ast.Try object at 0x7da1b1a558d0> | keyword[def] identifier[_internal_request] ( identifier[self] , identifier[request_obj] , identifier[url] , identifier[method] ,** identifier[kwargs] ):
literal[string]
identifier[method] = identifier[method] . identifier[lower] ()
keyword[if] identifier[method] keyword[not] keyword[in] identifier[self] . identifier[_allowed_methods] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[method] == literal[string] :
identifier[kwargs] . identifier[setdefault] ( literal[string] , keyword[True] )
keyword[elif] identifier[method] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]={}
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] keyword[and] identifier[kwargs] [ literal[string] ]. identifier[get] (
literal[string] ) keyword[is] keyword[None] :
identifier[kwargs] [ literal[string] ][ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[kwargs] [ literal[string] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[kwargs] [ literal[string] ]. identifier[get] (
literal[string] )== literal[string] :
identifier[kwargs] [ literal[string] ]= identifier[json] . identifier[dumps] ( identifier[kwargs] [ literal[string] ])
identifier[request_done] = keyword[False]
identifier[token_refreshed] = keyword[False]
keyword[while] keyword[not] identifier[request_done] :
identifier[self] . identifier[_check_delay] ()
keyword[try] :
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[method] . identifier[upper] (), identifier[url] ))
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[kwargs] ))
identifier[response] = identifier[request_obj] . identifier[request] ( identifier[method] , identifier[url] ,
** identifier[kwargs] )
identifier[response] . identifier[raise_for_status] ()
identifier[log] . identifier[info] ( literal[string] . identifier[format] (
identifier[response] . identifier[status_code] , identifier[response] . identifier[url] ))
identifier[request_done] = keyword[True]
keyword[return] identifier[response]
keyword[except] identifier[TokenExpiredError] keyword[as] identifier[e] :
keyword[if] keyword[not] identifier[self] . identifier[token_backend] . identifier[token] . identifier[is_long_lived] :
keyword[raise] identifier[e]
keyword[if] identifier[token_refreshed] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[log] . identifier[info] ( literal[string] )
identifier[self] . identifier[refresh_token] ()
identifier[log] . identifier[info] ( literal[string] )
identifier[token_refreshed] = keyword[True]
keyword[except] ( identifier[ConnectionError] , identifier[ProxyError] , identifier[SSLError] , identifier[Timeout] ) keyword[as] identifier[e] :
identifier[log] . identifier[debug] ( literal[string]
literal[string] . identifier[format] ( identifier[url] ,( literal[string] . identifier[format] ( identifier[self] . identifier[proxy] )
keyword[if] identifier[self] . identifier[proxy] keyword[else] literal[string] )))
keyword[raise] identifier[e]
keyword[except] identifier[HTTPError] keyword[as] identifier[e] :
keyword[try] :
identifier[error] = identifier[response] . identifier[json] ()
identifier[error_message] = identifier[error] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , literal[string] )
keyword[except] identifier[ValueError] :
identifier[error_message] = literal[string]
identifier[status_code] = identifier[int] ( identifier[e] . identifier[response] . identifier[status_code] / literal[int] )
keyword[if] identifier[status_code] == literal[int] :
identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[str] ( identifier[e] ), identifier[error_message] ))
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[str] ( identifier[e] )))
keyword[if] identifier[self] . identifier[raise_http_errors] :
keyword[if] identifier[error_message] :
keyword[raise] identifier[HTTPError] ( literal[string] . identifier[format] ( identifier[e] . identifier[args] [ literal[int] ], identifier[error_message] ), identifier[response] = identifier[response] ) keyword[from] keyword[None]
keyword[else] :
keyword[raise] identifier[e]
keyword[else] :
keyword[return] identifier[e] . identifier[response]
keyword[except] identifier[RequestException] keyword[as] identifier[e] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[str] ( identifier[e] )))
keyword[raise] identifier[e] | def _internal_request(self, request_obj, url, method, **kwargs):
""" Internal handling of requests. Handles Exceptions.
:param request_obj: a requests session.
:param str url: url to send request to
:param str method: type of request (get/put/post/patch/delete)
:param kwargs: extra params to send to the request api
:return: Response of the request
:rtype: requests.Response
"""
method = method.lower()
if method not in self._allowed_methods:
raise ValueError('Method must be one of the allowed ones') # depends on [control=['if'], data=[]]
if method == 'get':
kwargs.setdefault('allow_redirects', True) # depends on [control=['if'], data=[]]
elif method in ['post', 'put', 'patch']:
if 'headers' not in kwargs:
kwargs['headers'] = {} # depends on [control=['if'], data=['kwargs']]
if kwargs.get('headers') is not None and kwargs['headers'].get('Content-type') is None:
kwargs['headers']['Content-type'] = 'application/json' # depends on [control=['if'], data=[]]
if 'data' in kwargs and kwargs['data'] is not None and (kwargs['headers'].get('Content-type') == 'application/json'):
kwargs['data'] = json.dumps(kwargs['data']) # convert to json # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
request_done = False
token_refreshed = False
while not request_done:
self._check_delay() # sleeps if needed
try:
log.info('Requesting ({}) URL: {}'.format(method.upper(), url))
log.info('Request parameters: {}'.format(kwargs))
# auto_retry will occur inside this function call if enabled
response = request_obj.request(method, url, **kwargs)
response.raise_for_status() # raise 4XX and 5XX error codes.
log.info('Received response ({}) from URL {}'.format(response.status_code, response.url))
request_done = True
return response # depends on [control=['try'], data=[]]
except TokenExpiredError as e:
# Token has expired, try to refresh the token and try again on the next loop
if not self.token_backend.token.is_long_lived:
raise e # depends on [control=['if'], data=[]]
if token_refreshed:
# Refresh token done but still TokenExpiredError raise
raise RuntimeError('Token Refresh Operation not working') # depends on [control=['if'], data=[]]
log.info('Oauth Token is expired, fetching a new token')
self.refresh_token()
log.info('New oauth token fetched')
token_refreshed = True # depends on [control=['except'], data=['e']]
except (ConnectionError, ProxyError, SSLError, Timeout) as e:
# We couldn't connect to the target url, raise error
log.debug('Connection Error calling: {}.{}'.format(url, 'Using proxy: {}'.format(self.proxy) if self.proxy else ''))
raise e # re-raise exception # depends on [control=['except'], data=['e']]
except HTTPError as e:
# Server response with 4XX or 5XX error status codes
# try to extract the error message:
try:
error = response.json()
error_message = error.get('error', {}).get('message', '') # depends on [control=['try'], data=[]]
except ValueError:
error_message = '' # depends on [control=['except'], data=[]]
status_code = int(e.response.status_code / 100)
if status_code == 4:
# Client Error
# Logged as error. Could be a library error or Api changes
log.error('Client Error: {} | Error Message: {}'.format(str(e), error_message)) # depends on [control=['if'], data=[]]
else:
# Server Error
log.debug('Server Error: {}'.format(str(e)))
if self.raise_http_errors:
if error_message:
raise HTTPError('{} | Error Message: {}'.format(e.args[0], error_message), response=response) from None # depends on [control=['if'], data=[]]
else:
raise e # depends on [control=['if'], data=[]]
else:
return e.response # depends on [control=['except'], data=['e']]
except RequestException as e:
# catch any other exception raised by requests
log.debug('Request Exception: {}'.format(str(e)))
raise e # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]] |
def webify_file(srcfilename: str, destfilename: str) -> None:
    """
    Copy the file at ``srcfilename`` to ``destfilename`` line by line,
    HTML-escaping every line on the way.
    """
    with open(srcfilename) as source, open(destfilename, 'w') as target:
        target.writelines(escape(raw_line) for raw_line in source)
constant[
Rewrites a file from ``srcfilename`` to ``destfilename``, HTML-escaping it
in the process.
]
with call[name[open], parameter[name[srcfilename]]] begin[:]
for taget[name[line_]] in starred[name[infile]] begin[:]
call[name[ofile].write, parameter[call[name[escape], parameter[name[line_]]]]] | keyword[def] identifier[webify_file] ( identifier[srcfilename] : identifier[str] , identifier[destfilename] : identifier[str] )-> keyword[None] :
literal[string]
keyword[with] identifier[open] ( identifier[srcfilename] ) keyword[as] identifier[infile] , identifier[open] ( identifier[destfilename] , literal[string] ) keyword[as] identifier[ofile] :
keyword[for] identifier[line_] keyword[in] identifier[infile] :
identifier[ofile] . identifier[write] ( identifier[escape] ( identifier[line_] )) | def webify_file(srcfilename: str, destfilename: str) -> None:
"""
Rewrites a file from ``srcfilename`` to ``destfilename``, HTML-escaping it
in the process.
"""
with open(srcfilename) as infile, open(destfilename, 'w') as ofile:
for line_ in infile:
ofile.write(escape(line_)) # depends on [control=['for'], data=['line_']] # depends on [control=['with'], data=['infile']] |
def models(cls, api_version=DEFAULT_API_VERSION):
        """Module depends on the API version:
           * 2016-04-01: :mod:`v2016_04_01.models<azure.mgmt.dns.v2016_04_01.models>`
           * 2018-03-01-preview: :mod:`v2018_03_01_preview.models<azure.mgmt.dns.v2018_03_01_preview.models>`
           * 2018-05-01: :mod:`v2018_05_01.models<azure.mgmt.dns.v2018_05_01.models>`
        """
        # Import lazily so only the requested API version's models are loaded.
        if api_version == '2016-04-01':
            from .v2016_04_01 import models as api_models
        elif api_version == '2018-03-01-preview':
            from .v2018_03_01_preview import models as api_models
        elif api_version == '2018-05-01':
            from .v2018_05_01 import models as api_models
        else:
            raise NotImplementedError("APIVersion {} is not available".format(api_version))
        return api_models
constant[Module depends on the API version:
* 2016-04-01: :mod:`v2016_04_01.models<azure.mgmt.dns.v2016_04_01.models>`
* 2018-03-01-preview: :mod:`v2018_03_01_preview.models<azure.mgmt.dns.v2018_03_01_preview.models>`
* 2018-05-01: :mod:`v2018_05_01.models<azure.mgmt.dns.v2018_05_01.models>`
]
if compare[name[api_version] equal[==] constant[2016-04-01]] begin[:]
from relative_module[v2016_04_01] import module[models]
return[name[models]]
<ast.Raise object at 0x7da18c4cf370> | keyword[def] identifier[models] ( identifier[cls] , identifier[api_version] = identifier[DEFAULT_API_VERSION] ):
literal[string]
keyword[if] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2016_04_01] keyword[import] identifier[models]
keyword[return] identifier[models]
keyword[elif] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2018_03_01_preview] keyword[import] identifier[models]
keyword[return] identifier[models]
keyword[elif] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2018_05_01] keyword[import] identifier[models]
keyword[return] identifier[models]
keyword[raise] identifier[NotImplementedError] ( literal[string] . identifier[format] ( identifier[api_version] )) | def models(cls, api_version=DEFAULT_API_VERSION):
"""Module depends on the API version:
* 2016-04-01: :mod:`v2016_04_01.models<azure.mgmt.dns.v2016_04_01.models>`
* 2018-03-01-preview: :mod:`v2018_03_01_preview.models<azure.mgmt.dns.v2018_03_01_preview.models>`
* 2018-05-01: :mod:`v2018_05_01.models<azure.mgmt.dns.v2018_05_01.models>`
"""
if api_version == '2016-04-01':
from .v2016_04_01 import models
return models # depends on [control=['if'], data=[]]
elif api_version == '2018-03-01-preview':
from .v2018_03_01_preview import models
return models # depends on [control=['if'], data=[]]
elif api_version == '2018-05-01':
from .v2018_05_01 import models
return models # depends on [control=['if'], data=[]]
raise NotImplementedError('APIVersion {} is not available'.format(api_version)) |
def monochromaticWavelength(img):
    '''
    Weight each RGB channel of the array returned by ``_calc(img)`` by its
    peak wavelength and collapse the channels into a single plane.
    NOTE(review): units are presumably nanometres — confirm against callers.
    (Original docstring was a TODO placeholder.)
    '''
    # peak wave lengths: https://en.wikipedia.org/wiki/RGB_color_model
    weighted = _calc(img)
    for channel, wavelength in enumerate((570, 540, 440)):  # (r, g, b)
        weighted[..., channel] *= wavelength
    return weighted.sum(axis=2)
constant[
TODO##########
]
variable[out] assign[=] call[name[_calc], parameter[name[img]]]
variable[peakWavelengths] assign[=] tuple[[<ast.Constant object at 0x7da18eb57490>, <ast.Constant object at 0x7da18eb55360>, <ast.Constant object at 0x7da18eb572b0>]]
for taget[tuple[[<ast.Name object at 0x7da18eb57130>, <ast.Name object at 0x7da18eb57a00>]]] in starred[call[name[enumerate], parameter[name[peakWavelengths]]]] begin[:]
<ast.AugAssign object at 0x7da18eb56110>
return[call[name[out].sum, parameter[]]] | keyword[def] identifier[monochromaticWavelength] ( identifier[img] ):
literal[string]
identifier[out] = identifier[_calc] ( identifier[img] )
identifier[peakWavelengths] =( literal[int] , literal[int] , literal[int] )
keyword[for] identifier[n] , identifier[p] keyword[in] identifier[enumerate] ( identifier[peakWavelengths] ):
identifier[out] [..., identifier[n] ]*= identifier[p]
keyword[return] identifier[out] . identifier[sum] ( identifier[axis] = literal[int] ) | def monochromaticWavelength(img):
"""
TODO##########
""" # peak wave lengths: https://en.wikipedia.org/wiki/RGB_color_model
out = _calc(img)
peakWavelengths = (570, 540, 440) # (r,g,b)
# s = sum(peakWavelengths)
for (n, p) in enumerate(peakWavelengths):
out[..., n] *= p # depends on [control=['for'], data=[]]
return out.sum(axis=2) |
def reset(self, fid=0):
    """Reset the object's resources to its initialized state.

    :param fid: the id of a sub-fitter
    :returns: True when the sub-fitter was merely flagged as looped
        (the flag is cleared); otherwise the result of resetting the
        underlying fitter proxy.
    """
    self._checkid(fid)
    state = self._fitids[fid]
    state["solved"] = False
    state["haserr"] = False
    if state["looped"]:
        # a looped sub-fitter only needs its flag cleared, not a proxy reset
        state["looped"] = False
        return True
    return self._fitproxy.reset(fid)
return True | def function[reset, parameter[self, fid]]:
constant[Reset the object's resources to its initialized state.
:param fid: the id of a sub-fitter
]
call[name[self]._checkid, parameter[name[fid]]]
call[call[name[self]._fitids][name[fid]]][constant[solved]] assign[=] constant[False]
call[call[name[self]._fitids][name[fid]]][constant[haserr]] assign[=] constant[False]
if <ast.UnaryOp object at 0x7da20e957c10> begin[:]
return[call[name[self]._fitproxy.reset, parameter[name[fid]]]]
return[constant[True]] | keyword[def] identifier[reset] ( identifier[self] , identifier[fid] = literal[int] ):
literal[string]
identifier[self] . identifier[_checkid] ( identifier[fid] )
identifier[self] . identifier[_fitids] [ identifier[fid] ][ literal[string] ]= keyword[False]
identifier[self] . identifier[_fitids] [ identifier[fid] ][ literal[string] ]= keyword[False]
keyword[if] keyword[not] identifier[self] . identifier[_fitids] [ identifier[fid] ][ literal[string] ]:
keyword[return] identifier[self] . identifier[_fitproxy] . identifier[reset] ( identifier[fid] )
keyword[else] :
identifier[self] . identifier[_fitids] [ identifier[fid] ][ literal[string] ]= keyword[False]
keyword[return] keyword[True] | def reset(self, fid=0):
"""Reset the object's resources to its initialized state.
:param fid: the id of a sub-fitter
"""
self._checkid(fid)
self._fitids[fid]['solved'] = False
self._fitids[fid]['haserr'] = False
if not self._fitids[fid]['looped']:
return self._fitproxy.reset(fid) # depends on [control=['if'], data=[]]
else:
self._fitids[fid]['looped'] = False
return True |
def heightmap_add_voronoi(
    hm: np.ndarray,
    nbPoints: Any,
    nbCoef: int,
    coef: Sequence[float],
    rnd: Optional[tcod.random.Random] = None,
) -> None:
    """Add values from a Voronoi diagram to the heightmap.

    Args:
        hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions.
        nbPoints (Any): Number of Voronoi sites.
        nbCoef (int): The diagram value is calculated from the nbCoef
            closest sites.
        coef (Sequence[float]): The distance to each site is scaled by the
            corresponding coef.
            Closest site : coef[0],
            second closest site : coef[1], ...
        rnd (Optional[Random]): A Random instance, or None.
    """
    # the site count is derived from the coefficient list; the nbPoints
    # argument is kept for API compatibility but overridden here
    nbPoints = len(coef)
    c_coef = ffi.new("float[]", coef)
    c_rnd = rnd.random_c if rnd else ffi.NULL
    lib.TCOD_heightmap_add_voronoi(
        _heightmap_cdata(hm), nbPoints, nbCoef, c_coef, c_rnd
    )
constant[Add values from a Voronoi diagram to the heightmap.
Args:
hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions.
nbPoints (Any): Number of Voronoi sites.
nbCoef (int): The diagram value is calculated from the nbCoef
closest sites.
coef (Sequence[float]): The distance to each site is scaled by the
corresponding coef.
Closest site : coef[0],
second closest site : coef[1], ...
rnd (Optional[Random]): A Random instance, or None.
]
variable[nbPoints] assign[=] call[name[len], parameter[name[coef]]]
variable[ccoef] assign[=] call[name[ffi].new, parameter[constant[float[]], name[coef]]]
call[name[lib].TCOD_heightmap_add_voronoi, parameter[call[name[_heightmap_cdata], parameter[name[hm]]], name[nbPoints], name[nbCoef], name[ccoef], <ast.IfExp object at 0x7da1b117b820>]] | keyword[def] identifier[heightmap_add_voronoi] (
identifier[hm] : identifier[np] . identifier[ndarray] ,
identifier[nbPoints] : identifier[Any] ,
identifier[nbCoef] : identifier[int] ,
identifier[coef] : identifier[Sequence] [ identifier[float] ],
identifier[rnd] : identifier[Optional] [ identifier[tcod] . identifier[random] . identifier[Random] ]= keyword[None] ,
)-> keyword[None] :
literal[string]
identifier[nbPoints] = identifier[len] ( identifier[coef] )
identifier[ccoef] = identifier[ffi] . identifier[new] ( literal[string] , identifier[coef] )
identifier[lib] . identifier[TCOD_heightmap_add_voronoi] (
identifier[_heightmap_cdata] ( identifier[hm] ),
identifier[nbPoints] ,
identifier[nbCoef] ,
identifier[ccoef] ,
identifier[rnd] . identifier[random_c] keyword[if] identifier[rnd] keyword[else] identifier[ffi] . identifier[NULL] ,
) | def heightmap_add_voronoi(hm: np.ndarray, nbPoints: Any, nbCoef: int, coef: Sequence[float], rnd: Optional[tcod.random.Random]=None) -> None:
"""Add values from a Voronoi diagram to the heightmap.
Args:
hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions.
nbPoints (Any): Number of Voronoi sites.
nbCoef (int): The diagram value is calculated from the nbCoef
closest sites.
coef (Sequence[float]): The distance to each site is scaled by the
corresponding coef.
Closest site : coef[0],
second closest site : coef[1], ...
rnd (Optional[Random]): A Random instance, or None.
"""
nbPoints = len(coef)
ccoef = ffi.new('float[]', coef)
lib.TCOD_heightmap_add_voronoi(_heightmap_cdata(hm), nbPoints, nbCoef, ccoef, rnd.random_c if rnd else ffi.NULL) |
def contains(polygon, point):
    """
    Tests whether point lies within the polygon.

    The point must be covered by the polygon and must not be ``in_hole``,
    where ``in_hole`` is True only when *every* interior ring covers the
    point (the polygon's holes).

    Parameters
    ----------
    polygon : object
        Exposes ``covers(point)`` and an ``interiors`` iterable whose
        elements also expose ``covers(point)`` (e.g. a shapely Polygon).
    point : object
        The point to test.

    Returns
    -------
    bool
    """
    # all() replaces the original functools.reduce(lambda P, Q: P and Q, ...):
    # same result on the boolean covers() values, but idiomatic and
    # short-circuiting.
    # NOTE(review): requiring ALL interiors to cover the point looks odd for
    # multi-hole polygons -- any() may have been intended; confirm before
    # changing, as it would alter results for points on hole boundaries.
    in_hole = (all(interior.covers(point) for interior in polygon.interiors)
               if polygon.interiors else False)
    return polygon.covers(point) and not in_hole
constant[
Tests whether point lies within the polygon
]
variable[in_hole] assign[=] <ast.IfExp object at 0x7da1b2344c70>
return[<ast.BoolOp object at 0x7da18f00dba0>] | keyword[def] identifier[contains] ( identifier[polygon] , identifier[point] ):
literal[string]
identifier[in_hole] = identifier[functools] . identifier[reduce] (
keyword[lambda] identifier[P] , identifier[Q] : identifier[P] keyword[and] identifier[Q] ,
[ identifier[interior] . identifier[covers] ( identifier[point] ) keyword[for] identifier[interior] keyword[in] identifier[polygon] . identifier[interiors] ]
) keyword[if] identifier[polygon] . identifier[interiors] keyword[else] keyword[False]
keyword[return] identifier[polygon] . identifier[covers] ( identifier[point] ) keyword[and] keyword[not] identifier[in_hole] | def contains(polygon, point):
"""
Tests whether point lies within the polygon
"""
in_hole = functools.reduce(lambda P, Q: P and Q, [interior.covers(point) for interior in polygon.interiors]) if polygon.interiors else False
return polygon.covers(point) and (not in_hole) |
def load(self):
    """Load the persisted estimator object from the filesystem.

    A private method: if the estimator has a file-backed representation
    (``self.is_file_persisted``), open it, deserialize its contents with
    dill and install the result via ``self.set_object``.  The file is now
    closed even if deserialization fails (previously an exception from
    ``dill.loads``/``set_object`` leaked the open file handle).
    """
    if self.is_file_persisted:
        self.object_file.open()
        try:
            temp = dill.loads(self.object_file.read())
            self.set_object(temp)
        finally:
            # always release the file handle, even on a failed load
            self.object_file.close()
constant[a private method that loads an estimator object from the filesystem]
if name[self].is_file_persisted begin[:]
call[name[self].object_file.open, parameter[]]
variable[temp] assign[=] call[name[dill].loads, parameter[call[name[self].object_file.read, parameter[]]]]
call[name[self].set_object, parameter[name[temp]]]
call[name[self].object_file.close, parameter[]] | keyword[def] identifier[load] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[is_file_persisted] :
identifier[self] . identifier[object_file] . identifier[open] ()
identifier[temp] = identifier[dill] . identifier[loads] ( identifier[self] . identifier[object_file] . identifier[read] ())
identifier[self] . identifier[set_object] ( identifier[temp] )
identifier[self] . identifier[object_file] . identifier[close] () | def load(self):
"""a private method that loads an estimator object from the filesystem"""
if self.is_file_persisted:
self.object_file.open()
temp = dill.loads(self.object_file.read())
self.set_object(temp)
self.object_file.close() # depends on [control=['if'], data=[]] |
def generate_config(ctx):
    """ Generates a sample gitlint config file. """
    path = os.path.abspath(
        click.prompt('Please specify a location for the sample gitlint config file',
                     default=DEFAULT_CONFIG_FILE))
    parent_dir = os.path.dirname(path)
    # refuse to write into a missing directory or over an existing file
    if not os.path.exists(parent_dir):
        click.echo(u"Error: Directory '{0}' does not exist.".format(parent_dir), err=True)
        ctx.exit(USAGE_ERROR_CODE)
    elif os.path.exists(path):
        click.echo(u"Error: File \"{0}\" already exists.".format(path), err=True)
        ctx.exit(USAGE_ERROR_CODE)
    LintConfigGenerator.generate_config(path)
    click.echo(u"Successfully generated {0}".format(path))
    ctx.exit(0)
constant[ Generates a sample gitlint config file. ]
variable[path] assign[=] call[name[click].prompt, parameter[constant[Please specify a location for the sample gitlint config file]]]
variable[path] assign[=] call[name[os].path.abspath, parameter[name[path]]]
variable[dir_name] assign[=] call[name[os].path.dirname, parameter[name[path]]]
if <ast.UnaryOp object at 0x7da18f00fbe0> begin[:]
call[name[click].echo, parameter[call[constant[Error: Directory '{0}' does not exist.].format, parameter[name[dir_name]]]]]
call[name[ctx].exit, parameter[name[USAGE_ERROR_CODE]]]
call[name[LintConfigGenerator].generate_config, parameter[name[path]]]
call[name[click].echo, parameter[call[constant[Successfully generated {0}].format, parameter[name[path]]]]]
call[name[ctx].exit, parameter[constant[0]]] | keyword[def] identifier[generate_config] ( identifier[ctx] ):
literal[string]
identifier[path] = identifier[click] . identifier[prompt] ( literal[string] , identifier[default] = identifier[DEFAULT_CONFIG_FILE] )
identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] )
identifier[dir_name] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[path] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dir_name] ):
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[dir_name] ), identifier[err] = keyword[True] )
identifier[ctx] . identifier[exit] ( identifier[USAGE_ERROR_CODE] )
keyword[elif] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[path] ), identifier[err] = keyword[True] )
identifier[ctx] . identifier[exit] ( identifier[USAGE_ERROR_CODE] )
identifier[LintConfigGenerator] . identifier[generate_config] ( identifier[path] )
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[path] ))
identifier[ctx] . identifier[exit] ( literal[int] ) | def generate_config(ctx):
""" Generates a sample gitlint config file. """
path = click.prompt('Please specify a location for the sample gitlint config file', default=DEFAULT_CONFIG_FILE)
path = os.path.abspath(path)
dir_name = os.path.dirname(path)
if not os.path.exists(dir_name):
click.echo(u"Error: Directory '{0}' does not exist.".format(dir_name), err=True)
ctx.exit(USAGE_ERROR_CODE) # depends on [control=['if'], data=[]]
elif os.path.exists(path):
click.echo(u'Error: File "{0}" already exists.'.format(path), err=True)
ctx.exit(USAGE_ERROR_CODE) # depends on [control=['if'], data=[]]
LintConfigGenerator.generate_config(path)
click.echo(u'Successfully generated {0}'.format(path))
ctx.exit(0) |
def query_from_file(self, filename, data=None, union=True, limit=None):
    """
    Run the SQL contained in a file against this database.

    Reads the whole script and hands it to :meth:`query`.

    Parameters
    ----------
    filename: str
        Path to a SQL script.
    data: list, dict
        Optional data for handlebars-templated queries; passed to the
        template and rendered before execution.
    union: bool
        Whether to "UNION ALL" handlebars templates so that multiple
        generated queries come back as a single data frame.
    limit: int
        Maximum number of records to return.

    Returns
    -------
    Whatever ``self.query`` returns for the script's contents.
    """
    with open(filename) as script_file:
        sql = script_file.read()
    return self.query(sql, data=data, union=union, limit=limit)
constant[
Query your database from a file.
Parameters
----------
filename: str
A SQL script
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
union: bool
Whether or not "UNION ALL" handlebars templates. This will return
any handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> with open("db/tests/myscript.sql", "w") as f:
... f.write(q)
109
>>> len(db.query_from_file("db/tests/myscript.sql", limit=10))
10
db.query_from_file("db/tests/myscript.sql", limit=10)
Title 0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99
]
with call[name[open], parameter[name[filename]]] begin[:]
variable[q] assign[=] call[name[fp].read, parameter[]]
return[call[name[self].query, parameter[name[q]]]] | keyword[def] identifier[query_from_file] ( identifier[self] , identifier[filename] , identifier[data] = keyword[None] , identifier[union] = keyword[True] , identifier[limit] = keyword[None] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[fp] :
identifier[q] = identifier[fp] . identifier[read] ()
keyword[return] identifier[self] . identifier[query] ( identifier[q] , identifier[data] = identifier[data] , identifier[union] = identifier[union] , identifier[limit] = identifier[limit] ) | def query_from_file(self, filename, data=None, union=True, limit=None):
"""
Query your database from a file.
Parameters
----------
filename: str
A SQL script
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
union: bool
Whether or not "UNION ALL" handlebars templates. This will return
any handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> with open("db/tests/myscript.sql", "w") as f:
... f.write(q)
109
>>> len(db.query_from_file("db/tests/myscript.sql", limit=10))
10
db.query_from_file("db/tests/myscript.sql", limit=10)
Title 0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99
"""
with open(filename) as fp:
q = fp.read() # depends on [control=['with'], data=['fp']]
return self.query(q, data=data, union=union, limit=limit) |
def wave_range(self, cenwave, npix, waveunits=None, round='round'):
    """Return the wavelength range spanned by ``npix`` pixels centered
    on ``cenwave``.

    Delegates to the module-level :func:`wave_range` with ``self.binset``
    as the wavelength bins.

    Parameters
    ----------
    cenwave, npix, round
        Passed through to :func:`wave_range`.
    waveunits : str, optional
        Units of ``cenwave`` and of the returned range.  When `None`
        (default) the values are taken to be in ``self.waveunits`` and no
        conversion is performed.

    Returns
    -------
    waverange : tuple of floats
        The two wavelength limits spanned by ``npix`` around ``cenwave``.

    Raises
    ------
    pysynphot.exceptions.UndefinedBinset
        If ``self.binset`` is None.
    """
    if self.binset is None:
        raise exceptions.UndefinedBinset('No binset specified for this bandpass.')
    if waveunits is not None:
        waveunits = units.Units(waveunits)
        # express cenwave in self.waveunits: waveunits -> Angstrom -> self.waveunits
        cenwave = units.Angstrom().Convert(
            waveunits.ToAngstrom(cenwave), self.waveunits.name)
    w1, w2 = wave_range(self.binset, cenwave, npix, round=round)
    if waveunits is not None:
        # convert both ends back: self.waveunits -> Angstrom -> waveunits
        w1 = units.Angstrom().Convert(self.waveunits.ToAngstrom(w1), waveunits.name)
        w2 = units.Angstrom().Convert(self.waveunits.ToAngstrom(w2), waveunits.name)
    return w1, w2
constant[Get the wavelength range covered by the given number of pixels
centered on the given wavelength.
.. note::
This calls :func:`wave_range` with ``self.binset``
as the first argument.
Parameters
----------
cenwave, npix, round
See :func:`wave_range`.
waveunits : str, optional
Wavelength units of ``cenwave`` and the returned wavelength range.
If `None` (default), the wavelengths are assumed to be in
the units of ``self.waveunits``.
Returns
-------
waverange : tuple of floats
The range of wavelengths spanned by ``npix`` centered on
``cenwave``.
Raises
------
pysynphot.exceptions.UndefinedBinset
If ``self.binset`` is None.
]
if compare[name[self].binset is constant[None]] begin[:]
<ast.Raise object at 0x7da18f7200a0>
if compare[name[waveunits] is_not constant[None]] begin[:]
variable[waveunits] assign[=] call[name[units].Units, parameter[name[waveunits]]]
variable[cenwave] assign[=] call[name[waveunits].ToAngstrom, parameter[name[cenwave]]]
variable[cenwave] assign[=] call[call[name[units].Angstrom, parameter[]].Convert, parameter[name[cenwave], name[self].waveunits.name]]
<ast.Tuple object at 0x7da18f721570> assign[=] call[name[wave_range], parameter[name[self].binset, name[cenwave], name[npix]]]
if compare[name[waveunits] is_not constant[None]] begin[:]
variable[wave1] assign[=] call[name[self].waveunits.ToAngstrom, parameter[name[wave1]]]
variable[wave2] assign[=] call[name[self].waveunits.ToAngstrom, parameter[name[wave2]]]
variable[wave1] assign[=] call[call[name[units].Angstrom, parameter[]].Convert, parameter[name[wave1], name[waveunits].name]]
variable[wave2] assign[=] call[call[name[units].Angstrom, parameter[]].Convert, parameter[name[wave2], name[waveunits].name]]
return[tuple[[<ast.Name object at 0x7da18f58fca0>, <ast.Name object at 0x7da18f58d510>]]] | keyword[def] identifier[wave_range] ( identifier[self] , identifier[cenwave] , identifier[npix] , identifier[waveunits] = keyword[None] , identifier[round] = literal[string] ):
literal[string]
keyword[if] identifier[self] . identifier[binset] keyword[is] keyword[None] :
keyword[raise] identifier[exceptions] . identifier[UndefinedBinset] ( literal[string] )
keyword[if] identifier[waveunits] keyword[is] keyword[not] keyword[None] :
identifier[waveunits] = identifier[units] . identifier[Units] ( identifier[waveunits] )
identifier[cenwave] = identifier[waveunits] . identifier[ToAngstrom] ( identifier[cenwave] )
identifier[cenwave] = identifier[units] . identifier[Angstrom] (). identifier[Convert] ( identifier[cenwave] , identifier[self] . identifier[waveunits] . identifier[name] )
identifier[wave1] , identifier[wave2] = identifier[wave_range] ( identifier[self] . identifier[binset] , identifier[cenwave] , identifier[npix] , identifier[round] = identifier[round] )
keyword[if] identifier[waveunits] keyword[is] keyword[not] keyword[None] :
identifier[wave1] = identifier[self] . identifier[waveunits] . identifier[ToAngstrom] ( identifier[wave1] )
identifier[wave2] = identifier[self] . identifier[waveunits] . identifier[ToAngstrom] ( identifier[wave2] )
identifier[wave1] = identifier[units] . identifier[Angstrom] (). identifier[Convert] ( identifier[wave1] , identifier[waveunits] . identifier[name] )
identifier[wave2] = identifier[units] . identifier[Angstrom] (). identifier[Convert] ( identifier[wave2] , identifier[waveunits] . identifier[name] )
keyword[return] identifier[wave1] , identifier[wave2] | def wave_range(self, cenwave, npix, waveunits=None, round='round'):
"""Get the wavelength range covered by the given number of pixels
centered on the given wavelength.
.. note::
This calls :func:`wave_range` with ``self.binset``
as the first argument.
Parameters
----------
cenwave, npix, round
See :func:`wave_range`.
waveunits : str, optional
Wavelength units of ``cenwave`` and the returned wavelength range.
If `None` (default), the wavelengths are assumed to be in
the units of ``self.waveunits``.
Returns
-------
waverange : tuple of floats
The range of wavelengths spanned by ``npix`` centered on
``cenwave``.
Raises
------
pysynphot.exceptions.UndefinedBinset
If ``self.binset`` is None.
"""
# make sure we have a binset to work with
if self.binset is None:
raise exceptions.UndefinedBinset('No binset specified for this bandpass.') # depends on [control=['if'], data=[]]
# convert cenwave from waveunits to self.waveunits, if necessary
if waveunits is not None:
waveunits = units.Units(waveunits)
# convert to angstroms and then whatever self.waveunits is
cenwave = waveunits.ToAngstrom(cenwave)
cenwave = units.Angstrom().Convert(cenwave, self.waveunits.name) # depends on [control=['if'], data=['waveunits']]
(wave1, wave2) = wave_range(self.binset, cenwave, npix, round=round)
# translate ends to waveunits, if necessary
if waveunits is not None:
# convert to angstroms
wave1 = self.waveunits.ToAngstrom(wave1)
wave2 = self.waveunits.ToAngstrom(wave2)
# then to waveunits
wave1 = units.Angstrom().Convert(wave1, waveunits.name)
wave2 = units.Angstrom().Convert(wave2, waveunits.name) # depends on [control=['if'], data=['waveunits']]
return (wave1, wave2) |
def read_lsm_scaninfo(fh):
    """Read LSM ScanInfo structure from file and return as dict.

    Parses the (entry, dtype, size) record stream that follows the
    0x10000000 "Recording" marker.  Entry tags listed in the
    TIFF.CZ_LSMINFO_SCANINFO_* lookup tables open nested sub-blocks
    (lists or dicts) or name scalar attributes; tag 0xffffffff closes the
    current sub-block.  Parsing stops once the outermost block is closed.

    fh is a binary file-like object positioned at the start of the
    ScanInfo structure; all fields are little-endian ('<' struct formats).
    Returns the (possibly empty) top-level dict.
    """
    block = {}
    blocks = [block]  # stack of open sub-blocks; `block` is the current one
    unpack = struct.unpack
    if struct.unpack('<I', fh.read(4))[0] != 0x10000000:
        # not a Recording sub block
        log.warning('read_lsm_scaninfo: invalid LSM ScanInfo structure')
        return block
    fh.read(8)  # skip the marker's remaining 8 bytes (presumably dtype+size) -- TODO confirm
    while True:
        entry, dtype, size = unpack('<III', fh.read(12))
        if dtype == 2:
            # ascii: NUL-stripped, decoded string of `size` bytes
            value = bytes2str(stripnull(fh.read(size)))
        elif dtype == 4:
            # long: signed 32-bit integer
            value = unpack('<i', fh.read(4))[0]
        elif dtype == 5:
            # rational: stored as a 64-bit float
            value = unpack('<d', fh.read(8))[0]
        else:
            value = 0  # unknown dtype: record the tag with a zero value
        if entry in TIFF.CZ_LSMINFO_SCANINFO_ARRAYS:
            # open a list-valued sub-block under the current dict
            blocks.append(block)
            name = TIFF.CZ_LSMINFO_SCANINFO_ARRAYS[entry]
            newobj = []
            block[name] = newobj
            block = newobj
        elif entry in TIFF.CZ_LSMINFO_SCANINFO_STRUCTS:
            # open a dict-valued sub-block appended to the current list
            blocks.append(block)
            newobj = {}
            block.append(newobj)
            block = newobj
        elif entry in TIFF.CZ_LSMINFO_SCANINFO_ATTRIBUTES:
            # scalar attribute of the current dict
            name = TIFF.CZ_LSMINFO_SCANINFO_ATTRIBUTES[entry]
            block[name] = value
        elif entry == 0xffffffff:
            # end sub block: pop back to the parent
            block = blocks.pop()
        else:
            # unknown entry: keep it under a hex-tagged key
            block['Entry0x%x' % entry] = value
        if not blocks:
            break
    return block
constant[Read LSM ScanInfo structure from file and return as dict.]
variable[block] assign[=] dictionary[[], []]
variable[blocks] assign[=] list[[<ast.Name object at 0x7da1b1802e00>]]
variable[unpack] assign[=] name[struct].unpack
if compare[call[call[name[struct].unpack, parameter[constant[<I], call[name[fh].read, parameter[constant[4]]]]]][constant[0]] not_equal[!=] constant[268435456]] begin[:]
call[name[log].warning, parameter[constant[read_lsm_scaninfo: invalid LSM ScanInfo structure]]]
return[name[block]]
call[name[fh].read, parameter[constant[8]]]
while constant[True] begin[:]
<ast.Tuple object at 0x7da1b1801750> assign[=] call[name[unpack], parameter[constant[<III], call[name[fh].read, parameter[constant[12]]]]]
if compare[name[dtype] equal[==] constant[2]] begin[:]
variable[value] assign[=] call[name[bytes2str], parameter[call[name[stripnull], parameter[call[name[fh].read, parameter[name[size]]]]]]]
if compare[name[entry] in name[TIFF].CZ_LSMINFO_SCANINFO_ARRAYS] begin[:]
call[name[blocks].append, parameter[name[block]]]
variable[name] assign[=] call[name[TIFF].CZ_LSMINFO_SCANINFO_ARRAYS][name[entry]]
variable[newobj] assign[=] list[[]]
call[name[block]][name[name]] assign[=] name[newobj]
variable[block] assign[=] name[newobj]
if <ast.UnaryOp object at 0x7da1b197cd30> begin[:]
break
return[name[block]] | keyword[def] identifier[read_lsm_scaninfo] ( identifier[fh] ):
literal[string]
identifier[block] ={}
identifier[blocks] =[ identifier[block] ]
identifier[unpack] = identifier[struct] . identifier[unpack]
keyword[if] identifier[struct] . identifier[unpack] ( literal[string] , identifier[fh] . identifier[read] ( literal[int] ))[ literal[int] ]!= literal[int] :
identifier[log] . identifier[warning] ( literal[string] )
keyword[return] identifier[block]
identifier[fh] . identifier[read] ( literal[int] )
keyword[while] keyword[True] :
identifier[entry] , identifier[dtype] , identifier[size] = identifier[unpack] ( literal[string] , identifier[fh] . identifier[read] ( literal[int] ))
keyword[if] identifier[dtype] == literal[int] :
identifier[value] = identifier[bytes2str] ( identifier[stripnull] ( identifier[fh] . identifier[read] ( identifier[size] )))
keyword[elif] identifier[dtype] == literal[int] :
identifier[value] = identifier[unpack] ( literal[string] , identifier[fh] . identifier[read] ( literal[int] ))[ literal[int] ]
keyword[elif] identifier[dtype] == literal[int] :
identifier[value] = identifier[unpack] ( literal[string] , identifier[fh] . identifier[read] ( literal[int] ))[ literal[int] ]
keyword[else] :
identifier[value] = literal[int]
keyword[if] identifier[entry] keyword[in] identifier[TIFF] . identifier[CZ_LSMINFO_SCANINFO_ARRAYS] :
identifier[blocks] . identifier[append] ( identifier[block] )
identifier[name] = identifier[TIFF] . identifier[CZ_LSMINFO_SCANINFO_ARRAYS] [ identifier[entry] ]
identifier[newobj] =[]
identifier[block] [ identifier[name] ]= identifier[newobj]
identifier[block] = identifier[newobj]
keyword[elif] identifier[entry] keyword[in] identifier[TIFF] . identifier[CZ_LSMINFO_SCANINFO_STRUCTS] :
identifier[blocks] . identifier[append] ( identifier[block] )
identifier[newobj] ={}
identifier[block] . identifier[append] ( identifier[newobj] )
identifier[block] = identifier[newobj]
keyword[elif] identifier[entry] keyword[in] identifier[TIFF] . identifier[CZ_LSMINFO_SCANINFO_ATTRIBUTES] :
identifier[name] = identifier[TIFF] . identifier[CZ_LSMINFO_SCANINFO_ATTRIBUTES] [ identifier[entry] ]
identifier[block] [ identifier[name] ]= identifier[value]
keyword[elif] identifier[entry] == literal[int] :
identifier[block] = identifier[blocks] . identifier[pop] ()
keyword[else] :
identifier[block] [ literal[string] % identifier[entry] ]= identifier[value]
keyword[if] keyword[not] identifier[blocks] :
keyword[break]
keyword[return] identifier[block] | def read_lsm_scaninfo(fh):
"""Read LSM ScanInfo structure from file and return as dict."""
block = {}
blocks = [block]
unpack = struct.unpack
if struct.unpack('<I', fh.read(4))[0] != 268435456:
# not a Recording sub block
log.warning('read_lsm_scaninfo: invalid LSM ScanInfo structure')
return block # depends on [control=['if'], data=[]]
fh.read(8)
while True:
(entry, dtype, size) = unpack('<III', fh.read(12))
if dtype == 2:
# ascii
value = bytes2str(stripnull(fh.read(size))) # depends on [control=['if'], data=[]]
elif dtype == 4:
# long
value = unpack('<i', fh.read(4))[0] # depends on [control=['if'], data=[]]
elif dtype == 5:
# rational
value = unpack('<d', fh.read(8))[0] # depends on [control=['if'], data=[]]
else:
value = 0
if entry in TIFF.CZ_LSMINFO_SCANINFO_ARRAYS:
blocks.append(block)
name = TIFF.CZ_LSMINFO_SCANINFO_ARRAYS[entry]
newobj = []
block[name] = newobj
block = newobj # depends on [control=['if'], data=['entry']]
elif entry in TIFF.CZ_LSMINFO_SCANINFO_STRUCTS:
blocks.append(block)
newobj = {}
block.append(newobj)
block = newobj # depends on [control=['if'], data=[]]
elif entry in TIFF.CZ_LSMINFO_SCANINFO_ATTRIBUTES:
name = TIFF.CZ_LSMINFO_SCANINFO_ATTRIBUTES[entry]
block[name] = value # depends on [control=['if'], data=['entry']]
elif entry == 4294967295:
# end sub block
block = blocks.pop() # depends on [control=['if'], data=[]]
else:
# unknown entry
block['Entry0x%x' % entry] = value
if not blocks:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return block |
def guest_create_nic(self, userid, vdev=None, nic_id=None,
                     mac_addr=None, active=False):
    """ Create the nic for the vm, add NICDEF record into the user direct.

    :param str userid: the user id of the vm
    :param str vdev: nic device number, 1- to 4- hexadecimal digits
    :param str nic_id: nic identifier
    :param str mac_addr: mac address, it is only be used when changing
        the guest's user direct. Format should be xx:xx:xx:xx:xx:xx,
        and x is a hexadecimal digit
    :param bool active: whether add a nic on active guest system
    :returns: nic device number, 1- to 4- hexadecimal digits
    :rtype: str
    """
    # validate the mac address up front; only a supplied address is checked
    if mac_addr is not None and not zvmutils.valid_mac_addr(mac_addr):
        raise exception.SDKInvalidInputFormat(
            msg=("Invalid mac address, format should be "
                 "xx:xx:xx:xx:xx:xx, and x is a hexadecimal digit"))
    return self._networkops.create_nic(userid, vdev=vdev, nic_id=nic_id,
                                       mac_addr=mac_addr, active=active)
mac_addr=mac_addr, active=active) | def function[guest_create_nic, parameter[self, userid, vdev, nic_id, mac_addr, active]]:
constant[ Create the nic for the vm, add NICDEF record into the user direct.
:param str userid: the user id of the vm
:param str vdev: nic device number, 1- to 4- hexadecimal digits
:param str nic_id: nic identifier
:param str mac_addr: mac address, it is only be used when changing
the guest's user direct. Format should be xx:xx:xx:xx:xx:xx,
and x is a hexadecimal digit
:param bool active: whether add a nic on active guest system
:returns: nic device number, 1- to 4- hexadecimal digits
:rtype: str
]
if compare[name[mac_addr] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da2043472b0> begin[:]
<ast.Raise object at 0x7da2043457b0>
return[call[name[self]._networkops.create_nic, parameter[name[userid]]]] | keyword[def] identifier[guest_create_nic] ( identifier[self] , identifier[userid] , identifier[vdev] = keyword[None] , identifier[nic_id] = keyword[None] ,
identifier[mac_addr] = keyword[None] , identifier[active] = keyword[False] ):
literal[string]
keyword[if] identifier[mac_addr] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[zvmutils] . identifier[valid_mac_addr] ( identifier[mac_addr] ):
keyword[raise] identifier[exception] . identifier[SDKInvalidInputFormat] (
identifier[msg] =( literal[string]
literal[string] ))
keyword[return] identifier[self] . identifier[_networkops] . identifier[create_nic] ( identifier[userid] , identifier[vdev] = identifier[vdev] , identifier[nic_id] = identifier[nic_id] ,
identifier[mac_addr] = identifier[mac_addr] , identifier[active] = identifier[active] ) | def guest_create_nic(self, userid, vdev=None, nic_id=None, mac_addr=None, active=False):
""" Create the nic for the vm, add NICDEF record into the user direct.
:param str userid: the user id of the vm
:param str vdev: nic device number, 1- to 4- hexadecimal digits
:param str nic_id: nic identifier
:param str mac_addr: mac address, it is only be used when changing
the guest's user direct. Format should be xx:xx:xx:xx:xx:xx,
and x is a hexadecimal digit
:param bool active: whether add a nic on active guest system
:returns: nic device number, 1- to 4- hexadecimal digits
:rtype: str
"""
if mac_addr is not None:
if not zvmutils.valid_mac_addr(mac_addr):
raise exception.SDKInvalidInputFormat(msg='Invalid mac address, format should be xx:xx:xx:xx:xx:xx, and x is a hexadecimal digit') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['mac_addr']]
return self._networkops.create_nic(userid, vdev=vdev, nic_id=nic_id, mac_addr=mac_addr, active=active) |
def who_am_i():
"""
Display username from current token and check for validity
"""
me = url['me'].format(token)
r = requests.get(me, params={'client_id': CLIENT_ID})
r.raise_for_status()
current_user = r.json()
logger.debug(me)
logger.info('Hello {0}!'.format(current_user['username']))
return current_user | def function[who_am_i, parameter[]]:
constant[
Display username from current token and check for validity
]
variable[me] assign[=] call[call[name[url]][constant[me]].format, parameter[name[token]]]
variable[r] assign[=] call[name[requests].get, parameter[name[me]]]
call[name[r].raise_for_status, parameter[]]
variable[current_user] assign[=] call[name[r].json, parameter[]]
call[name[logger].debug, parameter[name[me]]]
call[name[logger].info, parameter[call[constant[Hello {0}!].format, parameter[call[name[current_user]][constant[username]]]]]]
return[name[current_user]] | keyword[def] identifier[who_am_i] ():
literal[string]
identifier[me] = identifier[url] [ literal[string] ]. identifier[format] ( identifier[token] )
identifier[r] = identifier[requests] . identifier[get] ( identifier[me] , identifier[params] ={ literal[string] : identifier[CLIENT_ID] })
identifier[r] . identifier[raise_for_status] ()
identifier[current_user] = identifier[r] . identifier[json] ()
identifier[logger] . identifier[debug] ( identifier[me] )
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[current_user] [ literal[string] ]))
keyword[return] identifier[current_user] | def who_am_i():
"""
Display username from current token and check for validity
"""
me = url['me'].format(token)
r = requests.get(me, params={'client_id': CLIENT_ID})
r.raise_for_status()
current_user = r.json()
logger.debug(me)
logger.info('Hello {0}!'.format(current_user['username']))
return current_user |
def check_password(self, username, password):
"""The actual password checking logic. Separated from the authenticate code from Django for easier updating"""
try:
if SUPPORTS_VERIFY:
kerberos.checkPassword(username.lower(), password, getattr(settings, "KRB5_SERVICE", ""), getattr(settings, "KRB5_REALM", ""), getattr(settings, "KRB5_VERIFY_KDC", True))
else:
kerberos.checkPassword(username.lower(), password, getattr(settings, "KRB5_SERVICE", ""), getattr(settings, "KRB5_REALM", ""))
return True
except kerberos.BasicAuthError:
if getattr(settings, "KRB5_DEBUG", False):
logger.exception("Failure during authentication")
return False
except:
if getattr(settings, "KRB5_DEBUG", False):
logger.exception("Failure during authentication")
# for all other execptions also deny access
return False | def function[check_password, parameter[self, username, password]]:
constant[The actual password checking logic. Separated from the authenticate code from Django for easier updating]
<ast.Try object at 0x7da2047eb820> | keyword[def] identifier[check_password] ( identifier[self] , identifier[username] , identifier[password] ):
literal[string]
keyword[try] :
keyword[if] identifier[SUPPORTS_VERIFY] :
identifier[kerberos] . identifier[checkPassword] ( identifier[username] . identifier[lower] (), identifier[password] , identifier[getattr] ( identifier[settings] , literal[string] , literal[string] ), identifier[getattr] ( identifier[settings] , literal[string] , literal[string] ), identifier[getattr] ( identifier[settings] , literal[string] , keyword[True] ))
keyword[else] :
identifier[kerberos] . identifier[checkPassword] ( identifier[username] . identifier[lower] (), identifier[password] , identifier[getattr] ( identifier[settings] , literal[string] , literal[string] ), identifier[getattr] ( identifier[settings] , literal[string] , literal[string] ))
keyword[return] keyword[True]
keyword[except] identifier[kerberos] . identifier[BasicAuthError] :
keyword[if] identifier[getattr] ( identifier[settings] , literal[string] , keyword[False] ):
identifier[logger] . identifier[exception] ( literal[string] )
keyword[return] keyword[False]
keyword[except] :
keyword[if] identifier[getattr] ( identifier[settings] , literal[string] , keyword[False] ):
identifier[logger] . identifier[exception] ( literal[string] )
keyword[return] keyword[False] | def check_password(self, username, password):
"""The actual password checking logic. Separated from the authenticate code from Django for easier updating"""
try:
if SUPPORTS_VERIFY:
kerberos.checkPassword(username.lower(), password, getattr(settings, 'KRB5_SERVICE', ''), getattr(settings, 'KRB5_REALM', ''), getattr(settings, 'KRB5_VERIFY_KDC', True)) # depends on [control=['if'], data=[]]
else:
kerberos.checkPassword(username.lower(), password, getattr(settings, 'KRB5_SERVICE', ''), getattr(settings, 'KRB5_REALM', ''))
return True # depends on [control=['try'], data=[]]
except kerberos.BasicAuthError:
if getattr(settings, 'KRB5_DEBUG', False):
logger.exception('Failure during authentication') # depends on [control=['if'], data=[]]
return False # depends on [control=['except'], data=[]]
except:
if getattr(settings, 'KRB5_DEBUG', False):
logger.exception('Failure during authentication') # depends on [control=['if'], data=[]]
# for all other execptions also deny access
return False # depends on [control=['except'], data=[]] |
def response_data_to_model_instance(self, response_data):
"""Convert response data to a task instance model.
Args:
response_data (dict): The data from the request's response.
Returns:
:class:`saltant.models.base_task_instance.BaseTaskInstance`:
A task instance model instance representing the task
instance from the reponse data.
"""
# Coerce datetime strings into datetime objects
response_data["datetime_created"] = dateutil.parser.parse(
response_data["datetime_created"]
)
if response_data["datetime_finished"]:
response_data["datetime_finished"] = dateutil.parser.parse(
response_data["datetime_finished"]
)
# Instantiate a model for the task instance
return super(
BaseTaskInstanceManager, self
).response_data_to_model_instance(response_data) | def function[response_data_to_model_instance, parameter[self, response_data]]:
constant[Convert response data to a task instance model.
Args:
response_data (dict): The data from the request's response.
Returns:
:class:`saltant.models.base_task_instance.BaseTaskInstance`:
A task instance model instance representing the task
instance from the reponse data.
]
call[name[response_data]][constant[datetime_created]] assign[=] call[name[dateutil].parser.parse, parameter[call[name[response_data]][constant[datetime_created]]]]
if call[name[response_data]][constant[datetime_finished]] begin[:]
call[name[response_data]][constant[datetime_finished]] assign[=] call[name[dateutil].parser.parse, parameter[call[name[response_data]][constant[datetime_finished]]]]
return[call[call[name[super], parameter[name[BaseTaskInstanceManager], name[self]]].response_data_to_model_instance, parameter[name[response_data]]]] | keyword[def] identifier[response_data_to_model_instance] ( identifier[self] , identifier[response_data] ):
literal[string]
identifier[response_data] [ literal[string] ]= identifier[dateutil] . identifier[parser] . identifier[parse] (
identifier[response_data] [ literal[string] ]
)
keyword[if] identifier[response_data] [ literal[string] ]:
identifier[response_data] [ literal[string] ]= identifier[dateutil] . identifier[parser] . identifier[parse] (
identifier[response_data] [ literal[string] ]
)
keyword[return] identifier[super] (
identifier[BaseTaskInstanceManager] , identifier[self]
). identifier[response_data_to_model_instance] ( identifier[response_data] ) | def response_data_to_model_instance(self, response_data):
"""Convert response data to a task instance model.
Args:
response_data (dict): The data from the request's response.
Returns:
:class:`saltant.models.base_task_instance.BaseTaskInstance`:
A task instance model instance representing the task
instance from the reponse data.
"""
# Coerce datetime strings into datetime objects
response_data['datetime_created'] = dateutil.parser.parse(response_data['datetime_created'])
if response_data['datetime_finished']:
response_data['datetime_finished'] = dateutil.parser.parse(response_data['datetime_finished']) # depends on [control=['if'], data=[]]
# Instantiate a model for the task instance
return super(BaseTaskInstanceManager, self).response_data_to_model_instance(response_data) |
def auto_scroll(self, thumbkey):
"""Scroll the window to the thumb."""
if not self.gui_up:
return
# force scroll to bottom of thumbs, if checkbox is set
scrollp = self.w.auto_scroll.get_state()
if not scrollp:
return
bnch = self.thumb_dict[thumbkey]
# override X parameter because we only want to scroll vertically
pan_x, pan_y = self.c_view.get_pan()
self.c_view.panset_xy(pan_x, bnch.image.y) | def function[auto_scroll, parameter[self, thumbkey]]:
constant[Scroll the window to the thumb.]
if <ast.UnaryOp object at 0x7da1b0d57460> begin[:]
return[None]
variable[scrollp] assign[=] call[name[self].w.auto_scroll.get_state, parameter[]]
if <ast.UnaryOp object at 0x7da1b0d55e10> begin[:]
return[None]
variable[bnch] assign[=] call[name[self].thumb_dict][name[thumbkey]]
<ast.Tuple object at 0x7da1b0d570a0> assign[=] call[name[self].c_view.get_pan, parameter[]]
call[name[self].c_view.panset_xy, parameter[name[pan_x], name[bnch].image.y]] | keyword[def] identifier[auto_scroll] ( identifier[self] , identifier[thumbkey] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[gui_up] :
keyword[return]
identifier[scrollp] = identifier[self] . identifier[w] . identifier[auto_scroll] . identifier[get_state] ()
keyword[if] keyword[not] identifier[scrollp] :
keyword[return]
identifier[bnch] = identifier[self] . identifier[thumb_dict] [ identifier[thumbkey] ]
identifier[pan_x] , identifier[pan_y] = identifier[self] . identifier[c_view] . identifier[get_pan] ()
identifier[self] . identifier[c_view] . identifier[panset_xy] ( identifier[pan_x] , identifier[bnch] . identifier[image] . identifier[y] ) | def auto_scroll(self, thumbkey):
"""Scroll the window to the thumb."""
if not self.gui_up:
return # depends on [control=['if'], data=[]]
# force scroll to bottom of thumbs, if checkbox is set
scrollp = self.w.auto_scroll.get_state()
if not scrollp:
return # depends on [control=['if'], data=[]]
bnch = self.thumb_dict[thumbkey]
# override X parameter because we only want to scroll vertically
(pan_x, pan_y) = self.c_view.get_pan()
self.c_view.panset_xy(pan_x, bnch.image.y) |
def move_window(pymux, variables):
"""
Move window to a new index.
"""
dst_window = variables['<dst-window>']
try:
new_index = int(dst_window)
except ValueError:
raise CommandException('Invalid window index: %r' % (dst_window, ))
# Check first whether the index was not yet taken.
if pymux.arrangement.get_window_by_index(new_index):
raise CommandException("Can't move window: index in use.")
# Save index.
w = pymux.arrangement.get_active_window()
pymux.arrangement.move_window(w, new_index) | def function[move_window, parameter[pymux, variables]]:
constant[
Move window to a new index.
]
variable[dst_window] assign[=] call[name[variables]][constant[<dst-window>]]
<ast.Try object at 0x7da18bc73610>
if call[name[pymux].arrangement.get_window_by_index, parameter[name[new_index]]] begin[:]
<ast.Raise object at 0x7da18bc72d70>
variable[w] assign[=] call[name[pymux].arrangement.get_active_window, parameter[]]
call[name[pymux].arrangement.move_window, parameter[name[w], name[new_index]]] | keyword[def] identifier[move_window] ( identifier[pymux] , identifier[variables] ):
literal[string]
identifier[dst_window] = identifier[variables] [ literal[string] ]
keyword[try] :
identifier[new_index] = identifier[int] ( identifier[dst_window] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[CommandException] ( literal[string] %( identifier[dst_window] ,))
keyword[if] identifier[pymux] . identifier[arrangement] . identifier[get_window_by_index] ( identifier[new_index] ):
keyword[raise] identifier[CommandException] ( literal[string] )
identifier[w] = identifier[pymux] . identifier[arrangement] . identifier[get_active_window] ()
identifier[pymux] . identifier[arrangement] . identifier[move_window] ( identifier[w] , identifier[new_index] ) | def move_window(pymux, variables):
"""
Move window to a new index.
"""
dst_window = variables['<dst-window>']
try:
new_index = int(dst_window) # depends on [control=['try'], data=[]]
except ValueError:
raise CommandException('Invalid window index: %r' % (dst_window,)) # depends on [control=['except'], data=[]]
# Check first whether the index was not yet taken.
if pymux.arrangement.get_window_by_index(new_index):
raise CommandException("Can't move window: index in use.") # depends on [control=['if'], data=[]]
# Save index.
w = pymux.arrangement.get_active_window()
pymux.arrangement.move_window(w, new_index) |
def enumeration_to_list(enm):
"""
Turns the java.util.Enumeration into a list.
:param enm: the enumeration to convert
:type enm: JB_Object
:return: the list
:rtype: list
"""
result = []
while javabridge.call(enm, "hasMoreElements", "()Z"):
result.append(javabridge.call(enm, "nextElement", "()Ljava/lang/Object;"))
return result | def function[enumeration_to_list, parameter[enm]]:
constant[
Turns the java.util.Enumeration into a list.
:param enm: the enumeration to convert
:type enm: JB_Object
:return: the list
:rtype: list
]
variable[result] assign[=] list[[]]
while call[name[javabridge].call, parameter[name[enm], constant[hasMoreElements], constant[()Z]]] begin[:]
call[name[result].append, parameter[call[name[javabridge].call, parameter[name[enm], constant[nextElement], constant[()Ljava/lang/Object;]]]]]
return[name[result]] | keyword[def] identifier[enumeration_to_list] ( identifier[enm] ):
literal[string]
identifier[result] =[]
keyword[while] identifier[javabridge] . identifier[call] ( identifier[enm] , literal[string] , literal[string] ):
identifier[result] . identifier[append] ( identifier[javabridge] . identifier[call] ( identifier[enm] , literal[string] , literal[string] ))
keyword[return] identifier[result] | def enumeration_to_list(enm):
"""
Turns the java.util.Enumeration into a list.
:param enm: the enumeration to convert
:type enm: JB_Object
:return: the list
:rtype: list
"""
result = []
while javabridge.call(enm, 'hasMoreElements', '()Z'):
result.append(javabridge.call(enm, 'nextElement', '()Ljava/lang/Object;')) # depends on [control=['while'], data=[]]
return result |
def do_repl(self):
"""REPL for rTorrent XMLRPC commands."""
from prompt_toolkit import prompt
from prompt_toolkit.history import FileHistory
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.contrib.completers import WordCompleter
self.options.quiet = False
proxy = self.open()
ps1 = proxy.session.name() + u'> '
words = ['help', 'stats', 'exit']
words += [x + '=' for x in proxy.system.listMethods()]
history_file = os.path.join(config.config_dir, '.rtxmlrpc_history')
while True:
try:
try:
cmd = prompt(ps1, completer=WordCompleter(words),
auto_suggest=AutoSuggestFromHistory(),
history=FileHistory(history_file))
except KeyboardInterrupt:
cmd = ''
if not cmd:
print("Enter '?' or 'help' for usage information, 'Ctrl-D' to exit.")
if cmd in {'?', 'help'}:
self.repl_usage()
continue
elif cmd in {'', 'stats'}:
print(repr(proxy).split(None, 1)[1])
continue
elif cmd in {'exit'}:
raise EOFError()
try:
method, raw_args = cmd.split('=', 1)
except ValueError:
print("ERROR: '=' not found")
continue
raw_args = raw_args.split(',')
args = self.cooked(raw_args)
self.execute(proxy, method, args)
except EOFError:
print('Bye from {!r}'.format(proxy))
break | def function[do_repl, parameter[self]]:
constant[REPL for rTorrent XMLRPC commands.]
from relative_module[prompt_toolkit] import module[prompt]
from relative_module[prompt_toolkit.history] import module[FileHistory]
from relative_module[prompt_toolkit.auto_suggest] import module[AutoSuggestFromHistory]
from relative_module[prompt_toolkit.contrib.completers] import module[WordCompleter]
name[self].options.quiet assign[=] constant[False]
variable[proxy] assign[=] call[name[self].open, parameter[]]
variable[ps1] assign[=] binary_operation[call[name[proxy].session.name, parameter[]] + constant[> ]]
variable[words] assign[=] list[[<ast.Constant object at 0x7da1b13d4700>, <ast.Constant object at 0x7da1b13d4730>, <ast.Constant object at 0x7da1b13d4760>]]
<ast.AugAssign object at 0x7da1b13d4790>
variable[history_file] assign[=] call[name[os].path.join, parameter[name[config].config_dir, constant[.rtxmlrpc_history]]]
while constant[True] begin[:]
<ast.Try object at 0x7da1b13d4c10> | keyword[def] identifier[do_repl] ( identifier[self] ):
literal[string]
keyword[from] identifier[prompt_toolkit] keyword[import] identifier[prompt]
keyword[from] identifier[prompt_toolkit] . identifier[history] keyword[import] identifier[FileHistory]
keyword[from] identifier[prompt_toolkit] . identifier[auto_suggest] keyword[import] identifier[AutoSuggestFromHistory]
keyword[from] identifier[prompt_toolkit] . identifier[contrib] . identifier[completers] keyword[import] identifier[WordCompleter]
identifier[self] . identifier[options] . identifier[quiet] = keyword[False]
identifier[proxy] = identifier[self] . identifier[open] ()
identifier[ps1] = identifier[proxy] . identifier[session] . identifier[name] ()+ literal[string]
identifier[words] =[ literal[string] , literal[string] , literal[string] ]
identifier[words] +=[ identifier[x] + literal[string] keyword[for] identifier[x] keyword[in] identifier[proxy] . identifier[system] . identifier[listMethods] ()]
identifier[history_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[config] . identifier[config_dir] , literal[string] )
keyword[while] keyword[True] :
keyword[try] :
keyword[try] :
identifier[cmd] = identifier[prompt] ( identifier[ps1] , identifier[completer] = identifier[WordCompleter] ( identifier[words] ),
identifier[auto_suggest] = identifier[AutoSuggestFromHistory] (),
identifier[history] = identifier[FileHistory] ( identifier[history_file] ))
keyword[except] identifier[KeyboardInterrupt] :
identifier[cmd] = literal[string]
keyword[if] keyword[not] identifier[cmd] :
identifier[print] ( literal[string] )
keyword[if] identifier[cmd] keyword[in] { literal[string] , literal[string] }:
identifier[self] . identifier[repl_usage] ()
keyword[continue]
keyword[elif] identifier[cmd] keyword[in] { literal[string] , literal[string] }:
identifier[print] ( identifier[repr] ( identifier[proxy] ). identifier[split] ( keyword[None] , literal[int] )[ literal[int] ])
keyword[continue]
keyword[elif] identifier[cmd] keyword[in] { literal[string] }:
keyword[raise] identifier[EOFError] ()
keyword[try] :
identifier[method] , identifier[raw_args] = identifier[cmd] . identifier[split] ( literal[string] , literal[int] )
keyword[except] identifier[ValueError] :
identifier[print] ( literal[string] )
keyword[continue]
identifier[raw_args] = identifier[raw_args] . identifier[split] ( literal[string] )
identifier[args] = identifier[self] . identifier[cooked] ( identifier[raw_args] )
identifier[self] . identifier[execute] ( identifier[proxy] , identifier[method] , identifier[args] )
keyword[except] identifier[EOFError] :
identifier[print] ( literal[string] . identifier[format] ( identifier[proxy] ))
keyword[break] | def do_repl(self):
"""REPL for rTorrent XMLRPC commands."""
from prompt_toolkit import prompt
from prompt_toolkit.history import FileHistory
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.contrib.completers import WordCompleter
self.options.quiet = False
proxy = self.open()
ps1 = proxy.session.name() + u'> '
words = ['help', 'stats', 'exit']
words += [x + '=' for x in proxy.system.listMethods()]
history_file = os.path.join(config.config_dir, '.rtxmlrpc_history')
while True:
try:
try:
cmd = prompt(ps1, completer=WordCompleter(words), auto_suggest=AutoSuggestFromHistory(), history=FileHistory(history_file)) # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
cmd = '' # depends on [control=['except'], data=[]]
if not cmd:
print("Enter '?' or 'help' for usage information, 'Ctrl-D' to exit.") # depends on [control=['if'], data=[]]
if cmd in {'?', 'help'}:
self.repl_usage()
continue # depends on [control=['if'], data=[]]
elif cmd in {'', 'stats'}:
print(repr(proxy).split(None, 1)[1])
continue # depends on [control=['if'], data=[]]
elif cmd in {'exit'}:
raise EOFError() # depends on [control=['if'], data=[]]
try:
(method, raw_args) = cmd.split('=', 1) # depends on [control=['try'], data=[]]
except ValueError:
print("ERROR: '=' not found")
continue # depends on [control=['except'], data=[]]
raw_args = raw_args.split(',')
args = self.cooked(raw_args)
self.execute(proxy, method, args) # depends on [control=['try'], data=[]]
except EOFError:
print('Bye from {!r}'.format(proxy))
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] |
def bus_factor(self, by='repository', ignore_globs=None, include_globs=None):
"""
An experimental heuristic for truck factor of a repository calculated by the current distribution of blame in
the repository's primary branch. The factor is the fewest number of contributors whose contributions make up at
least 50% of the codebase's LOC
:param ignore_globs: (optional, default=None) a list of globs to ignore, default none excludes nothing
:param include_globs: (optinal, default=None) a list of globs to include, default of None includes everything.
:param by: (optional, default=repository) whether to group by repository or by file
:return:
"""
if by == 'file':
raise NotImplementedError('File-wise bus factor')
blame = self.blame(include_globs=include_globs, ignore_globs=ignore_globs, by=by)
blame = blame.sort_values(by=['loc'], ascending=False)
total = blame['loc'].sum()
cumulative = 0
tc = 0
for idx in range(blame.shape[0]):
cumulative += blame.ix[idx, 'loc']
tc += 1
if cumulative >= total / 2:
break
return DataFrame([[self._repo_name(), tc]], columns=['repository', 'bus factor']) | def function[bus_factor, parameter[self, by, ignore_globs, include_globs]]:
constant[
An experimental heuristic for truck factor of a repository calculated by the current distribution of blame in
the repository's primary branch. The factor is the fewest number of contributors whose contributions make up at
least 50% of the codebase's LOC
:param ignore_globs: (optional, default=None) a list of globs to ignore, default none excludes nothing
:param include_globs: (optinal, default=None) a list of globs to include, default of None includes everything.
:param by: (optional, default=repository) whether to group by repository or by file
:return:
]
if compare[name[by] equal[==] constant[file]] begin[:]
<ast.Raise object at 0x7da18f09f910>
variable[blame] assign[=] call[name[self].blame, parameter[]]
variable[blame] assign[=] call[name[blame].sort_values, parameter[]]
variable[total] assign[=] call[call[name[blame]][constant[loc]].sum, parameter[]]
variable[cumulative] assign[=] constant[0]
variable[tc] assign[=] constant[0]
for taget[name[idx]] in starred[call[name[range], parameter[call[name[blame].shape][constant[0]]]]] begin[:]
<ast.AugAssign object at 0x7da18bcca680>
<ast.AugAssign object at 0x7da18bcc9930>
if compare[name[cumulative] greater_or_equal[>=] binary_operation[name[total] / constant[2]]] begin[:]
break
return[call[name[DataFrame], parameter[list[[<ast.List object at 0x7da18bcc97e0>]]]]] | keyword[def] identifier[bus_factor] ( identifier[self] , identifier[by] = literal[string] , identifier[ignore_globs] = keyword[None] , identifier[include_globs] = keyword[None] ):
literal[string]
keyword[if] identifier[by] == literal[string] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
identifier[blame] = identifier[self] . identifier[blame] ( identifier[include_globs] = identifier[include_globs] , identifier[ignore_globs] = identifier[ignore_globs] , identifier[by] = identifier[by] )
identifier[blame] = identifier[blame] . identifier[sort_values] ( identifier[by] =[ literal[string] ], identifier[ascending] = keyword[False] )
identifier[total] = identifier[blame] [ literal[string] ]. identifier[sum] ()
identifier[cumulative] = literal[int]
identifier[tc] = literal[int]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[blame] . identifier[shape] [ literal[int] ]):
identifier[cumulative] += identifier[blame] . identifier[ix] [ identifier[idx] , literal[string] ]
identifier[tc] += literal[int]
keyword[if] identifier[cumulative] >= identifier[total] / literal[int] :
keyword[break]
keyword[return] identifier[DataFrame] ([[ identifier[self] . identifier[_repo_name] (), identifier[tc] ]], identifier[columns] =[ literal[string] , literal[string] ]) | def bus_factor(self, by='repository', ignore_globs=None, include_globs=None):
"""
An experimental heuristic for truck factor of a repository calculated by the current distribution of blame in
the repository's primary branch. The factor is the fewest number of contributors whose contributions make up at
least 50% of the codebase's LOC
:param ignore_globs: (optional, default=None) a list of globs to ignore, default none excludes nothing
:param include_globs: (optinal, default=None) a list of globs to include, default of None includes everything.
:param by: (optional, default=repository) whether to group by repository or by file
:return:
"""
if by == 'file':
raise NotImplementedError('File-wise bus factor') # depends on [control=['if'], data=[]]
blame = self.blame(include_globs=include_globs, ignore_globs=ignore_globs, by=by)
blame = blame.sort_values(by=['loc'], ascending=False)
total = blame['loc'].sum()
cumulative = 0
tc = 0
for idx in range(blame.shape[0]):
cumulative += blame.ix[idx, 'loc']
tc += 1
if cumulative >= total / 2:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['idx']]
return DataFrame([[self._repo_name(), tc]], columns=['repository', 'bus factor']) |
def Recv(self):
"""Accept a message from Fleetspeak.
Returns:
A tuple (common_pb2.Message, size of the message in bytes).
Raises:
ProtocolError: If we receive unexpected data from Fleetspeak.
"""
size = struct.unpack(_STRUCT_FMT, self._ReadN(_STRUCT_LEN))[0]
if size > MAX_SIZE:
raise ProtocolError("Expected size to be at most %d, got %d" % (MAX_SIZE,
size))
with self._read_lock:
buf = self._ReadN(size)
self._ReadMagic()
res = common_pb2.Message()
res.ParseFromString(buf)
return res, len(buf) | def function[Recv, parameter[self]]:
constant[Accept a message from Fleetspeak.
Returns:
A tuple (common_pb2.Message, size of the message in bytes).
Raises:
ProtocolError: If we receive unexpected data from Fleetspeak.
]
variable[size] assign[=] call[call[name[struct].unpack, parameter[name[_STRUCT_FMT], call[name[self]._ReadN, parameter[name[_STRUCT_LEN]]]]]][constant[0]]
if compare[name[size] greater[>] name[MAX_SIZE]] begin[:]
<ast.Raise object at 0x7da1b13895a0>
with name[self]._read_lock begin[:]
variable[buf] assign[=] call[name[self]._ReadN, parameter[name[size]]]
call[name[self]._ReadMagic, parameter[]]
variable[res] assign[=] call[name[common_pb2].Message, parameter[]]
call[name[res].ParseFromString, parameter[name[buf]]]
return[tuple[[<ast.Name object at 0x7da1b138b160>, <ast.Call object at 0x7da1b138b040>]]] | keyword[def] identifier[Recv] ( identifier[self] ):
literal[string]
identifier[size] = identifier[struct] . identifier[unpack] ( identifier[_STRUCT_FMT] , identifier[self] . identifier[_ReadN] ( identifier[_STRUCT_LEN] ))[ literal[int] ]
keyword[if] identifier[size] > identifier[MAX_SIZE] :
keyword[raise] identifier[ProtocolError] ( literal[string] %( identifier[MAX_SIZE] ,
identifier[size] ))
keyword[with] identifier[self] . identifier[_read_lock] :
identifier[buf] = identifier[self] . identifier[_ReadN] ( identifier[size] )
identifier[self] . identifier[_ReadMagic] ()
identifier[res] = identifier[common_pb2] . identifier[Message] ()
identifier[res] . identifier[ParseFromString] ( identifier[buf] )
keyword[return] identifier[res] , identifier[len] ( identifier[buf] ) | def Recv(self):
"""Accept a message from Fleetspeak.
Returns:
A tuple (common_pb2.Message, size of the message in bytes).
Raises:
ProtocolError: If we receive unexpected data from Fleetspeak.
"""
size = struct.unpack(_STRUCT_FMT, self._ReadN(_STRUCT_LEN))[0]
if size > MAX_SIZE:
raise ProtocolError('Expected size to be at most %d, got %d' % (MAX_SIZE, size)) # depends on [control=['if'], data=['size', 'MAX_SIZE']]
with self._read_lock:
buf = self._ReadN(size)
self._ReadMagic() # depends on [control=['with'], data=[]]
res = common_pb2.Message()
res.ParseFromString(buf)
return (res, len(buf)) |
def feature_types(self):
"""Distinct types (``type_``) in :class:`.models.Feature`
:return: all distinct feature types
:rtype: list[str]
"""
r = self.session.query(distinct(models.Feature.type_)).all()
return [x[0] for x in r] | def function[feature_types, parameter[self]]:
constant[Distinct types (``type_``) in :class:`.models.Feature`
:return: all distinct feature types
:rtype: list[str]
]
variable[r] assign[=] call[call[name[self].session.query, parameter[call[name[distinct], parameter[name[models].Feature.type_]]]].all, parameter[]]
return[<ast.ListComp object at 0x7da18f811180>] | keyword[def] identifier[feature_types] ( identifier[self] ):
literal[string]
identifier[r] = identifier[self] . identifier[session] . identifier[query] ( identifier[distinct] ( identifier[models] . identifier[Feature] . identifier[type_] )). identifier[all] ()
keyword[return] [ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[r] ] | def feature_types(self):
"""Distinct types (``type_``) in :class:`.models.Feature`
:return: all distinct feature types
:rtype: list[str]
"""
r = self.session.query(distinct(models.Feature.type_)).all()
return [x[0] for x in r] |
def name_match(self, wfn):
"""
Accepts a set of CPE Names K and a candidate CPE Name X. It returns
'True' if X matches any member of K, and 'False' otherwise.
:param CPESet self: A set of m known CPE Names K = {K1, K2, …, Km}.
:param CPE cpe: A candidate CPE Name X.
:returns: True if X matches K, otherwise False.
:rtype: boolean
"""
for N in self.K:
if CPESet2_3.cpe_superset(wfn, N):
return True
return False | def function[name_match, parameter[self, wfn]]:
constant[
Accepts a set of CPE Names K and a candidate CPE Name X. It returns
'True' if X matches any member of K, and 'False' otherwise.
:param CPESet self: A set of m known CPE Names K = {K1, K2, …, Km}.
:param CPE cpe: A candidate CPE Name X.
:returns: True if X matches K, otherwise False.
:rtype: boolean
]
for taget[name[N]] in starred[name[self].K] begin[:]
if call[name[CPESet2_3].cpe_superset, parameter[name[wfn], name[N]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[name_match] ( identifier[self] , identifier[wfn] ):
literal[string]
keyword[for] identifier[N] keyword[in] identifier[self] . identifier[K] :
keyword[if] identifier[CPESet2_3] . identifier[cpe_superset] ( identifier[wfn] , identifier[N] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def name_match(self, wfn):
"""
Accepts a set of CPE Names K and a candidate CPE Name X. It returns
'True' if X matches any member of K, and 'False' otherwise.
:param CPESet self: A set of m known CPE Names K = {K1, K2, …, Km}.
:param CPE cpe: A candidate CPE Name X.
:returns: True if X matches K, otherwise False.
:rtype: boolean
"""
for N in self.K:
if CPESet2_3.cpe_superset(wfn, N):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['N']]
return False |
def kdeconf(kde, conf=0.683, xmin=None, xmax=None, npts=500,
            shortest=True, conftol=0.001, return_max=False):
    """
    Returns desired confidence interval for provided KDE object
    """
    # Default the evaluation range to the span of the KDE's sample data.
    lo = kde.dataset.min() if xmin is None else xmin
    hi = kde.dataset.max() if xmax is None else xmax
    grid = np.linspace(lo, hi, npts)
    return conf_interval(grid, kde(grid), shortest=shortest, conf=conf,
                         conftol=conftol, return_max=return_max)
constant[
Returns desired confidence interval for provided KDE object
]
if compare[name[xmin] is constant[None]] begin[:]
variable[xmin] assign[=] call[name[kde].dataset.min, parameter[]]
if compare[name[xmax] is constant[None]] begin[:]
variable[xmax] assign[=] call[name[kde].dataset.max, parameter[]]
variable[x] assign[=] call[name[np].linspace, parameter[name[xmin], name[xmax], name[npts]]]
return[call[name[conf_interval], parameter[name[x], call[name[kde], parameter[name[x]]]]]] | keyword[def] identifier[kdeconf] ( identifier[kde] , identifier[conf] = literal[int] , identifier[xmin] = keyword[None] , identifier[xmax] = keyword[None] , identifier[npts] = literal[int] ,
identifier[shortest] = keyword[True] , identifier[conftol] = literal[int] , identifier[return_max] = keyword[False] ):
literal[string]
keyword[if] identifier[xmin] keyword[is] keyword[None] :
identifier[xmin] = identifier[kde] . identifier[dataset] . identifier[min] ()
keyword[if] identifier[xmax] keyword[is] keyword[None] :
identifier[xmax] = identifier[kde] . identifier[dataset] . identifier[max] ()
identifier[x] = identifier[np] . identifier[linspace] ( identifier[xmin] , identifier[xmax] , identifier[npts] )
keyword[return] identifier[conf_interval] ( identifier[x] , identifier[kde] ( identifier[x] ), identifier[shortest] = identifier[shortest] , identifier[conf] = identifier[conf] ,
identifier[conftol] = identifier[conftol] , identifier[return_max] = identifier[return_max] ) | def kdeconf(kde, conf=0.683, xmin=None, xmax=None, npts=500, shortest=True, conftol=0.001, return_max=False):
"""
Returns desired confidence interval for provided KDE object
"""
if xmin is None:
xmin = kde.dataset.min() # depends on [control=['if'], data=['xmin']]
if xmax is None:
xmax = kde.dataset.max() # depends on [control=['if'], data=['xmax']]
x = np.linspace(xmin, xmax, npts)
return conf_interval(x, kde(x), shortest=shortest, conf=conf, conftol=conftol, return_max=return_max) |
def validate_filter(self, key, filter_value):
    """
    Validate the filter key and value against the collection schema.

    :param key: property name of the filter (without the ``filter[...]``
        wrapper that appears in the schema)
    :param filter_value: value of the filter
    :returns: True if the key exists in the schema's 'instances' link
        properties and the value matches the declared type (and, for
        strings, the declared JSON format when present), otherwise False
    """
    found = False
    seek = u"filter[%s]" % key
    spec = None
    # Scan every 'instances' link; if the property is declared more than
    # once, a later declaration overwrites an earlier one (as before).
    # NOTE: loop variable renamed from `property`, which shadowed the builtin.
    for link in self.schema['links']:
        if link['rel'] == 'instances':
            for prop_name in link['properties']:
                if seek == prop_name:
                    spec = link['properties'][prop_name]
                    found = True
    if not found:
        return False
    ok = self._is_type(filter_value, spec['type'])
    # Strings may carry an additional JSON-schema "format" constraint.
    if ok is True and spec['type'] == 'string' and 'format' in spec:
        ok = self._validate_json_format(filter_value, spec)
    return ok
constant[
validate the filter key and value against the collection schema
:param key: property name
:param filter_value: value of the filter
:returns True if all is ok otherwise False
]
variable[ok] assign[=] constant[False]
variable[seek] assign[=] binary_operation[constant[filter[%s]] <ast.Mod object at 0x7da2590d6920> name[key]]
variable[value] assign[=] constant[None]
for taget[name[link]] in starred[call[name[self].schema][constant[links]]] begin[:]
if compare[call[name[link]][constant[rel]] equal[==] constant[instances]] begin[:]
for taget[name[property]] in starred[call[name[link]][constant[properties]]] begin[:]
if compare[name[seek] equal[==] name[property]] begin[:]
variable[value] assign[=] call[call[name[link]][constant[properties]]][name[property]]
variable[ok] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da18f09d060> begin[:]
return[constant[False]]
variable[ok] assign[=] call[name[self]._is_type, parameter[name[filter_value], call[name[value]][constant[type]]]]
if <ast.BoolOp object at 0x7da18f09c760> begin[:]
variable[ok] assign[=] call[name[self]._validate_json_format, parameter[name[filter_value], name[value]]]
return[name[ok]] | keyword[def] identifier[validate_filter] ( identifier[self] , identifier[key] , identifier[filter_value] ):
literal[string]
identifier[ok] = keyword[False]
identifier[seek] = literal[string] % identifier[key]
identifier[value] = keyword[None]
keyword[for] identifier[link] keyword[in] identifier[self] . identifier[schema] [ literal[string] ]:
keyword[if] identifier[link] [ literal[string] ]== literal[string] :
keyword[for] identifier[property] keyword[in] identifier[link] [ literal[string] ]:
keyword[if] identifier[seek] == identifier[property] :
identifier[value] = identifier[link] [ literal[string] ][ identifier[property] ]
identifier[ok] = keyword[True]
keyword[if] keyword[not] identifier[ok] :
keyword[return] keyword[False]
identifier[ok] = identifier[self] . identifier[_is_type] ( identifier[filter_value] , identifier[value] [ literal[string] ])
keyword[if] identifier[ok] keyword[is] keyword[True] keyword[and] identifier[value] [ literal[string] ]== literal[string] keyword[and] literal[string] keyword[in] identifier[value] . identifier[keys] ():
identifier[ok] = identifier[self] . identifier[_validate_json_format] ( identifier[filter_value] , identifier[value] )
keyword[return] identifier[ok] | def validate_filter(self, key, filter_value):
"""
validate the filter key and value against the collection schema
:param key: property name
:param filter_value: value of the filter
:returns True if all is ok otherwise False
"""
ok = False
seek = u'filter[%s]' % key
value = None
for link in self.schema['links']:
if link['rel'] == 'instances':
for property in link['properties']:
if seek == property:
value = link['properties'][property]
ok = True # depends on [control=['if'], data=['property']] # depends on [control=['for'], data=['property']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['link']]
if not ok:
return False # depends on [control=['if'], data=[]]
ok = self._is_type(filter_value, value['type'])
# if string with type add validation
if ok is True and value['type'] == 'string' and ('format' in value.keys()):
ok = self._validate_json_format(filter_value, value) # depends on [control=['if'], data=[]]
return ok |
def exit(self, status=0, message=None):
    """
    Delegates to `ArgumentParser.exit`
    """
    # Non-zero status means failure: record the message before exiting.
    if status:
        self.logger.error(message)
    parser = self.__parser__  # pylint: disable-msg=E1101
    if parser:
        parser.exit(status, message)
    else:
        sys.exit(status)
constant[
Delegates to `ArgumentParser.exit`
]
if name[status] begin[:]
call[name[self].logger.error, parameter[name[message]]]
if name[self].__parser__ begin[:]
call[name[self].__parser__.exit, parameter[name[status], name[message]]] | keyword[def] identifier[exit] ( identifier[self] , identifier[status] = literal[int] , identifier[message] = keyword[None] ):
literal[string]
keyword[if] identifier[status] :
identifier[self] . identifier[logger] . identifier[error] ( identifier[message] )
keyword[if] identifier[self] . identifier[__parser__] :
identifier[self] . identifier[__parser__] . identifier[exit] ( identifier[status] , identifier[message] )
keyword[else] :
identifier[sys] . identifier[exit] ( identifier[status] ) | def exit(self, status=0, message=None):
"""
Delegates to `ArgumentParser.exit`
"""
if status:
self.logger.error(message) # depends on [control=['if'], data=[]]
if self.__parser__: # pylint: disable-msg=E1101
self.__parser__.exit(status, message) # pylint: disable-msg=E1101 # depends on [control=['if'], data=[]]
else:
sys.exit(status) |
def make_report(storage_directory, old_pins, new_pins, do_update=False,
                version_mappings=None):
    """Create RST report from a list of projects/roles."""
    mappings = version_mappings or {}
    sections = []
    for repo_name, repo_url, commit_sha in new_pins:
        repo_map = mappings.get(repo_name, {})
        new_sha = repo_map.get(commit_sha, commit_sha)
        # Prepare our repo directory and clone the repo if needed. Only pull
        # if the user requests it.
        repo_dir = "{0}/{1}".format(storage_directory, repo_name)
        update_repo(repo_dir, repo_url, do_update)
        # Get the old SHA from the previous pins. If this pin didn't exist
        # in the previous OSA revision, skip it. This could happen with
        # newly-added projects and roles.
        try:
            old_sha = next(x[2] for x in old_pins if x[0] == repo_name)
        except Exception:
            continue
        else:
            old_sha = repo_map.get(old_sha, old_sha)
        # Loop through the commits and render our template.
        validate_commits(repo_dir, [old_sha, new_sha])
        commits = get_commits(repo_dir, old_sha, new_sha)
        template_vars = {
            'repo': repo_name,
            'commits': commits,
            'commit_base_url': get_commit_url(repo_url),
            'old_sha': old_sha,
            'new_sha': new_sha,
        }
        sections.append(render_template('offline-repo-changes.j2',
                                        template_vars))
    return "".join(sections)
constant[Create RST report from a list of projects/roles.]
variable[report] assign[=] constant[]
variable[version_mappings] assign[=] <ast.BoolOp object at 0x7da2054a66e0>
for taget[name[new_pin]] in starred[name[new_pins]] begin[:]
<ast.Tuple object at 0x7da2054a40a0> assign[=] name[new_pin]
variable[commit_sha] assign[=] call[call[name[version_mappings].get, parameter[name[repo_name], dictionary[[], []]]].get, parameter[name[commit_sha], name[commit_sha]]]
variable[repo_dir] assign[=] call[constant[{0}/{1}].format, parameter[name[storage_directory], name[repo_name]]]
call[name[update_repo], parameter[name[repo_dir], name[repo_url], name[do_update]]]
<ast.Try object at 0x7da2054a44f0>
call[name[validate_commits], parameter[name[repo_dir], list[[<ast.Name object at 0x7da2041dabf0>, <ast.Name object at 0x7da2041da4a0>]]]]
variable[commits] assign[=] call[name[get_commits], parameter[name[repo_dir], name[commit_sha_old], name[commit_sha]]]
variable[template_vars] assign[=] dictionary[[<ast.Constant object at 0x7da2041dbf10>, <ast.Constant object at 0x7da2041d89a0>, <ast.Constant object at 0x7da2041d8d90>, <ast.Constant object at 0x7da2041dadd0>, <ast.Constant object at 0x7da2041d8220>], [<ast.Name object at 0x7da2041d8b20>, <ast.Name object at 0x7da2041d9f30>, <ast.Call object at 0x7da2041d8850>, <ast.Name object at 0x7da2041dbfd0>, <ast.Name object at 0x7da2041db160>]]
variable[rst] assign[=] call[name[render_template], parameter[constant[offline-repo-changes.j2], name[template_vars]]]
<ast.AugAssign object at 0x7da20c7941f0>
return[name[report]] | keyword[def] identifier[make_report] ( identifier[storage_directory] , identifier[old_pins] , identifier[new_pins] , identifier[do_update] = keyword[False] ,
identifier[version_mappings] = keyword[None] ):
literal[string]
identifier[report] = literal[string]
identifier[version_mappings] = identifier[version_mappings] keyword[or] {}
keyword[for] identifier[new_pin] keyword[in] identifier[new_pins] :
identifier[repo_name] , identifier[repo_url] , identifier[commit_sha] = identifier[new_pin]
identifier[commit_sha] = identifier[version_mappings] . identifier[get] ( identifier[repo_name] ,{}
). identifier[get] ( identifier[commit_sha] , identifier[commit_sha] )
identifier[repo_dir] = literal[string] . identifier[format] ( identifier[storage_directory] , identifier[repo_name] )
identifier[update_repo] ( identifier[repo_dir] , identifier[repo_url] , identifier[do_update] )
keyword[try] :
identifier[commit_sha_old] = identifier[next] ( identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[old_pins] keyword[if] identifier[x] [ literal[int] ]== identifier[repo_name] )
keyword[except] identifier[Exception] :
keyword[continue]
keyword[else] :
identifier[commit_sha_old] = identifier[version_mappings] . identifier[get] ( identifier[repo_name] ,{}
). identifier[get] ( identifier[commit_sha_old] ,
identifier[commit_sha_old] )
identifier[validate_commits] ( identifier[repo_dir] ,[ identifier[commit_sha_old] , identifier[commit_sha] ])
identifier[commits] = identifier[get_commits] ( identifier[repo_dir] , identifier[commit_sha_old] , identifier[commit_sha] )
identifier[template_vars] ={
literal[string] : identifier[repo_name] ,
literal[string] : identifier[commits] ,
literal[string] : identifier[get_commit_url] ( identifier[repo_url] ),
literal[string] : identifier[commit_sha_old] ,
literal[string] : identifier[commit_sha]
}
identifier[rst] = identifier[render_template] ( literal[string] , identifier[template_vars] )
identifier[report] += identifier[rst]
keyword[return] identifier[report] | def make_report(storage_directory, old_pins, new_pins, do_update=False, version_mappings=None):
"""Create RST report from a list of projects/roles."""
report = ''
version_mappings = version_mappings or {}
for new_pin in new_pins:
(repo_name, repo_url, commit_sha) = new_pin
commit_sha = version_mappings.get(repo_name, {}).get(commit_sha, commit_sha)
# Prepare our repo directory and clone the repo if needed. Only pull
# if the user requests it.
repo_dir = '{0}/{1}'.format(storage_directory, repo_name)
update_repo(repo_dir, repo_url, do_update)
# Get the old SHA from the previous pins. If this pin didn't exist
# in the previous OSA revision, skip it. This could happen with newly-
# added projects and roles.
try:
commit_sha_old = next((x[2] for x in old_pins if x[0] == repo_name)) # depends on [control=['try'], data=[]]
except Exception:
continue # depends on [control=['except'], data=[]]
else:
commit_sha_old = version_mappings.get(repo_name, {}).get(commit_sha_old, commit_sha_old)
# Loop through the commits and render our template.
validate_commits(repo_dir, [commit_sha_old, commit_sha])
commits = get_commits(repo_dir, commit_sha_old, commit_sha)
template_vars = {'repo': repo_name, 'commits': commits, 'commit_base_url': get_commit_url(repo_url), 'old_sha': commit_sha_old, 'new_sha': commit_sha}
rst = render_template('offline-repo-changes.j2', template_vars)
report += rst # depends on [control=['for'], data=['new_pin']]
return report |
def _itemize(objs):
"""Recursive helper function for farray."""
if not isinstance(objs, collections.Sequence):
raise TypeError("expected a sequence of Function")
isseq = [isinstance(obj, collections.Sequence) for obj in objs]
if not any(isseq):
ftype = None
for obj in objs:
if ftype is None:
if isinstance(obj, BinaryDecisionDiagram):
ftype = BinaryDecisionDiagram
elif isinstance(obj, Expression):
ftype = Expression
elif isinstance(obj, TruthTable):
ftype = TruthTable
else:
raise TypeError("expected valid Function inputs")
elif not isinstance(obj, ftype):
raise ValueError("expected uniform Function types")
return list(objs), ((0, len(objs)), ), ftype
elif all(isseq):
items = list()
shape = None
ftype = None
for obj in objs:
_items, _shape, _ftype = _itemize(obj)
if shape is None:
shape = _shape
elif shape != _shape:
raise ValueError("expected uniform farray dimensions")
if ftype is None:
ftype = _ftype
elif ftype != _ftype:
raise ValueError("expected uniform Function types")
items += _items
shape = ((0, len(objs)), ) + shape
return items, shape, ftype
else:
raise ValueError("expected uniform farray dimensions") | def function[_itemize, parameter[objs]]:
constant[Recursive helper function for farray.]
if <ast.UnaryOp object at 0x7da1b0c36320> begin[:]
<ast.Raise object at 0x7da1b0c34e20>
variable[isseq] assign[=] <ast.ListComp object at 0x7da1b0c37310>
if <ast.UnaryOp object at 0x7da1b0c37ee0> begin[:]
variable[ftype] assign[=] constant[None]
for taget[name[obj]] in starred[name[objs]] begin[:]
if compare[name[ftype] is constant[None]] begin[:]
if call[name[isinstance], parameter[name[obj], name[BinaryDecisionDiagram]]] begin[:]
variable[ftype] assign[=] name[BinaryDecisionDiagram]
return[tuple[[<ast.Call object at 0x7da1b0c37520>, <ast.Tuple object at 0x7da1b0c35b40>, <ast.Name object at 0x7da1b0c36530>]]] | keyword[def] identifier[_itemize] ( identifier[objs] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[objs] , identifier[collections] . identifier[Sequence] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[isseq] =[ identifier[isinstance] ( identifier[obj] , identifier[collections] . identifier[Sequence] ) keyword[for] identifier[obj] keyword[in] identifier[objs] ]
keyword[if] keyword[not] identifier[any] ( identifier[isseq] ):
identifier[ftype] = keyword[None]
keyword[for] identifier[obj] keyword[in] identifier[objs] :
keyword[if] identifier[ftype] keyword[is] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[BinaryDecisionDiagram] ):
identifier[ftype] = identifier[BinaryDecisionDiagram]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[Expression] ):
identifier[ftype] = identifier[Expression]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[TruthTable] ):
identifier[ftype] = identifier[TruthTable]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[obj] , identifier[ftype] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[list] ( identifier[objs] ),(( literal[int] , identifier[len] ( identifier[objs] )),), identifier[ftype]
keyword[elif] identifier[all] ( identifier[isseq] ):
identifier[items] = identifier[list] ()
identifier[shape] = keyword[None]
identifier[ftype] = keyword[None]
keyword[for] identifier[obj] keyword[in] identifier[objs] :
identifier[_items] , identifier[_shape] , identifier[_ftype] = identifier[_itemize] ( identifier[obj] )
keyword[if] identifier[shape] keyword[is] keyword[None] :
identifier[shape] = identifier[_shape]
keyword[elif] identifier[shape] != identifier[_shape] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[ftype] keyword[is] keyword[None] :
identifier[ftype] = identifier[_ftype]
keyword[elif] identifier[ftype] != identifier[_ftype] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[items] += identifier[_items]
identifier[shape] =(( literal[int] , identifier[len] ( identifier[objs] )),)+ identifier[shape]
keyword[return] identifier[items] , identifier[shape] , identifier[ftype]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def _itemize(objs):
"""Recursive helper function for farray."""
if not isinstance(objs, collections.Sequence):
raise TypeError('expected a sequence of Function') # depends on [control=['if'], data=[]]
isseq = [isinstance(obj, collections.Sequence) for obj in objs]
if not any(isseq):
ftype = None
for obj in objs:
if ftype is None:
if isinstance(obj, BinaryDecisionDiagram):
ftype = BinaryDecisionDiagram # depends on [control=['if'], data=[]]
elif isinstance(obj, Expression):
ftype = Expression # depends on [control=['if'], data=[]]
elif isinstance(obj, TruthTable):
ftype = TruthTable # depends on [control=['if'], data=[]]
else:
raise TypeError('expected valid Function inputs') # depends on [control=['if'], data=['ftype']]
elif not isinstance(obj, ftype):
raise ValueError('expected uniform Function types') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['obj']]
return (list(objs), ((0, len(objs)),), ftype) # depends on [control=['if'], data=[]]
elif all(isseq):
items = list()
shape = None
ftype = None
for obj in objs:
(_items, _shape, _ftype) = _itemize(obj)
if shape is None:
shape = _shape # depends on [control=['if'], data=['shape']]
elif shape != _shape:
raise ValueError('expected uniform farray dimensions') # depends on [control=['if'], data=[]]
if ftype is None:
ftype = _ftype # depends on [control=['if'], data=['ftype']]
elif ftype != _ftype:
raise ValueError('expected uniform Function types') # depends on [control=['if'], data=[]]
items += _items # depends on [control=['for'], data=['obj']]
shape = ((0, len(objs)),) + shape
return (items, shape, ftype) # depends on [control=['if'], data=[]]
else:
raise ValueError('expected uniform farray dimensions') |
def _validate_outgroup(self, outgroup):
"""All voucher codes in our datasets have dashes converted to underscores."""
if outgroup:
outgroup = outgroup.replace("-", "_")
good_outgroup = False
for seq_record in self.seq_records:
if seq_record.voucher_code == outgroup:
good_outgroup = True
break
if good_outgroup:
self.outgroup = outgroup
else:
raise ValueError("The given outgroup {0!r} cannot be found in the "
"input sequence records.".format(outgroup))
else:
self.outgroup = None | def function[_validate_outgroup, parameter[self, outgroup]]:
constant[All voucher codes in our datasets have dashes converted to underscores.]
if name[outgroup] begin[:]
variable[outgroup] assign[=] call[name[outgroup].replace, parameter[constant[-], constant[_]]]
variable[good_outgroup] assign[=] constant[False]
for taget[name[seq_record]] in starred[name[self].seq_records] begin[:]
if compare[name[seq_record].voucher_code equal[==] name[outgroup]] begin[:]
variable[good_outgroup] assign[=] constant[True]
break
if name[good_outgroup] begin[:]
name[self].outgroup assign[=] name[outgroup] | keyword[def] identifier[_validate_outgroup] ( identifier[self] , identifier[outgroup] ):
literal[string]
keyword[if] identifier[outgroup] :
identifier[outgroup] = identifier[outgroup] . identifier[replace] ( literal[string] , literal[string] )
identifier[good_outgroup] = keyword[False]
keyword[for] identifier[seq_record] keyword[in] identifier[self] . identifier[seq_records] :
keyword[if] identifier[seq_record] . identifier[voucher_code] == identifier[outgroup] :
identifier[good_outgroup] = keyword[True]
keyword[break]
keyword[if] identifier[good_outgroup] :
identifier[self] . identifier[outgroup] = identifier[outgroup]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[outgroup] ))
keyword[else] :
identifier[self] . identifier[outgroup] = keyword[None] | def _validate_outgroup(self, outgroup):
"""All voucher codes in our datasets have dashes converted to underscores."""
if outgroup:
outgroup = outgroup.replace('-', '_')
good_outgroup = False
for seq_record in self.seq_records:
if seq_record.voucher_code == outgroup:
good_outgroup = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['seq_record']]
if good_outgroup:
self.outgroup = outgroup # depends on [control=['if'], data=[]]
else:
raise ValueError('The given outgroup {0!r} cannot be found in the input sequence records.'.format(outgroup)) # depends on [control=['if'], data=[]]
else:
self.outgroup = None |
def selection_dialog(self, courses):
    """
    opens a curses/picker based interface to select courses that should be downloaded.
    """
    # Pre-check the courses that were selected in a previous run.
    chosen_ids = self._settings["selected_courses"]
    preselected = [c for c in courses if c.course.id in chosen_ids]
    selection = Picker(
        title="Select courses to download",
        options=courses,
        checked=preselected).getSelected()
    # An empty selection (e.g. dialog cancelled) leaves settings untouched.
    if selection:
        self._settings["selected_courses"] = [c.course.id for c in selection]
        self.save()
        log.info("Updated course selection")
constant[
opens a curses/picker based interface to select courses that should be downloaded.
]
variable[selected] assign[=] call[name[list], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da20e9631f0>, name[courses]]]]]
variable[selection] assign[=] call[call[name[Picker], parameter[]].getSelected, parameter[]]
if name[selection] begin[:]
call[name[self]._settings][constant[selected_courses]] assign[=] call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da20e961f60>, name[selection]]]]]
call[name[self].save, parameter[]]
call[name[log].info, parameter[constant[Updated course selection]]] | keyword[def] identifier[selection_dialog] ( identifier[self] , identifier[courses] ):
literal[string]
identifier[selected] = identifier[list] ( identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] . identifier[course] . identifier[id] keyword[in] identifier[self] . identifier[_settings] [ literal[string] ], identifier[courses] ))
identifier[selection] = identifier[Picker] (
identifier[title] = literal[string] ,
identifier[options] = identifier[courses] ,
identifier[checked] = identifier[selected] ). identifier[getSelected] ()
keyword[if] identifier[selection] :
identifier[self] . identifier[_settings] [ literal[string] ]= identifier[list] ( identifier[map] ( keyword[lambda] identifier[x] : identifier[x] . identifier[course] . identifier[id] , identifier[selection] ))
identifier[self] . identifier[save] ()
identifier[log] . identifier[info] ( literal[string] ) | def selection_dialog(self, courses):
"""
opens a curses/picker based interface to select courses that should be downloaded.
"""
selected = list(filter(lambda x: x.course.id in self._settings['selected_courses'], courses))
selection = Picker(title='Select courses to download', options=courses, checked=selected).getSelected()
if selection:
self._settings['selected_courses'] = list(map(lambda x: x.course.id, selection))
self.save()
log.info('Updated course selection') # depends on [control=['if'], data=[]] |
def enable_deploy_key(self, project, key_id):
    """
    Enables a deploy key for a project.
    >>> gitlab = Gitlab(host='http://localhost:10080', verify_ssl=False)
    >>> gitlab.login(user='root', password='5iveL!fe')
    >>> gitlab.enable_deploy_key(15, 5)
    :param project: The ID or URL-encoded path of the project owned by the authenticated user
    :param key_id: The ID of the deploy key
    :return: A dictionary containing deploy key details
    :raise: HttpError: If invalid response returned
    """
    path_template = '/projects/{project}/deploy_keys/{key_id}/enable'
    endpoint = path_template.format(project=project, key_id=key_id)
    return self.post(endpoint, default_response={})
constant[
Enables a deploy key for a project.
>>> gitlab = Gitlab(host='http://localhost:10080', verify_ssl=False)
>>> gitlab.login(user='root', password='5iveL!fe')
>>> gitlab.enable_deploy_key(15, 5)
:param project: The ID or URL-encoded path of the project owned by the authenticated user
:param key_id: The ID of the deploy key
:return: A dictionary containing deploy key details
:raise: HttpError: If invalid response returned
]
variable[url] assign[=] call[constant[/projects/{project}/deploy_keys/{key_id}/enable].format, parameter[]]
return[call[name[self].post, parameter[name[url]]]] | keyword[def] identifier[enable_deploy_key] ( identifier[self] , identifier[project] , identifier[key_id] ):
literal[string]
identifier[url] = literal[string] . identifier[format] (
identifier[project] = identifier[project] , identifier[key_id] = identifier[key_id] )
keyword[return] identifier[self] . identifier[post] ( identifier[url] , identifier[default_response] ={}) | def enable_deploy_key(self, project, key_id):
"""
Enables a deploy key for a project.
>>> gitlab = Gitlab(host='http://localhost:10080', verify_ssl=False)
>>> gitlab.login(user='root', password='5iveL!fe')
>>> gitlab.enable_deploy_key(15, 5)
:param project: The ID or URL-encoded path of the project owned by the authenticated user
:param key_id: The ID of the deploy key
:return: A dictionary containing deploy key details
:raise: HttpError: If invalid response returned
"""
url = '/projects/{project}/deploy_keys/{key_id}/enable'.format(project=project, key_id=key_id)
return self.post(url, default_response={}) |
def log_variable_sizes(var_list=None, tag=None, verbose=False):
    """Log the sizes and shapes of variables, and the total size.
    Args:
      var_list: a list of variables; defaults to trainable_variables
      tag: a string; defaults to "Trainable Variables"
      verbose: bool, if True, log every weight; otherwise, log total size only.
    """
    variables = tf.trainable_variables() if var_list is None else var_list
    label = "Trainable Variables" if tag is None else tag
    if not variables:
        return
    # Index by name so output is deterministic (sorted by variable name).
    by_name = {v.name: v for v in variables}
    total = 0
    for _, var in sorted(by_name.items()):
        num_elements = int(np.prod(np.array(var.shape.as_list())))
        if verbose:
            tf.logging.info("Weight %s\tshape %s\tsize %d",
                            var.name[:-2].ljust(80),
                            str(var.shape).ljust(20), num_elements)
        total += num_elements
    tf.logging.info("%s Total size: %d", label, total)
constant[Log the sizes and shapes of variables, and the total size.
Args:
var_list: a list of variables; defaults to trainable_variables
tag: a string; defaults to "Trainable Variables"
verbose: bool, if True, log every weight; otherwise, log total size only.
]
if compare[name[var_list] is constant[None]] begin[:]
variable[var_list] assign[=] call[name[tf].trainable_variables, parameter[]]
if compare[name[tag] is constant[None]] begin[:]
variable[tag] assign[=] constant[Trainable Variables]
if <ast.UnaryOp object at 0x7da204566f80> begin[:]
return[None]
variable[name_to_var] assign[=] <ast.DictComp object at 0x7da204565900>
variable[total_size] assign[=] constant[0]
for taget[name[v_name]] in starred[call[name[sorted], parameter[call[name[list], parameter[name[name_to_var]]]]]] begin[:]
variable[v] assign[=] call[name[name_to_var]][name[v_name]]
variable[v_size] assign[=] call[name[int], parameter[call[name[np].prod, parameter[call[name[np].array, parameter[call[name[v].shape.as_list, parameter[]]]]]]]]
if name[verbose] begin[:]
call[name[tf].logging.info, parameter[constant[Weight %s shape %s size %d], call[call[name[v].name][<ast.Slice object at 0x7da1b2346200>].ljust, parameter[constant[80]]], call[call[name[str], parameter[name[v].shape]].ljust, parameter[constant[20]]], name[v_size]]]
<ast.AugAssign object at 0x7da1b23471f0>
call[name[tf].logging.info, parameter[constant[%s Total size: %d], name[tag], name[total_size]]] | keyword[def] identifier[log_variable_sizes] ( identifier[var_list] = keyword[None] , identifier[tag] = keyword[None] , identifier[verbose] = keyword[False] ):
literal[string]
keyword[if] identifier[var_list] keyword[is] keyword[None] :
identifier[var_list] = identifier[tf] . identifier[trainable_variables] ()
keyword[if] identifier[tag] keyword[is] keyword[None] :
identifier[tag] = literal[string]
keyword[if] keyword[not] identifier[var_list] :
keyword[return]
identifier[name_to_var] ={ identifier[v] . identifier[name] : identifier[v] keyword[for] identifier[v] keyword[in] identifier[var_list] }
identifier[total_size] = literal[int]
keyword[for] identifier[v_name] keyword[in] identifier[sorted] ( identifier[list] ( identifier[name_to_var] )):
identifier[v] = identifier[name_to_var] [ identifier[v_name] ]
identifier[v_size] = identifier[int] ( identifier[np] . identifier[prod] ( identifier[np] . identifier[array] ( identifier[v] . identifier[shape] . identifier[as_list] ())))
keyword[if] identifier[verbose] :
identifier[tf] . identifier[logging] . identifier[info] ( literal[string] ,
identifier[v] . identifier[name] [:- literal[int] ]. identifier[ljust] ( literal[int] ),
identifier[str] ( identifier[v] . identifier[shape] ). identifier[ljust] ( literal[int] ), identifier[v_size] )
identifier[total_size] += identifier[v_size]
identifier[tf] . identifier[logging] . identifier[info] ( literal[string] , identifier[tag] , identifier[total_size] ) | def log_variable_sizes(var_list=None, tag=None, verbose=False):
"""Log the sizes and shapes of variables, and the total size.
Args:
var_list: a list of variables; defaults to trainable_variables
tag: a string; defaults to "Trainable Variables"
verbose: bool, if True, log every weight; otherwise, log total size only.
"""
if var_list is None:
var_list = tf.trainable_variables() # depends on [control=['if'], data=['var_list']]
if tag is None:
tag = 'Trainable Variables' # depends on [control=['if'], data=['tag']]
if not var_list:
return # depends on [control=['if'], data=[]]
name_to_var = {v.name: v for v in var_list}
total_size = 0
for v_name in sorted(list(name_to_var)):
v = name_to_var[v_name]
v_size = int(np.prod(np.array(v.shape.as_list())))
if verbose:
tf.logging.info('Weight %s\tshape %s\tsize %d', v.name[:-2].ljust(80), str(v.shape).ljust(20), v_size) # depends on [control=['if'], data=[]]
total_size += v_size # depends on [control=['for'], data=['v_name']]
tf.logging.info('%s Total size: %d', tag, total_size) |
def load(steps, reload=False):
"""
safely load steps in place, excluding those that fail
Args:
steps: the steps to load
"""
# work on collections by default for fewer isinstance() calls per call to load()
if reload:
_STEP_CACHE.clear()
if callable(steps):
steps = steps()
if not isinstance(steps, collections.Iterable):
return load([steps])[0]
loaded = []
for s in steps:
digest = s._digest
if digest in _STEP_CACHE:
loaded.append(_STEP_CACHE[digest])
else:
try:
s.load()
_STEP_CACHE[digest] = s
loaded.append(s)
except(Exception):
logging.warn('Error during step load:\n%s' %
util.indent(traceback.format_exc()))
return loaded | def function[load, parameter[steps, reload]]:
constant[
safely load steps in place, excluding those that fail
Args:
steps: the steps to load
]
if name[reload] begin[:]
call[name[_STEP_CACHE].clear, parameter[]]
if call[name[callable], parameter[name[steps]]] begin[:]
variable[steps] assign[=] call[name[steps], parameter[]]
if <ast.UnaryOp object at 0x7da1b242bdf0> begin[:]
return[call[call[name[load], parameter[list[[<ast.Name object at 0x7da1b2428670>]]]]][constant[0]]]
variable[loaded] assign[=] list[[]]
for taget[name[s]] in starred[name[steps]] begin[:]
variable[digest] assign[=] name[s]._digest
if compare[name[digest] in name[_STEP_CACHE]] begin[:]
call[name[loaded].append, parameter[call[name[_STEP_CACHE]][name[digest]]]]
return[name[loaded]] | keyword[def] identifier[load] ( identifier[steps] , identifier[reload] = keyword[False] ):
literal[string]
keyword[if] identifier[reload] :
identifier[_STEP_CACHE] . identifier[clear] ()
keyword[if] identifier[callable] ( identifier[steps] ):
identifier[steps] = identifier[steps] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[steps] , identifier[collections] . identifier[Iterable] ):
keyword[return] identifier[load] ([ identifier[steps] ])[ literal[int] ]
identifier[loaded] =[]
keyword[for] identifier[s] keyword[in] identifier[steps] :
identifier[digest] = identifier[s] . identifier[_digest]
keyword[if] identifier[digest] keyword[in] identifier[_STEP_CACHE] :
identifier[loaded] . identifier[append] ( identifier[_STEP_CACHE] [ identifier[digest] ])
keyword[else] :
keyword[try] :
identifier[s] . identifier[load] ()
identifier[_STEP_CACHE] [ identifier[digest] ]= identifier[s]
identifier[loaded] . identifier[append] ( identifier[s] )
keyword[except] ( identifier[Exception] ):
identifier[logging] . identifier[warn] ( literal[string] %
identifier[util] . identifier[indent] ( identifier[traceback] . identifier[format_exc] ()))
keyword[return] identifier[loaded] | def load(steps, reload=False):
"""
safely load steps in place, excluding those that fail
Args:
steps: the steps to load
"""
# work on collections by default for fewer isinstance() calls per call to load()
if reload:
_STEP_CACHE.clear() # depends on [control=['if'], data=[]]
if callable(steps):
steps = steps() # depends on [control=['if'], data=[]]
if not isinstance(steps, collections.Iterable):
return load([steps])[0] # depends on [control=['if'], data=[]]
loaded = []
for s in steps:
digest = s._digest
if digest in _STEP_CACHE:
loaded.append(_STEP_CACHE[digest]) # depends on [control=['if'], data=['digest', '_STEP_CACHE']]
else:
try:
s.load()
_STEP_CACHE[digest] = s
loaded.append(s) # depends on [control=['try'], data=[]]
except Exception:
logging.warn('Error during step load:\n%s' % util.indent(traceback.format_exc())) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['s']]
return loaded |
def get_compositions(self):
"""Gets the composition list resulting from a search.
return: (osid.repository.CompositionList) - the composition list
raise: IllegalState - the list has already been retrieved
*compliance: mandatory -- This method must be implemented.*
"""
if self.retrieved:
raise errors.IllegalState('List has already been retrieved.')
self.retrieved = True
return objects.CompositionList(self._results, runtime=self._runtime) | def function[get_compositions, parameter[self]]:
constant[Gets the composition list resulting from a search.
return: (osid.repository.CompositionList) - the composition list
raise: IllegalState - the list has already been retrieved
*compliance: mandatory -- This method must be implemented.*
]
if name[self].retrieved begin[:]
<ast.Raise object at 0x7da20c7c9450>
name[self].retrieved assign[=] constant[True]
return[call[name[objects].CompositionList, parameter[name[self]._results]]] | keyword[def] identifier[get_compositions] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[retrieved] :
keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] )
identifier[self] . identifier[retrieved] = keyword[True]
keyword[return] identifier[objects] . identifier[CompositionList] ( identifier[self] . identifier[_results] , identifier[runtime] = identifier[self] . identifier[_runtime] ) | def get_compositions(self):
"""Gets the composition list resulting from a search.
return: (osid.repository.CompositionList) - the composition list
raise: IllegalState - the list has already been retrieved
*compliance: mandatory -- This method must be implemented.*
"""
if self.retrieved:
raise errors.IllegalState('List has already been retrieved.') # depends on [control=['if'], data=[]]
self.retrieved = True
return objects.CompositionList(self._results, runtime=self._runtime) |
def end_of_chunk(prev_tag, tag, prev_type, type_):
"""Checks if a chunk ended between the previous and current word.
Args:
prev_tag: previous chunk tag.
tag: current chunk tag.
prev_type: previous type.
type_: current type.
Returns:
chunk_end: boolean.
"""
chunk_end = False
if prev_tag == 'E': chunk_end = True
if prev_tag == 'S': chunk_end = True
if prev_tag == 'B' and tag == 'B': chunk_end = True
if prev_tag == 'B' and tag == 'S': chunk_end = True
if prev_tag == 'B' and tag == 'O': chunk_end = True
if prev_tag == 'I' and tag == 'B': chunk_end = True
if prev_tag == 'I' and tag == 'S': chunk_end = True
if prev_tag == 'I' and tag == 'O': chunk_end = True
if prev_tag != 'O' and prev_tag != '.' and prev_type != type_:
chunk_end = True
return chunk_end | def function[end_of_chunk, parameter[prev_tag, tag, prev_type, type_]]:
constant[Checks if a chunk ended between the previous and current word.
Args:
prev_tag: previous chunk tag.
tag: current chunk tag.
prev_type: previous type.
type_: current type.
Returns:
chunk_end: boolean.
]
variable[chunk_end] assign[=] constant[False]
if compare[name[prev_tag] equal[==] constant[E]] begin[:]
variable[chunk_end] assign[=] constant[True]
if compare[name[prev_tag] equal[==] constant[S]] begin[:]
variable[chunk_end] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b07ce6e0> begin[:]
variable[chunk_end] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b07cf820> begin[:]
variable[chunk_end] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b07cc6d0> begin[:]
variable[chunk_end] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b07cf190> begin[:]
variable[chunk_end] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b07cd090> begin[:]
variable[chunk_end] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b07cc8b0> begin[:]
variable[chunk_end] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b07cc070> begin[:]
variable[chunk_end] assign[=] constant[True]
return[name[chunk_end]] | keyword[def] identifier[end_of_chunk] ( identifier[prev_tag] , identifier[tag] , identifier[prev_type] , identifier[type_] ):
literal[string]
identifier[chunk_end] = keyword[False]
keyword[if] identifier[prev_tag] == literal[string] : identifier[chunk_end] = keyword[True]
keyword[if] identifier[prev_tag] == literal[string] : identifier[chunk_end] = keyword[True]
keyword[if] identifier[prev_tag] == literal[string] keyword[and] identifier[tag] == literal[string] : identifier[chunk_end] = keyword[True]
keyword[if] identifier[prev_tag] == literal[string] keyword[and] identifier[tag] == literal[string] : identifier[chunk_end] = keyword[True]
keyword[if] identifier[prev_tag] == literal[string] keyword[and] identifier[tag] == literal[string] : identifier[chunk_end] = keyword[True]
keyword[if] identifier[prev_tag] == literal[string] keyword[and] identifier[tag] == literal[string] : identifier[chunk_end] = keyword[True]
keyword[if] identifier[prev_tag] == literal[string] keyword[and] identifier[tag] == literal[string] : identifier[chunk_end] = keyword[True]
keyword[if] identifier[prev_tag] == literal[string] keyword[and] identifier[tag] == literal[string] : identifier[chunk_end] = keyword[True]
keyword[if] identifier[prev_tag] != literal[string] keyword[and] identifier[prev_tag] != literal[string] keyword[and] identifier[prev_type] != identifier[type_] :
identifier[chunk_end] = keyword[True]
keyword[return] identifier[chunk_end] | def end_of_chunk(prev_tag, tag, prev_type, type_):
"""Checks if a chunk ended between the previous and current word.
Args:
prev_tag: previous chunk tag.
tag: current chunk tag.
prev_type: previous type.
type_: current type.
Returns:
chunk_end: boolean.
"""
chunk_end = False
if prev_tag == 'E':
chunk_end = True # depends on [control=['if'], data=[]]
if prev_tag == 'S':
chunk_end = True # depends on [control=['if'], data=[]]
if prev_tag == 'B' and tag == 'B':
chunk_end = True # depends on [control=['if'], data=[]]
if prev_tag == 'B' and tag == 'S':
chunk_end = True # depends on [control=['if'], data=[]]
if prev_tag == 'B' and tag == 'O':
chunk_end = True # depends on [control=['if'], data=[]]
if prev_tag == 'I' and tag == 'B':
chunk_end = True # depends on [control=['if'], data=[]]
if prev_tag == 'I' and tag == 'S':
chunk_end = True # depends on [control=['if'], data=[]]
if prev_tag == 'I' and tag == 'O':
chunk_end = True # depends on [control=['if'], data=[]]
if prev_tag != 'O' and prev_tag != '.' and (prev_type != type_):
chunk_end = True # depends on [control=['if'], data=[]]
return chunk_end |
def publish_pushdb_changes_to_remote_scm(self, pushdb_file, coordinate, tag_name, tag_message,
postscript=None):
"""Push pushdb changes to the remote scm repository, and then tag the commit if it succeeds."""
self._add_pushdb(pushdb_file)
self.commit_pushdb(coordinate, postscript=postscript)
self._push_and_tag_changes(
tag_name=tag_name,
tag_message='{message}{postscript}'.format(message=tag_message, postscript=postscript or '')
) | def function[publish_pushdb_changes_to_remote_scm, parameter[self, pushdb_file, coordinate, tag_name, tag_message, postscript]]:
constant[Push pushdb changes to the remote scm repository, and then tag the commit if it succeeds.]
call[name[self]._add_pushdb, parameter[name[pushdb_file]]]
call[name[self].commit_pushdb, parameter[name[coordinate]]]
call[name[self]._push_and_tag_changes, parameter[]] | keyword[def] identifier[publish_pushdb_changes_to_remote_scm] ( identifier[self] , identifier[pushdb_file] , identifier[coordinate] , identifier[tag_name] , identifier[tag_message] ,
identifier[postscript] = keyword[None] ):
literal[string]
identifier[self] . identifier[_add_pushdb] ( identifier[pushdb_file] )
identifier[self] . identifier[commit_pushdb] ( identifier[coordinate] , identifier[postscript] = identifier[postscript] )
identifier[self] . identifier[_push_and_tag_changes] (
identifier[tag_name] = identifier[tag_name] ,
identifier[tag_message] = literal[string] . identifier[format] ( identifier[message] = identifier[tag_message] , identifier[postscript] = identifier[postscript] keyword[or] literal[string] )
) | def publish_pushdb_changes_to_remote_scm(self, pushdb_file, coordinate, tag_name, tag_message, postscript=None):
"""Push pushdb changes to the remote scm repository, and then tag the commit if it succeeds."""
self._add_pushdb(pushdb_file)
self.commit_pushdb(coordinate, postscript=postscript)
self._push_and_tag_changes(tag_name=tag_name, tag_message='{message}{postscript}'.format(message=tag_message, postscript=postscript or '')) |
def get_highest_numeric_score(self):
"""Gets the highest number in a numeric grading system.
return: (decimal) - the highest number
raise: IllegalState - ``is_based_on_grades()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
if self.is_based_on_grades():
raise errors.IllegalState('This GradeSystem is based on grades')
if self._my_map['highestNumericScore'] is None:
return None
else:
return Decimal(str(self._my_map['highestNumericScore'])) | def function[get_highest_numeric_score, parameter[self]]:
constant[Gets the highest number in a numeric grading system.
return: (decimal) - the highest number
raise: IllegalState - ``is_based_on_grades()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
]
if call[name[self].is_based_on_grades, parameter[]] begin[:]
<ast.Raise object at 0x7da20c7c88b0>
if compare[call[name[self]._my_map][constant[highestNumericScore]] is constant[None]] begin[:]
return[constant[None]] | keyword[def] identifier[get_highest_numeric_score] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[is_based_on_grades] ():
keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] )
keyword[if] identifier[self] . identifier[_my_map] [ literal[string] ] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[else] :
keyword[return] identifier[Decimal] ( identifier[str] ( identifier[self] . identifier[_my_map] [ literal[string] ])) | def get_highest_numeric_score(self):
"""Gets the highest number in a numeric grading system.
return: (decimal) - the highest number
raise: IllegalState - ``is_based_on_grades()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
if self.is_based_on_grades():
raise errors.IllegalState('This GradeSystem is based on grades') # depends on [control=['if'], data=[]]
if self._my_map['highestNumericScore'] is None:
return None # depends on [control=['if'], data=[]]
else:
return Decimal(str(self._my_map['highestNumericScore'])) |
def lv_present(name,
vgname=None,
size=None,
extents=None,
snapshot=None,
pv='',
thinvolume=False,
thinpool=False,
force=False,
**kwargs):
'''
Create a new Logical Volume
name
The name of the Logical Volume
vgname
The name of the Volume Group on which the Logical Volume resides
size
The initial size of the Logical Volume
extents
The number of logical extents to allocate
snapshot
The name of the snapshot
pv
The Physical Volume to use
kwargs
Any supported options to lvcreate. See
:mod:`linux_lvm <salt.modules.linux_lvm>` for more details.
.. versionadded:: to_complete
thinvolume
Logical Volume is thinly provisioned
thinpool
Logical Volume is a thin pool
.. versionadded:: 2018.3.0
force
Assume yes to all prompts
'''
ret = {'changes': {},
'comment': '',
'name': name,
'result': True}
_snapshot = None
if snapshot:
_snapshot = name
name = snapshot
if thinvolume:
lvpath = '/dev/{0}/{1}'.format(vgname.split('/')[0], name)
else:
lvpath = '/dev/{0}/{1}'.format(vgname, name)
if __salt__['lvm.lvdisplay'](lvpath, quiet=True):
ret['comment'] = 'Logical Volume {0} already present'.format(name)
elif __opts__['test']:
ret['comment'] = 'Logical Volume {0} is set to be created'.format(name)
ret['result'] = None
return ret
else:
changes = __salt__['lvm.lvcreate'](name,
vgname,
size=size,
extents=extents,
snapshot=_snapshot,
pv=pv,
thinvolume=thinvolume,
thinpool=thinpool,
force=force,
**kwargs)
if __salt__['lvm.lvdisplay'](lvpath):
ret['comment'] = 'Created Logical Volume {0}'.format(name)
ret['changes']['created'] = changes
else:
ret['comment'] = 'Failed to create Logical Volume {0}. Error: {1}'.format(name, changes)
ret['result'] = False
return ret | def function[lv_present, parameter[name, vgname, size, extents, snapshot, pv, thinvolume, thinpool, force]]:
constant[
Create a new Logical Volume
name
The name of the Logical Volume
vgname
The name of the Volume Group on which the Logical Volume resides
size
The initial size of the Logical Volume
extents
The number of logical extents to allocate
snapshot
The name of the snapshot
pv
The Physical Volume to use
kwargs
Any supported options to lvcreate. See
:mod:`linux_lvm <salt.modules.linux_lvm>` for more details.
.. versionadded:: to_complete
thinvolume
Logical Volume is thinly provisioned
thinpool
Logical Volume is a thin pool
.. versionadded:: 2018.3.0
force
Assume yes to all prompts
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b20ed960>, <ast.Constant object at 0x7da1b20edc30>, <ast.Constant object at 0x7da1b20ec910>, <ast.Constant object at 0x7da1b20ecfa0>], [<ast.Dict object at 0x7da1b20ed9c0>, <ast.Constant object at 0x7da1b20eded0>, <ast.Name object at 0x7da1b20ed2d0>, <ast.Constant object at 0x7da1b20ec1c0>]]
variable[_snapshot] assign[=] constant[None]
if name[snapshot] begin[:]
variable[_snapshot] assign[=] name[name]
variable[name] assign[=] name[snapshot]
if name[thinvolume] begin[:]
variable[lvpath] assign[=] call[constant[/dev/{0}/{1}].format, parameter[call[call[name[vgname].split, parameter[constant[/]]]][constant[0]], name[name]]]
if call[call[name[__salt__]][constant[lvm.lvdisplay]], parameter[name[lvpath]]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Logical Volume {0} already present].format, parameter[name[name]]]
return[name[ret]] | keyword[def] identifier[lv_present] ( identifier[name] ,
identifier[vgname] = keyword[None] ,
identifier[size] = keyword[None] ,
identifier[extents] = keyword[None] ,
identifier[snapshot] = keyword[None] ,
identifier[pv] = literal[string] ,
identifier[thinvolume] = keyword[False] ,
identifier[thinpool] = keyword[False] ,
identifier[force] = keyword[False] ,
** identifier[kwargs] ):
literal[string]
identifier[ret] ={ literal[string] :{},
literal[string] : literal[string] ,
literal[string] : identifier[name] ,
literal[string] : keyword[True] }
identifier[_snapshot] = keyword[None]
keyword[if] identifier[snapshot] :
identifier[_snapshot] = identifier[name]
identifier[name] = identifier[snapshot]
keyword[if] identifier[thinvolume] :
identifier[lvpath] = literal[string] . identifier[format] ( identifier[vgname] . identifier[split] ( literal[string] )[ literal[int] ], identifier[name] )
keyword[else] :
identifier[lvpath] = literal[string] . identifier[format] ( identifier[vgname] , identifier[name] )
keyword[if] identifier[__salt__] [ literal[string] ]( identifier[lvpath] , identifier[quiet] = keyword[True] ):
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[elif] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]= keyword[None]
keyword[return] identifier[ret]
keyword[else] :
identifier[changes] = identifier[__salt__] [ literal[string] ]( identifier[name] ,
identifier[vgname] ,
identifier[size] = identifier[size] ,
identifier[extents] = identifier[extents] ,
identifier[snapshot] = identifier[_snapshot] ,
identifier[pv] = identifier[pv] ,
identifier[thinvolume] = identifier[thinvolume] ,
identifier[thinpool] = identifier[thinpool] ,
identifier[force] = identifier[force] ,
** identifier[kwargs] )
keyword[if] identifier[__salt__] [ literal[string] ]( identifier[lvpath] ):
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[changes]
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] , identifier[changes] )
identifier[ret] [ literal[string] ]= keyword[False]
keyword[return] identifier[ret] | def lv_present(name, vgname=None, size=None, extents=None, snapshot=None, pv='', thinvolume=False, thinpool=False, force=False, **kwargs):
"""
Create a new Logical Volume
name
The name of the Logical Volume
vgname
The name of the Volume Group on which the Logical Volume resides
size
The initial size of the Logical Volume
extents
The number of logical extents to allocate
snapshot
The name of the snapshot
pv
The Physical Volume to use
kwargs
Any supported options to lvcreate. See
:mod:`linux_lvm <salt.modules.linux_lvm>` for more details.
.. versionadded:: to_complete
thinvolume
Logical Volume is thinly provisioned
thinpool
Logical Volume is a thin pool
.. versionadded:: 2018.3.0
force
Assume yes to all prompts
"""
ret = {'changes': {}, 'comment': '', 'name': name, 'result': True}
_snapshot = None
if snapshot:
_snapshot = name
name = snapshot # depends on [control=['if'], data=[]]
if thinvolume:
lvpath = '/dev/{0}/{1}'.format(vgname.split('/')[0], name) # depends on [control=['if'], data=[]]
else:
lvpath = '/dev/{0}/{1}'.format(vgname, name)
if __salt__['lvm.lvdisplay'](lvpath, quiet=True):
ret['comment'] = 'Logical Volume {0} already present'.format(name) # depends on [control=['if'], data=[]]
elif __opts__['test']:
ret['comment'] = 'Logical Volume {0} is set to be created'.format(name)
ret['result'] = None
return ret # depends on [control=['if'], data=[]]
else:
changes = __salt__['lvm.lvcreate'](name, vgname, size=size, extents=extents, snapshot=_snapshot, pv=pv, thinvolume=thinvolume, thinpool=thinpool, force=force, **kwargs)
if __salt__['lvm.lvdisplay'](lvpath):
ret['comment'] = 'Created Logical Volume {0}'.format(name)
ret['changes']['created'] = changes # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Failed to create Logical Volume {0}. Error: {1}'.format(name, changes)
ret['result'] = False
return ret |
def dynamips_auto_idlepc(self):
"""
Compute the idle PC for a dynamips node
"""
return (yield from self._compute.get("/projects/{}/{}/nodes/{}/auto_idlepc".format(self._project.id, self._node_type, self._id), timeout=240)).json | def function[dynamips_auto_idlepc, parameter[self]]:
constant[
Compute the idle PC for a dynamips node
]
return[<ast.YieldFrom object at 0x7da204620b80>.json] | keyword[def] identifier[dynamips_auto_idlepc] ( identifier[self] ):
literal[string]
keyword[return] ( keyword[yield] keyword[from] identifier[self] . identifier[_compute] . identifier[get] ( literal[string] . identifier[format] ( identifier[self] . identifier[_project] . identifier[id] , identifier[self] . identifier[_node_type] , identifier[self] . identifier[_id] ), identifier[timeout] = literal[int] )). identifier[json] | def dynamips_auto_idlepc(self):
"""
Compute the idle PC for a dynamips node
"""
return (yield from self._compute.get('/projects/{}/{}/nodes/{}/auto_idlepc'.format(self._project.id, self._node_type, self._id), timeout=240)).json |
def sync_to_peers(peer_interface, user, paths=None, verbose=False, cmd=None,
gid=None, fatal=False):
"""Sync all hosts to an specific path
The type of group is integer, it allows user has permissions to
operate a directory have a different group id with the user id.
Propagates exception if any operation fails and fatal=True.
"""
if paths:
for host in collect_authed_hosts(peer_interface):
sync_to_peer(host, user, paths, verbose, cmd, gid, fatal) | def function[sync_to_peers, parameter[peer_interface, user, paths, verbose, cmd, gid, fatal]]:
constant[Sync all hosts to an specific path
The type of group is integer, it allows user has permissions to
operate a directory have a different group id with the user id.
Propagates exception if any operation fails and fatal=True.
]
if name[paths] begin[:]
for taget[name[host]] in starred[call[name[collect_authed_hosts], parameter[name[peer_interface]]]] begin[:]
call[name[sync_to_peer], parameter[name[host], name[user], name[paths], name[verbose], name[cmd], name[gid], name[fatal]]] | keyword[def] identifier[sync_to_peers] ( identifier[peer_interface] , identifier[user] , identifier[paths] = keyword[None] , identifier[verbose] = keyword[False] , identifier[cmd] = keyword[None] ,
identifier[gid] = keyword[None] , identifier[fatal] = keyword[False] ):
literal[string]
keyword[if] identifier[paths] :
keyword[for] identifier[host] keyword[in] identifier[collect_authed_hosts] ( identifier[peer_interface] ):
identifier[sync_to_peer] ( identifier[host] , identifier[user] , identifier[paths] , identifier[verbose] , identifier[cmd] , identifier[gid] , identifier[fatal] ) | def sync_to_peers(peer_interface, user, paths=None, verbose=False, cmd=None, gid=None, fatal=False):
"""Sync all hosts to an specific path
The type of group is integer, it allows user has permissions to
operate a directory have a different group id with the user id.
Propagates exception if any operation fails and fatal=True.
"""
if paths:
for host in collect_authed_hosts(peer_interface):
sync_to_peer(host, user, paths, verbose, cmd, gid, fatal) # depends on [control=['for'], data=['host']] # depends on [control=['if'], data=[]] |
def _send_to_group(self, group, **kwargs):
""" You shouldn't use this method directly.
Send a single command to specific group.
Handles automatically sending command to white or rgbw group.
"""
retries = kwargs.get("retries", self.repeat_commands)
for _ in range(retries):
if kwargs.get("send_on", True):
self.on(group)
if group is None or group == 0:
self._send_to_all_groups(**kwargs)
continue
if group < 1 or group > 4:
raise AttributeError("Group must be between 1 and 4 (was %s)" % group)
if kwargs.get("per_group"):
self._send_command(kwargs.get("%s_cmd" % self.get_group_type(group), [None, None, None, None])[group - 1])
continue
if self.get_group_type(group) == "white":
command = self.WHITE_COMMANDS.get(kwargs["command"])
elif self.get_group_type(group) == "rgbw":
if kwargs["command"] == "color_by_int":
command = (self.RGBW_COMMANDS["color_by_int"], struct.pack("B", kwargs["color"]))
else:
command = self.RGBW_COMMANDS.get(kwargs["command"])
self._send_command(command) | def function[_send_to_group, parameter[self, group]]:
constant[ You shouldn't use this method directly.
Send a single command to specific group.
Handles automatically sending command to white or rgbw group.
]
variable[retries] assign[=] call[name[kwargs].get, parameter[constant[retries], name[self].repeat_commands]]
for taget[name[_]] in starred[call[name[range], parameter[name[retries]]]] begin[:]
if call[name[kwargs].get, parameter[constant[send_on], constant[True]]] begin[:]
call[name[self].on, parameter[name[group]]]
if <ast.BoolOp object at 0x7da18ede6c50> begin[:]
call[name[self]._send_to_all_groups, parameter[]]
continue
if <ast.BoolOp object at 0x7da18ede4e80> begin[:]
<ast.Raise object at 0x7da18ede5f90>
if call[name[kwargs].get, parameter[constant[per_group]]] begin[:]
call[name[self]._send_command, parameter[call[call[name[kwargs].get, parameter[binary_operation[constant[%s_cmd] <ast.Mod object at 0x7da2590d6920> call[name[self].get_group_type, parameter[name[group]]]], list[[<ast.Constant object at 0x7da18f723ee0>, <ast.Constant object at 0x7da18f720f70>, <ast.Constant object at 0x7da18f7231f0>, <ast.Constant object at 0x7da18f723e50>]]]]][binary_operation[name[group] - constant[1]]]]]
continue
if compare[call[name[self].get_group_type, parameter[name[group]]] equal[==] constant[white]] begin[:]
variable[command] assign[=] call[name[self].WHITE_COMMANDS.get, parameter[call[name[kwargs]][constant[command]]]]
call[name[self]._send_command, parameter[name[command]]] | keyword[def] identifier[_send_to_group] ( identifier[self] , identifier[group] ,** identifier[kwargs] ):
literal[string]
identifier[retries] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[repeat_commands] )
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[retries] ):
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] ):
identifier[self] . identifier[on] ( identifier[group] )
keyword[if] identifier[group] keyword[is] keyword[None] keyword[or] identifier[group] == literal[int] :
identifier[self] . identifier[_send_to_all_groups] (** identifier[kwargs] )
keyword[continue]
keyword[if] identifier[group] < literal[int] keyword[or] identifier[group] > literal[int] :
keyword[raise] identifier[AttributeError] ( literal[string] % identifier[group] )
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
identifier[self] . identifier[_send_command] ( identifier[kwargs] . identifier[get] ( literal[string] % identifier[self] . identifier[get_group_type] ( identifier[group] ),[ keyword[None] , keyword[None] , keyword[None] , keyword[None] ])[ identifier[group] - literal[int] ])
keyword[continue]
keyword[if] identifier[self] . identifier[get_group_type] ( identifier[group] )== literal[string] :
identifier[command] = identifier[self] . identifier[WHITE_COMMANDS] . identifier[get] ( identifier[kwargs] [ literal[string] ])
keyword[elif] identifier[self] . identifier[get_group_type] ( identifier[group] )== literal[string] :
keyword[if] identifier[kwargs] [ literal[string] ]== literal[string] :
identifier[command] =( identifier[self] . identifier[RGBW_COMMANDS] [ literal[string] ], identifier[struct] . identifier[pack] ( literal[string] , identifier[kwargs] [ literal[string] ]))
keyword[else] :
identifier[command] = identifier[self] . identifier[RGBW_COMMANDS] . identifier[get] ( identifier[kwargs] [ literal[string] ])
identifier[self] . identifier[_send_command] ( identifier[command] ) | def _send_to_group(self, group, **kwargs):
""" You shouldn't use this method directly.
Send a single command to specific group.
Handles automatically sending command to white or rgbw group.
"""
retries = kwargs.get('retries', self.repeat_commands)
for _ in range(retries):
if kwargs.get('send_on', True):
self.on(group) # depends on [control=['if'], data=[]]
if group is None or group == 0:
self._send_to_all_groups(**kwargs)
continue # depends on [control=['if'], data=[]]
if group < 1 or group > 4:
raise AttributeError('Group must be between 1 and 4 (was %s)' % group) # depends on [control=['if'], data=[]]
if kwargs.get('per_group'):
self._send_command(kwargs.get('%s_cmd' % self.get_group_type(group), [None, None, None, None])[group - 1])
continue # depends on [control=['if'], data=[]]
if self.get_group_type(group) == 'white':
command = self.WHITE_COMMANDS.get(kwargs['command']) # depends on [control=['if'], data=[]]
elif self.get_group_type(group) == 'rgbw':
if kwargs['command'] == 'color_by_int':
command = (self.RGBW_COMMANDS['color_by_int'], struct.pack('B', kwargs['color'])) # depends on [control=['if'], data=[]]
else:
command = self.RGBW_COMMANDS.get(kwargs['command']) # depends on [control=['if'], data=[]]
self._send_command(command) # depends on [control=['for'], data=[]] |
def expected_hmm_logprob(pi_0, trans_matrix, stats):
"""
:param pi_0: initial distribution
:param trans_matrix: transition matrix
:param stats: tuple (E[z_t], \sum_t E[z_t z_{t+1}.T])
:return: E_{q(z)} [ log p(z) ]
"""
E_z, sum_E_ztztp1T, _ = stats
T, K = E_z.shape
assert sum_E_ztztp1T.shape == (K, K)
out = 0
out += np.dot(E_z[0], np.log(pi_0))
out += np.sum(sum_E_ztztp1T * np.log(trans_matrix))
return out | def function[expected_hmm_logprob, parameter[pi_0, trans_matrix, stats]]:
constant[
:param pi_0: initial distribution
:param trans_matrix: transition matrix
:param stats: tuple (E[z_t], \sum_t E[z_t z_{t+1}.T])
:return: E_{q(z)} [ log p(z) ]
]
<ast.Tuple object at 0x7da1b12656f0> assign[=] name[stats]
<ast.Tuple object at 0x7da1b1266620> assign[=] name[E_z].shape
assert[compare[name[sum_E_ztztp1T].shape equal[==] tuple[[<ast.Name object at 0x7da1b1266710>, <ast.Name object at 0x7da1b1266110>]]]]
variable[out] assign[=] constant[0]
<ast.AugAssign object at 0x7da1b12643d0>
<ast.AugAssign object at 0x7da1b1265b70>
return[name[out]] | keyword[def] identifier[expected_hmm_logprob] ( identifier[pi_0] , identifier[trans_matrix] , identifier[stats] ):
literal[string]
identifier[E_z] , identifier[sum_E_ztztp1T] , identifier[_] = identifier[stats]
identifier[T] , identifier[K] = identifier[E_z] . identifier[shape]
keyword[assert] identifier[sum_E_ztztp1T] . identifier[shape] ==( identifier[K] , identifier[K] )
identifier[out] = literal[int]
identifier[out] += identifier[np] . identifier[dot] ( identifier[E_z] [ literal[int] ], identifier[np] . identifier[log] ( identifier[pi_0] ))
identifier[out] += identifier[np] . identifier[sum] ( identifier[sum_E_ztztp1T] * identifier[np] . identifier[log] ( identifier[trans_matrix] ))
keyword[return] identifier[out] | def expected_hmm_logprob(pi_0, trans_matrix, stats):
"""
:param pi_0: initial distribution
:param trans_matrix: transition matrix
:param stats: tuple (E[z_t], \\sum_t E[z_t z_{t+1}.T])
:return: E_{q(z)} [ log p(z) ]
"""
(E_z, sum_E_ztztp1T, _) = stats
(T, K) = E_z.shape
assert sum_E_ztztp1T.shape == (K, K)
out = 0
out += np.dot(E_z[0], np.log(pi_0))
out += np.sum(sum_E_ztztp1T * np.log(trans_matrix))
return out |
def add_optional_parameters(detail_json, detail, rating, rating_n, popularity, current_popularity, time_spent):
    """Attach the optional response fields to ``detail_json`` and return it.

    Each value is written only when it is available, either from the explicit
    argument or (for rating and phone number) from the raw ``detail`` dict.

    :param detail_json: result dict, mutated in place
    :param detail: raw place details used as a fallback source
    :param rating: explicit rating; wins over ``detail["rating"]``
    :param rating_n: number of ratings
    :param popularity: raw popularity data, expanded via get_popularity_for_day
    :param current_popularity: live popularity value
    :param time_spent: typical visit duration
    :return: the same ``detail_json`` dict, for convenience
    """
    _absent = object()

    if rating is not None:
        detail_json["rating"] = rating
    else:
        fallback_rating = detail.get("rating", _absent)
        if fallback_rating is not _absent:
            detail_json["rating"] = fallback_rating

    if rating_n is not None:
        detail_json["rating_n"] = rating_n

    phone = detail.get("international_phone_number", _absent)
    if phone is not _absent:
        detail_json["international_phone_number"] = phone

    if current_popularity is not None:
        detail_json["current_popularity"] = current_popularity

    if popularity is not None:
        # expand the raw data into per-day popularity and wait-time estimates
        week_popularity, wait_times = get_popularity_for_day(popularity)
        detail_json["populartimes"] = week_popularity
        detail_json["time_wait"] = wait_times

    if time_spent is not None:
        detail_json["time_spent"] = time_spent

    return detail_json
constant[
check for optional return parameters and add them to the result json
:param detail_json:
:param detail:
:param rating:
:param rating_n:
:param popularity:
:param current_popularity:
:param time_spent:
:return:
]
if compare[name[rating] is_not constant[None]] begin[:]
call[name[detail_json]][constant[rating]] assign[=] name[rating]
if compare[name[rating_n] is_not constant[None]] begin[:]
call[name[detail_json]][constant[rating_n]] assign[=] name[rating_n]
if compare[constant[international_phone_number] in name[detail]] begin[:]
call[name[detail_json]][constant[international_phone_number]] assign[=] call[name[detail]][constant[international_phone_number]]
if compare[name[current_popularity] is_not constant[None]] begin[:]
call[name[detail_json]][constant[current_popularity]] assign[=] name[current_popularity]
if compare[name[popularity] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da18dc06bf0> assign[=] call[name[get_popularity_for_day], parameter[name[popularity]]]
call[name[detail_json]][constant[populartimes]] assign[=] name[popularity]
call[name[detail_json]][constant[time_wait]] assign[=] name[wait_times]
if compare[name[time_spent] is_not constant[None]] begin[:]
call[name[detail_json]][constant[time_spent]] assign[=] name[time_spent]
return[name[detail_json]] | keyword[def] identifier[add_optional_parameters] ( identifier[detail_json] , identifier[detail] , identifier[rating] , identifier[rating_n] , identifier[popularity] , identifier[current_popularity] , identifier[time_spent] ):
literal[string]
keyword[if] identifier[rating] keyword[is] keyword[not] keyword[None] :
identifier[detail_json] [ literal[string] ]= identifier[rating]
keyword[elif] literal[string] keyword[in] identifier[detail] :
identifier[detail_json] [ literal[string] ]= identifier[detail] [ literal[string] ]
keyword[if] identifier[rating_n] keyword[is] keyword[not] keyword[None] :
identifier[detail_json] [ literal[string] ]= identifier[rating_n]
keyword[if] literal[string] keyword[in] identifier[detail] :
identifier[detail_json] [ literal[string] ]= identifier[detail] [ literal[string] ]
keyword[if] identifier[current_popularity] keyword[is] keyword[not] keyword[None] :
identifier[detail_json] [ literal[string] ]= identifier[current_popularity]
keyword[if] identifier[popularity] keyword[is] keyword[not] keyword[None] :
identifier[popularity] , identifier[wait_times] = identifier[get_popularity_for_day] ( identifier[popularity] )
identifier[detail_json] [ literal[string] ]= identifier[popularity]
identifier[detail_json] [ literal[string] ]= identifier[wait_times]
keyword[if] identifier[time_spent] keyword[is] keyword[not] keyword[None] :
identifier[detail_json] [ literal[string] ]= identifier[time_spent]
keyword[return] identifier[detail_json] | def add_optional_parameters(detail_json, detail, rating, rating_n, popularity, current_popularity, time_spent):
"""
check for optional return parameters and add them to the result json
:param detail_json:
:param detail:
:param rating:
:param rating_n:
:param popularity:
:param current_popularity:
:param time_spent:
:return:
"""
if rating is not None:
detail_json['rating'] = rating # depends on [control=['if'], data=['rating']]
elif 'rating' in detail:
detail_json['rating'] = detail['rating'] # depends on [control=['if'], data=['detail']]
if rating_n is not None:
detail_json['rating_n'] = rating_n # depends on [control=['if'], data=['rating_n']]
if 'international_phone_number' in detail:
detail_json['international_phone_number'] = detail['international_phone_number'] # depends on [control=['if'], data=['detail']]
if current_popularity is not None:
detail_json['current_popularity'] = current_popularity # depends on [control=['if'], data=['current_popularity']]
if popularity is not None:
(popularity, wait_times) = get_popularity_for_day(popularity)
detail_json['populartimes'] = popularity
detail_json['time_wait'] = wait_times # depends on [control=['if'], data=['popularity']]
if time_spent is not None:
detail_json['time_spent'] = time_spent # depends on [control=['if'], data=['time_spent']]
return detail_json |
def getWorker(self, name):
    """Look up and return a Worker previously registered under ``name``.

    Parameters
    ----------
    name: string
        Name of the Worker to retrieve

    Raises
    ------
    Exception
        If no Worker is registered under the given name (also logged).
    """
    if name not in self.worker_list:
        message = "Worker {0} is not registered!".format(name)
        self.logger.error(message)
        raise Exception(message)
    return self.worker_list[name]
constant[
Retrieve the Worker registered under the given name.
If the given name does not exists in the Worker list, an Exception is raised.
Parameters
----------
name: string
Name of the Worker to retrieve
]
if <ast.UnaryOp object at 0x7da1b1a1e710> begin[:]
call[name[self].logger.error, parameter[call[constant[Worker {0} is not registered!].format, parameter[name[name]]]]]
<ast.Raise object at 0x7da1b1a1d810>
return[call[name[self].worker_list][name[name]]] | keyword[def] identifier[getWorker] ( identifier[self] , identifier[name] ):
literal[string]
keyword[if] keyword[not] identifier[name] keyword[in] identifier[self] . identifier[worker_list] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[return] identifier[self] . identifier[worker_list] [ identifier[name] ] | def getWorker(self, name):
"""
Retrieve the Worker registered under the given name.
If the given name does not exists in the Worker list, an Exception is raised.
Parameters
----------
name: string
Name of the Worker to retrieve
"""
if not name in self.worker_list:
self.logger.error('Worker {0} is not registered!'.format(name))
raise Exception('Worker {0} is not registered!'.format(name)) # depends on [control=['if'], data=[]]
return self.worker_list[name] |
def setup_common_actions(self):
    """Build and return the context-menu actions shared by all explorers.

    Returns a list with three actions: the filename-filter editor, the
    "show all files" toggle and the "single click to open" toggle, the
    last two initialized from the stored settings.
    """
    # Filename filter editor
    filters_action = create_action(
        self,
        _("Edit filename filters..."),
        None,
        ima.icon('filter'),
        triggered=self.edit_filter,
    )

    # "Show all files" toggle, checked according to the current setting
    all_action = create_action(self, _("Show all files"),
                               toggled=self.toggle_all)
    all_action.setChecked(self.show_all)
    self.toggle_all(self.show_all)

    # "Single click to open" toggle, checked according to the current setting
    single_click_action = create_action(
        self,
        _("Single click to open"),
        toggled=self.set_single_click_to_open,
    )
    single_click_action.setChecked(self.single_click_to_open)

    return [filters_action, all_action, single_click_action]
constant[Setup context menu common actions]
variable[filters_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Edit filename filters...]]], constant[None], call[name[ima].icon, parameter[constant[filter]]]]]
variable[all_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Show all files]]]]]
call[name[all_action].setChecked, parameter[name[self].show_all]]
call[name[self].toggle_all, parameter[name[self].show_all]]
variable[single_click_to_open] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Single click to open]]]]]
call[name[single_click_to_open].setChecked, parameter[name[self].single_click_to_open]]
return[list[[<ast.Name object at 0x7da18f09f8e0>, <ast.Name object at 0x7da18f09cf70>, <ast.Name object at 0x7da18f09f310>]]] | keyword[def] identifier[setup_common_actions] ( identifier[self] ):
literal[string]
identifier[filters_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ),
keyword[None] , identifier[ima] . identifier[icon] ( literal[string] ),
identifier[triggered] = identifier[self] . identifier[edit_filter] )
identifier[all_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ),
identifier[toggled] = identifier[self] . identifier[toggle_all] )
identifier[all_action] . identifier[setChecked] ( identifier[self] . identifier[show_all] )
identifier[self] . identifier[toggle_all] ( identifier[self] . identifier[show_all] )
identifier[single_click_to_open] = identifier[create_action] (
identifier[self] ,
identifier[_] ( literal[string] ),
identifier[toggled] = identifier[self] . identifier[set_single_click_to_open] ,
)
identifier[single_click_to_open] . identifier[setChecked] ( identifier[self] . identifier[single_click_to_open] )
keyword[return] [ identifier[filters_action] , identifier[all_action] , identifier[single_click_to_open] ] | def setup_common_actions(self):
"""Setup context menu common actions""" # Filters
filters_action = create_action(self, _('Edit filename filters...'), None, ima.icon('filter'), triggered=self.edit_filter) # Show all files
all_action = create_action(self, _('Show all files'), toggled=self.toggle_all)
all_action.setChecked(self.show_all)
self.toggle_all(self.show_all) # Show all files
single_click_to_open = create_action(self, _('Single click to open'), toggled=self.set_single_click_to_open)
single_click_to_open.setChecked(self.single_click_to_open)
return [filters_action, all_action, single_click_to_open] |
def _get_version(prog, path):
    """Try to determine the version of program ``prog`` installed at ``path``.

    Returns a tuple ``(bool, version)``: the first element is True iff the
    version was found; the second is the version string on success, or an
    error message describing what was tried on failure.
    """
    assert prog in prog_to_version_cmd
    args, regex = prog_to_version_cmd[prog]
    cmd = path + ' ' + args
    # spades must be launched through python3 explicitly; everything else
    # is run through the shell so its version flag is handled normally.
    if prog == 'spades':
        process = subprocess.Popen(['python3', path, args], shell=False,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        process = subprocess.Popen(cmd, shell=True,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    # scan both stdout and stderr: tools disagree on where they print versions
    lines = common.decode(stdout).split('\n')[:-1] + common.decode(stderr).split('\n')[:-1]
    for line in lines:
        match = regex.search(line)
        if match:
            return True, match.group(1)
    return False, 'I tried to get the version of ' + prog + ' with: "' + cmd + '" and the output didn\'t match this regular expression: "' + regex.pattern + '"'
constant[Given a program name and expected path, tries to determine its version.
Returns tuple (bool, version). First element True iff found version ok.
Second element is version string (if found), otherwise an error message]
assert[compare[name[prog] in name[prog_to_version_cmd]]]
<ast.Tuple object at 0x7da2044c30a0> assign[=] call[name[prog_to_version_cmd]][name[prog]]
variable[cmd] assign[=] binary_operation[binary_operation[name[path] + constant[ ]] + name[args]]
if compare[name[prog] equal[==] constant[spades]] begin[:]
variable[cmd_output] assign[=] call[call[name[subprocess].Popen, parameter[list[[<ast.Constant object at 0x7da2044c2980>, <ast.Name object at 0x7da2044c2b00>, <ast.Name object at 0x7da2044c04c0>]]]].communicate, parameter[]]
variable[cmd_output] assign[=] binary_operation[call[call[call[name[common].decode, parameter[call[name[cmd_output]][constant[0]]]].split, parameter[constant[
]]]][<ast.Slice object at 0x7da204960ca0>] + call[call[call[name[common].decode, parameter[call[name[cmd_output]][constant[1]]]].split, parameter[constant[
]]]][<ast.Slice object at 0x7da2049614b0>]]
for taget[name[line]] in starred[name[cmd_output]] begin[:]
variable[hits] assign[=] call[name[regex].search, parameter[name[line]]]
if name[hits] begin[:]
return[tuple[[<ast.Constant object at 0x7da204963e50>, <ast.Call object at 0x7da204963610>]]]
return[tuple[[<ast.Constant object at 0x7da2044c06d0>, <ast.BinOp object at 0x7da2044c3880>]]] | keyword[def] identifier[_get_version] ( identifier[prog] , identifier[path] ):
literal[string]
keyword[assert] identifier[prog] keyword[in] identifier[prog_to_version_cmd]
identifier[args] , identifier[regex] = identifier[prog_to_version_cmd] [ identifier[prog] ]
identifier[cmd] = identifier[path] + literal[string] + identifier[args]
keyword[if] identifier[prog] == literal[string] :
identifier[cmd_output] = identifier[subprocess] . identifier[Popen] ([ literal[string] , identifier[path] , identifier[args] ], identifier[shell] = keyword[False] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] ). identifier[communicate] ()
keyword[else] :
identifier[cmd_output] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[shell] = keyword[True] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[PIPE] ). identifier[communicate] ()
identifier[cmd_output] = identifier[common] . identifier[decode] ( identifier[cmd_output] [ literal[int] ]). identifier[split] ( literal[string] )[:- literal[int] ]+ identifier[common] . identifier[decode] ( identifier[cmd_output] [ literal[int] ]). identifier[split] ( literal[string] )[:- literal[int] ]
keyword[for] identifier[line] keyword[in] identifier[cmd_output] :
identifier[hits] = identifier[regex] . identifier[search] ( identifier[line] )
keyword[if] identifier[hits] :
keyword[return] keyword[True] , identifier[hits] . identifier[group] ( literal[int] )
keyword[return] keyword[False] , literal[string] + identifier[prog] + literal[string] + identifier[cmd] + literal[string] + identifier[regex] . identifier[pattern] + literal[string] | def _get_version(prog, path):
"""Given a program name and expected path, tries to determine its version.
Returns tuple (bool, version). First element True iff found version ok.
Second element is version string (if found), otherwise an error message"""
assert prog in prog_to_version_cmd
(args, regex) = prog_to_version_cmd[prog]
cmd = path + ' ' + args
if prog == 'spades':
cmd_output = subprocess.Popen(['python3', path, args], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() # depends on [control=['if'], data=[]]
else:
cmd_output = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
cmd_output = common.decode(cmd_output[0]).split('\n')[:-1] + common.decode(cmd_output[1]).split('\n')[:-1]
for line in cmd_output:
hits = regex.search(line)
if hits:
return (True, hits.group(1)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return (False, 'I tried to get the version of ' + prog + ' with: "' + cmd + '" and the output didn\'t match this regular expression: "' + regex.pattern + '"') |
def is_scalar(self, typ: Type = _Any) -> bool:
    """Return True iff this node is a YAML scalar, optionally of a given type.

    :param typ: one of `str`, `int`, `float`, `bool`, or `None` to require \
            that the scalar carries that type's tag; with the default, any \
            scalar matches.
    :raises ValueError: if ``typ`` is neither the default nor a supported \
            scalar type.
    """
    if not isinstance(self.yaml_node, yaml.ScalarNode):
        return False
    if typ != _Any and typ in scalar_type_to_tag:
        # None stands in for NoneType in the public API
        lookup = type(None) if typ is None else typ
        return self.yaml_node.tag == scalar_type_to_tag[lookup]
    if typ is _Any:
        return True
    raise ValueError('Invalid scalar type passed to is_scalar()')
return False | def function[is_scalar, parameter[self, typ]]:
constant[Returns True iff this represents a scalar node.
If a type is given, checks that the ScalarNode represents this type. Type may be `str`, `int`, `float`, `bool`, or `None`.
If no type is given, any ScalarNode will return True.
]
if call[name[isinstance], parameter[name[self].yaml_node, name[yaml].ScalarNode]] begin[:]
if <ast.BoolOp object at 0x7da18eb57970> begin[:]
if compare[name[typ] is constant[None]] begin[:]
variable[typ] assign[=] call[name[type], parameter[constant[None]]]
return[compare[name[self].yaml_node.tag equal[==] call[name[scalar_type_to_tag]][name[typ]]]]
if compare[name[typ] is name[_Any]] begin[:]
return[constant[True]]
<ast.Raise object at 0x7da18eb55270>
return[constant[False]] | keyword[def] identifier[is_scalar] ( identifier[self] , identifier[typ] : identifier[Type] = identifier[_Any] )-> identifier[bool] :
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[yaml_node] , identifier[yaml] . identifier[ScalarNode] ):
keyword[if] identifier[typ] != identifier[_Any] keyword[and] identifier[typ] keyword[in] identifier[scalar_type_to_tag] :
keyword[if] identifier[typ] keyword[is] keyword[None] :
identifier[typ] = identifier[type] ( keyword[None] )
keyword[return] identifier[self] . identifier[yaml_node] . identifier[tag] == identifier[scalar_type_to_tag] [ identifier[typ] ]
keyword[if] identifier[typ] keyword[is] identifier[_Any] :
keyword[return] keyword[True]
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] keyword[False] | def is_scalar(self, typ: Type=_Any) -> bool:
"""Returns True iff this represents a scalar node.
If a type is given, checks that the ScalarNode represents this type. Type may be `str`, `int`, `float`, `bool`, or `None`.
If no type is given, any ScalarNode will return True.
"""
if isinstance(self.yaml_node, yaml.ScalarNode):
if typ != _Any and typ in scalar_type_to_tag:
if typ is None:
typ = type(None) # depends on [control=['if'], data=['typ']]
return self.yaml_node.tag == scalar_type_to_tag[typ] # depends on [control=['if'], data=[]]
if typ is _Any:
return True # depends on [control=['if'], data=[]]
raise ValueError('Invalid scalar type passed to is_scalar()') # depends on [control=['if'], data=[]]
return False |
def create_object(self, data, view_kwargs):
    """Create an object through sqlalchemy

    :param dict data: the data validated by marshmallow
    :param dict view_kwargs: kwargs from the resource view
    :return DeclarativeMeta: an object from sqlalchemy
    :raises JsonApiException: if the session commit fails; the session is
        rolled back before re-raising
    """
    # pre-creation hook (subclasses may adjust data / view_kwargs here)
    self.before_create_object(data, view_kwargs)
    # relationship and nested fields are applied through dedicated helpers
    # below, so they must be excluded from the model constructor kwargs
    relationship_fields = get_relationships(self.resource.schema, model_field=True)
    nested_fields = get_nested_fields(self.resource.schema, model_field=True)
    join_fields = relationship_fields + nested_fields
    obj = self.model(**{key: value
                        for (key, value) in data.items() if key not in join_fields})
    self.apply_relationships(data, obj)
    self.apply_nested_fields(data, obj)
    self.session.add(obj)
    try:
        self.session.commit()
    except JsonApiException as e:
        # already a JSON:API-shaped error: undo the transaction, re-raise as-is
        self.session.rollback()
        raise e
    except Exception as e:
        # any other failure is rolled back and wrapped into a JSON:API error
        # pointing at the request's /data member
        self.session.rollback()
        raise JsonApiException("Object creation error: " + str(e), source={'pointer': '/data'})
    # post-creation hook runs only after a successful commit
    self.after_create_object(obj, data, view_kwargs)
    return obj
constant[Create an object through sqlalchemy
:param dict data: the data validated by marshmallow
:param dict view_kwargs: kwargs from the resource view
:return DeclarativeMeta: an object from sqlalchemy
]
call[name[self].before_create_object, parameter[name[data], name[view_kwargs]]]
variable[relationship_fields] assign[=] call[name[get_relationships], parameter[name[self].resource.schema]]
variable[nested_fields] assign[=] call[name[get_nested_fields], parameter[name[self].resource.schema]]
variable[join_fields] assign[=] binary_operation[name[relationship_fields] + name[nested_fields]]
variable[obj] assign[=] call[name[self].model, parameter[]]
call[name[self].apply_relationships, parameter[name[data], name[obj]]]
call[name[self].apply_nested_fields, parameter[name[data], name[obj]]]
call[name[self].session.add, parameter[name[obj]]]
<ast.Try object at 0x7da1b17f9ed0>
call[name[self].after_create_object, parameter[name[obj], name[data], name[view_kwargs]]]
return[name[obj]] | keyword[def] identifier[create_object] ( identifier[self] , identifier[data] , identifier[view_kwargs] ):
literal[string]
identifier[self] . identifier[before_create_object] ( identifier[data] , identifier[view_kwargs] )
identifier[relationship_fields] = identifier[get_relationships] ( identifier[self] . identifier[resource] . identifier[schema] , identifier[model_field] = keyword[True] )
identifier[nested_fields] = identifier[get_nested_fields] ( identifier[self] . identifier[resource] . identifier[schema] , identifier[model_field] = keyword[True] )
identifier[join_fields] = identifier[relationship_fields] + identifier[nested_fields]
identifier[obj] = identifier[self] . identifier[model] (**{ identifier[key] : identifier[value]
keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[data] . identifier[items] () keyword[if] identifier[key] keyword[not] keyword[in] identifier[join_fields] })
identifier[self] . identifier[apply_relationships] ( identifier[data] , identifier[obj] )
identifier[self] . identifier[apply_nested_fields] ( identifier[data] , identifier[obj] )
identifier[self] . identifier[session] . identifier[add] ( identifier[obj] )
keyword[try] :
identifier[self] . identifier[session] . identifier[commit] ()
keyword[except] identifier[JsonApiException] keyword[as] identifier[e] :
identifier[self] . identifier[session] . identifier[rollback] ()
keyword[raise] identifier[e]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[session] . identifier[rollback] ()
keyword[raise] identifier[JsonApiException] ( literal[string] + identifier[str] ( identifier[e] ), identifier[source] ={ literal[string] : literal[string] })
identifier[self] . identifier[after_create_object] ( identifier[obj] , identifier[data] , identifier[view_kwargs] )
keyword[return] identifier[obj] | def create_object(self, data, view_kwargs):
"""Create an object through sqlalchemy
:param dict data: the data validated by marshmallow
:param dict view_kwargs: kwargs from the resource view
:return DeclarativeMeta: an object from sqlalchemy
"""
self.before_create_object(data, view_kwargs)
relationship_fields = get_relationships(self.resource.schema, model_field=True)
nested_fields = get_nested_fields(self.resource.schema, model_field=True)
join_fields = relationship_fields + nested_fields
obj = self.model(**{key: value for (key, value) in data.items() if key not in join_fields})
self.apply_relationships(data, obj)
self.apply_nested_fields(data, obj)
self.session.add(obj)
try:
self.session.commit() # depends on [control=['try'], data=[]]
except JsonApiException as e:
self.session.rollback()
raise e # depends on [control=['except'], data=['e']]
except Exception as e:
self.session.rollback()
raise JsonApiException('Object creation error: ' + str(e), source={'pointer': '/data'}) # depends on [control=['except'], data=['e']]
self.after_create_object(obj, data, view_kwargs)
return obj |
def put_event_multi_touch_string(self, count, contacts, scan_time):
    """Forward a multi-touch event whose contacts are encoded as a string.

    :param count: number of contacts (int)
    :param contacts: information about all contacts encoded as
        "id1,x1,y1,inContact1,inRange1;...;idN,xN,yN,inContactN,inRangeN",
        e.g. "0,10,20,1,1;1,30,40,1,1" for two contacts
    :param scan_time: scan time of the event (int)
    :raises TypeError: if any argument has the wrong type
    """
    # validate argument types up front, in declaration order
    checks = (
        (count, baseinteger, "count can only be an instance of type baseinteger"),
        (contacts, basestring, "contacts can only be an instance of type basestring"),
        (scan_time, baseinteger, "scan_time can only be an instance of type baseinteger"),
    )
    for value, expected_type, message in checks:
        if not isinstance(value, expected_type):
            raise TypeError(message)
    self._call("putEventMultiTouchString",
               in_p=[count, contacts, scan_time])
constant[:py:func:`put_event_multi_touch`
in count of type int
:py:func:`put_event_multi_touch`
in contacts of type str
Contains information about all contacts:
"id1,x1,y1,inContact1,inRange1;...;idN,xN,yN,inContactN,inRangeN".
For example for two contacts: "0,10,20,1,1;1,30,40,1,1"
in scan_time of type int
:py:func:`put_event_multi_touch`
]
if <ast.UnaryOp object at 0x7da20e9b1330> begin[:]
<ast.Raise object at 0x7da20e9b2560>
if <ast.UnaryOp object at 0x7da20e9b0100> begin[:]
<ast.Raise object at 0x7da20e9b1270>
if <ast.UnaryOp object at 0x7da20e9b09a0> begin[:]
<ast.Raise object at 0x7da20e9b0970>
call[name[self]._call, parameter[constant[putEventMultiTouchString]]] | keyword[def] identifier[put_event_multi_touch_string] ( identifier[self] , identifier[count] , identifier[contacts] , identifier[scan_time] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[count] , identifier[baseinteger] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[contacts] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[scan_time] , identifier[baseinteger] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[count] , identifier[contacts] , identifier[scan_time] ]) | def put_event_multi_touch_string(self, count, contacts, scan_time):
""":py:func:`put_event_multi_touch`
in count of type int
:py:func:`put_event_multi_touch`
in contacts of type str
Contains information about all contacts:
"id1,x1,y1,inContact1,inRange1;...;idN,xN,yN,inContactN,inRangeN".
For example for two contacts: "0,10,20,1,1;1,30,40,1,1"
in scan_time of type int
:py:func:`put_event_multi_touch`
"""
if not isinstance(count, baseinteger):
raise TypeError('count can only be an instance of type baseinteger') # depends on [control=['if'], data=[]]
if not isinstance(contacts, basestring):
raise TypeError('contacts can only be an instance of type basestring') # depends on [control=['if'], data=[]]
if not isinstance(scan_time, baseinteger):
raise TypeError('scan_time can only be an instance of type baseinteger') # depends on [control=['if'], data=[]]
self._call('putEventMultiTouchString', in_p=[count, contacts, scan_time]) |
def clone(self, substitutions, commit=True, **kwargs):
    """
    Clone a DAG, optionally skipping the commit.

    :param substitutions: substitutions forwarded to ``self.store.clone``
    :param commit: NOTE(review): accepted but currently unused -- it is not
        forwarded to ``self.store.clone``, so the "optionally skipping the
        commit" behavior described above does not actually happen here;
        confirm whether callers rely on it or whether it should be passed
        through
    :param kwargs: extra options passed through to ``self.store.clone``
    :return: whatever ``self.store.clone`` returns
    """
    return self.store.clone(substitutions, **kwargs)
constant[
Clone a DAG, optionally skipping the commit.
]
return[call[name[self].store.clone, parameter[name[substitutions]]]] | keyword[def] identifier[clone] ( identifier[self] , identifier[substitutions] , identifier[commit] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[store] . identifier[clone] ( identifier[substitutions] ,** identifier[kwargs] ) | def clone(self, substitutions, commit=True, **kwargs):
"""
Clone a DAG, optionally skipping the commit.
"""
return self.store.clone(substitutions, **kwargs) |
def batch_run_many(player, positions, batch_size=100):
    """Run the player's network over ``positions`` in fixed-size batches.

    Evaluating every position at once can exhaust memory, so positions are
    fed through ``player.network.run_many`` in chunks of ``batch_size`` and
    the per-chunk outputs are concatenated afterwards.
    TODO: This should be a member function of player.network?

    :param player: object exposing ``network.run_many(positions)``
    :param positions: sequence of positions to evaluate
    :param batch_size: maximum number of positions per network call
    :return: tuple (probabilities, values), each concatenated along axis 0
    """
    all_probs = []
    all_values = []
    for start in range(0, len(positions), batch_size):
        chunk = positions[start:start + batch_size]
        chunk_probs, chunk_values = player.network.run_many(chunk)
        all_probs.append(chunk_probs)
        all_values.append(chunk_values)
    return np.concatenate(all_probs, axis=0), np.concatenate(all_values, axis=0)
constant[Used to avoid a memory oveflow issue when running the network
on too many positions. TODO: This should be a member function of
player.network?]
variable[prob_list] assign[=] list[[]]
variable[value_list] assign[=] list[[]]
for taget[name[idx]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[positions]]], name[batch_size]]]] begin[:]
<ast.Tuple object at 0x7da18bcc9ed0> assign[=] call[name[player].network.run_many, parameter[call[name[positions]][<ast.Slice object at 0x7da18bcca320>]]]
call[name[prob_list].append, parameter[name[probs]]]
call[name[value_list].append, parameter[name[values]]]
return[tuple[[<ast.Call object at 0x7da18bcc9990>, <ast.Call object at 0x7da18bcca0e0>]]] | keyword[def] identifier[batch_run_many] ( identifier[player] , identifier[positions] , identifier[batch_size] = literal[int] ):
literal[string]
identifier[prob_list] =[]
identifier[value_list] =[]
keyword[for] identifier[idx] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[positions] ), identifier[batch_size] ):
identifier[probs] , identifier[values] = identifier[player] . identifier[network] . identifier[run_many] ( identifier[positions] [ identifier[idx] : identifier[idx] + identifier[batch_size] ])
identifier[prob_list] . identifier[append] ( identifier[probs] )
identifier[value_list] . identifier[append] ( identifier[values] )
keyword[return] identifier[np] . identifier[concatenate] ( identifier[prob_list] , identifier[axis] = literal[int] ), identifier[np] . identifier[concatenate] ( identifier[value_list] , identifier[axis] = literal[int] ) | def batch_run_many(player, positions, batch_size=100):
"""Used to avoid a memory oveflow issue when running the network
on too many positions. TODO: This should be a member function of
player.network?"""
prob_list = []
value_list = []
for idx in range(0, len(positions), batch_size):
(probs, values) = player.network.run_many(positions[idx:idx + batch_size])
prob_list.append(probs)
value_list.append(values) # depends on [control=['for'], data=['idx']]
return (np.concatenate(prob_list, axis=0), np.concatenate(value_list, axis=0)) |
def publish(self, payload, **kwargs):
    """Publish *payload* to the broker.

    Keyword arguments override the publisher's configured defaults for
    this single call; anything not consumed here is forwarded verbatim
    to the underlying producer's ``publish``.
    """
    call_kwargs = self.publish_kwargs.copy()

    # Headers configured when the publisher was instantiated are merged
    # with the ones supplied now; "extra" headers always take precedence.
    headers = call_kwargs.pop('headers', {}).copy()
    headers.update(kwargs.pop('headers', {}))
    headers.update(kwargs.pop('extra_headers', {}))

    use_confirms = kwargs.pop('use_confirms', self.use_confirms)
    transport_options = kwargs.pop('transport_options', self.transport_options)
    # NOTE: this writes into the shared options dict when the caller did
    # not pass their own transport_options.
    transport_options['confirm_publish'] = use_confirms

    delivery_mode = kwargs.pop('delivery_mode', self.delivery_mode)
    mandatory = kwargs.pop('mandatory', self.mandatory)
    priority = kwargs.pop('priority', self.priority)
    expiration = kwargs.pop('expiration', self.expiration)
    serializer = kwargs.pop('serializer', self.serializer)
    compression = kwargs.pop('compression', self.compression)
    retry = kwargs.pop('retry', self.retry)
    retry_policy = kwargs.pop('retry_policy', self.retry_policy)

    declare = list(self.declare)
    declare.extend(kwargs.pop('declare', ()))

    call_kwargs.update(kwargs)  # remaining publish-time kwargs win

    with get_producer(self.amqp_uri,
                      use_confirms,
                      self.ssl,
                      transport_options,
                      ) as producer:
        try:
            producer.publish(
                payload,
                headers=headers,
                delivery_mode=delivery_mode,
                mandatory=mandatory,
                priority=priority,
                expiration=expiration,
                compression=compression,
                declare=declare,
                retry=retry,
                retry_policy=retry_policy,
                serializer=serializer,
                **call_kwargs
            )
        except ChannelError as exc:
            # The broker signals an unroutable mandatory message with a
            # NO_ROUTE channel error.
            if "NO_ROUTE" in str(exc):
                raise UndeliverableMessage()
            raise

    if mandatory and not use_confirms:
        warnings.warn(
            "Mandatory delivery was requested, but "
            "unroutable messages cannot be detected without "
            "publish confirms enabled."
        )
constant[ Publish a message.
]
variable[publish_kwargs] assign[=] call[name[self].publish_kwargs.copy, parameter[]]
variable[headers] assign[=] call[call[name[publish_kwargs].pop, parameter[constant[headers], dictionary[[], []]]].copy, parameter[]]
call[name[headers].update, parameter[call[name[kwargs].pop, parameter[constant[headers], dictionary[[], []]]]]]
call[name[headers].update, parameter[call[name[kwargs].pop, parameter[constant[extra_headers], dictionary[[], []]]]]]
variable[use_confirms] assign[=] call[name[kwargs].pop, parameter[constant[use_confirms], name[self].use_confirms]]
variable[transport_options] assign[=] call[name[kwargs].pop, parameter[constant[transport_options], name[self].transport_options]]
call[name[transport_options]][constant[confirm_publish]] assign[=] name[use_confirms]
variable[delivery_mode] assign[=] call[name[kwargs].pop, parameter[constant[delivery_mode], name[self].delivery_mode]]
variable[mandatory] assign[=] call[name[kwargs].pop, parameter[constant[mandatory], name[self].mandatory]]
variable[priority] assign[=] call[name[kwargs].pop, parameter[constant[priority], name[self].priority]]
variable[expiration] assign[=] call[name[kwargs].pop, parameter[constant[expiration], name[self].expiration]]
variable[serializer] assign[=] call[name[kwargs].pop, parameter[constant[serializer], name[self].serializer]]
variable[compression] assign[=] call[name[kwargs].pop, parameter[constant[compression], name[self].compression]]
variable[retry] assign[=] call[name[kwargs].pop, parameter[constant[retry], name[self].retry]]
variable[retry_policy] assign[=] call[name[kwargs].pop, parameter[constant[retry_policy], name[self].retry_policy]]
variable[declare] assign[=] call[name[self].declare][<ast.Slice object at 0x7da18ede7e50>]
call[name[declare].extend, parameter[call[name[kwargs].pop, parameter[constant[declare], tuple[[]]]]]]
call[name[publish_kwargs].update, parameter[name[kwargs]]]
with call[name[get_producer], parameter[name[self].amqp_uri, name[use_confirms], name[self].ssl, name[transport_options]]] begin[:]
<ast.Try object at 0x7da18ede5fc0>
if name[mandatory] begin[:]
if <ast.UnaryOp object at 0x7da18ede65f0> begin[:]
call[name[warnings].warn, parameter[constant[Mandatory delivery was requested, but unroutable messages cannot be detected without publish confirms enabled.]]] | keyword[def] identifier[publish] ( identifier[self] , identifier[payload] ,** identifier[kwargs] ):
literal[string]
identifier[publish_kwargs] = identifier[self] . identifier[publish_kwargs] . identifier[copy] ()
identifier[headers] = identifier[publish_kwargs] . identifier[pop] ( literal[string] ,{}). identifier[copy] ()
identifier[headers] . identifier[update] ( identifier[kwargs] . identifier[pop] ( literal[string] ,{}))
identifier[headers] . identifier[update] ( identifier[kwargs] . identifier[pop] ( literal[string] ,{}))
identifier[use_confirms] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[use_confirms] )
identifier[transport_options] = identifier[kwargs] . identifier[pop] ( literal[string] ,
identifier[self] . identifier[transport_options]
)
identifier[transport_options] [ literal[string] ]= identifier[use_confirms]
identifier[delivery_mode] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[delivery_mode] )
identifier[mandatory] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[mandatory] )
identifier[priority] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[priority] )
identifier[expiration] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[expiration] )
identifier[serializer] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[serializer] )
identifier[compression] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[compression] )
identifier[retry] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[retry] )
identifier[retry_policy] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[retry_policy] )
identifier[declare] = identifier[self] . identifier[declare] [:]
identifier[declare] . identifier[extend] ( identifier[kwargs] . identifier[pop] ( literal[string] ,()))
identifier[publish_kwargs] . identifier[update] ( identifier[kwargs] )
keyword[with] identifier[get_producer] ( identifier[self] . identifier[amqp_uri] ,
identifier[use_confirms] ,
identifier[self] . identifier[ssl] ,
identifier[transport_options] ,
) keyword[as] identifier[producer] :
keyword[try] :
identifier[producer] . identifier[publish] (
identifier[payload] ,
identifier[headers] = identifier[headers] ,
identifier[delivery_mode] = identifier[delivery_mode] ,
identifier[mandatory] = identifier[mandatory] ,
identifier[priority] = identifier[priority] ,
identifier[expiration] = identifier[expiration] ,
identifier[compression] = identifier[compression] ,
identifier[declare] = identifier[declare] ,
identifier[retry] = identifier[retry] ,
identifier[retry_policy] = identifier[retry_policy] ,
identifier[serializer] = identifier[serializer] ,
** identifier[publish_kwargs]
)
keyword[except] identifier[ChannelError] keyword[as] identifier[exc] :
keyword[if] literal[string] keyword[in] identifier[str] ( identifier[exc] ):
keyword[raise] identifier[UndeliverableMessage] ()
keyword[raise]
keyword[if] identifier[mandatory] :
keyword[if] keyword[not] identifier[use_confirms] :
identifier[warnings] . identifier[warn] (
literal[string]
literal[string]
literal[string]
) | def publish(self, payload, **kwargs):
""" Publish a message.
"""
publish_kwargs = self.publish_kwargs.copy()
# merge headers from when the publisher was instantiated
# with any provided now; "extra" headers always win
headers = publish_kwargs.pop('headers', {}).copy()
headers.update(kwargs.pop('headers', {}))
headers.update(kwargs.pop('extra_headers', {}))
use_confirms = kwargs.pop('use_confirms', self.use_confirms)
transport_options = kwargs.pop('transport_options', self.transport_options)
transport_options['confirm_publish'] = use_confirms
delivery_mode = kwargs.pop('delivery_mode', self.delivery_mode)
mandatory = kwargs.pop('mandatory', self.mandatory)
priority = kwargs.pop('priority', self.priority)
expiration = kwargs.pop('expiration', self.expiration)
serializer = kwargs.pop('serializer', self.serializer)
compression = kwargs.pop('compression', self.compression)
retry = kwargs.pop('retry', self.retry)
retry_policy = kwargs.pop('retry_policy', self.retry_policy)
declare = self.declare[:]
declare.extend(kwargs.pop('declare', ()))
publish_kwargs.update(kwargs) # remaining publish-time kwargs win
with get_producer(self.amqp_uri, use_confirms, self.ssl, transport_options) as producer:
try:
producer.publish(payload, headers=headers, delivery_mode=delivery_mode, mandatory=mandatory, priority=priority, expiration=expiration, compression=compression, declare=declare, retry=retry, retry_policy=retry_policy, serializer=serializer, **publish_kwargs) # depends on [control=['try'], data=[]]
except ChannelError as exc:
if 'NO_ROUTE' in str(exc):
raise UndeliverableMessage() # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['exc']]
if mandatory:
if not use_confirms:
warnings.warn('Mandatory delivery was requested, but unroutable messages cannot be detected without publish confirms enabled.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['producer']] |
def get_jids():
    '''
    Return a list of all job ids
    '''
    serv = _get_serv(ret=None)
    # Fetch all cached job loads in one round trip: {jid: serialized load, ...}
    cached = serv.get_multi(_get_list(serv, 'jids'))
    return {
        jid: salt.utils.jid.format_jid_instance(jid, salt.utils.json.loads(blob))
        for jid, blob in six.iteritems(cached)
    }
return ret | def function[get_jids, parameter[]]:
constant[
Return a list of all job ids
]
variable[serv] assign[=] call[name[_get_serv], parameter[]]
variable[jids] assign[=] call[name[_get_list], parameter[name[serv], constant[jids]]]
variable[loads] assign[=] call[name[serv].get_multi, parameter[name[jids]]]
variable[ret] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da204620460>, <ast.Name object at 0x7da204620730>]]] in starred[call[name[six].iteritems, parameter[name[loads]]]] begin[:]
call[name[ret]][name[jid]] assign[=] call[name[salt].utils.jid.format_jid_instance, parameter[name[jid], call[name[salt].utils.json.loads, parameter[name[load]]]]]
return[name[ret]] | keyword[def] identifier[get_jids] ():
literal[string]
identifier[serv] = identifier[_get_serv] ( identifier[ret] = keyword[None] )
identifier[jids] = identifier[_get_list] ( identifier[serv] , literal[string] )
identifier[loads] = identifier[serv] . identifier[get_multi] ( identifier[jids] )
identifier[ret] ={}
keyword[for] identifier[jid] , identifier[load] keyword[in] identifier[six] . identifier[iteritems] ( identifier[loads] ):
identifier[ret] [ identifier[jid] ]= identifier[salt] . identifier[utils] . identifier[jid] . identifier[format_jid_instance] ( identifier[jid] , identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[load] ))
keyword[return] identifier[ret] | def get_jids():
"""
Return a list of all job ids
"""
serv = _get_serv(ret=None)
jids = _get_list(serv, 'jids')
loads = serv.get_multi(jids) # {jid: load, jid: load, ...}
ret = {}
for (jid, load) in six.iteritems(loads):
ret[jid] = salt.utils.jid.format_jid_instance(jid, salt.utils.json.loads(load)) # depends on [control=['for'], data=[]]
return ret |
def _raise_if_bad_http_status_and_method(self, response):
"""Check response status code is valid for a Put or Patch
request. Must be 200, 201, 202, or 204.
:raises: BadStatus if invalid status.
"""
code = response.status_code
if code in {200, 202} or \
(code == 201 and self.method in {'PUT', 'PATCH'}) or \
(code == 204 and self.method in {'DELETE', 'POST'}):
return
raise BadStatus(
"Invalid return status for {!r} operation".format(self.method)) | def function[_raise_if_bad_http_status_and_method, parameter[self, response]]:
constant[Check response status code is valid for a Put or Patch
request. Must be 200, 201, 202, or 204.
:raises: BadStatus if invalid status.
]
variable[code] assign[=] name[response].status_code
if <ast.BoolOp object at 0x7da1b0d50c40> begin[:]
return[None]
<ast.Raise object at 0x7da1b0d51270> | keyword[def] identifier[_raise_if_bad_http_status_and_method] ( identifier[self] , identifier[response] ):
literal[string]
identifier[code] = identifier[response] . identifier[status_code]
keyword[if] identifier[code] keyword[in] { literal[int] , literal[int] } keyword[or] ( identifier[code] == literal[int] keyword[and] identifier[self] . identifier[method] keyword[in] { literal[string] , literal[string] }) keyword[or] ( identifier[code] == literal[int] keyword[and] identifier[self] . identifier[method] keyword[in] { literal[string] , literal[string] }):
keyword[return]
keyword[raise] identifier[BadStatus] (
literal[string] . identifier[format] ( identifier[self] . identifier[method] )) | def _raise_if_bad_http_status_and_method(self, response):
"""Check response status code is valid for a Put or Patch
request. Must be 200, 201, 202, or 204.
:raises: BadStatus if invalid status.
"""
code = response.status_code
if code in {200, 202} or (code == 201 and self.method in {'PUT', 'PATCH'}) or (code == 204 and self.method in {'DELETE', 'POST'}):
return # depends on [control=['if'], data=[]]
raise BadStatus('Invalid return status for {!r} operation'.format(self.method)) |
def cleanup(self):
    """Remove expired associations.

    An association is expired once its ``expiresIn`` has reached 0.
    @return: tuple of (removed associations, remaining associations)
    """
    # Collect first, delete second: we must not mutate the dict while
    # iterating over it.
    expired = [handle for handle, assoc in self.assocs.items()
               if assoc.expiresIn == 0]
    for handle in expired:
        del self.assocs[handle]
    return len(expired), len(self.assocs)
constant[Remove expired associations.
@return: tuple of (removed associations, remaining associations)
]
variable[remove] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b06c87f0>, <ast.Name object at 0x7da1b06ca800>]]] in starred[call[name[self].assocs.items, parameter[]]] begin[:]
if compare[name[assoc].expiresIn equal[==] constant[0]] begin[:]
call[name[remove].append, parameter[name[handle]]]
for taget[name[handle]] in starred[name[remove]] begin[:]
<ast.Delete object at 0x7da1b06ca1a0>
return[tuple[[<ast.Call object at 0x7da1b06cb430>, <ast.Call object at 0x7da1b06c8bb0>]]] | keyword[def] identifier[cleanup] ( identifier[self] ):
literal[string]
identifier[remove] =[]
keyword[for] identifier[handle] , identifier[assoc] keyword[in] identifier[self] . identifier[assocs] . identifier[items] ():
keyword[if] identifier[assoc] . identifier[expiresIn] == literal[int] :
identifier[remove] . identifier[append] ( identifier[handle] )
keyword[for] identifier[handle] keyword[in] identifier[remove] :
keyword[del] identifier[self] . identifier[assocs] [ identifier[handle] ]
keyword[return] identifier[len] ( identifier[remove] ), identifier[len] ( identifier[self] . identifier[assocs] ) | def cleanup(self):
"""Remove expired associations.
@return: tuple of (removed associations, remaining associations)
"""
remove = []
for (handle, assoc) in self.assocs.items():
if assoc.expiresIn == 0:
remove.append(handle) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for handle in remove:
del self.assocs[handle] # depends on [control=['for'], data=['handle']]
return (len(remove), len(self.assocs)) |
async def get_status(self, filters=None, utc=False):
    """Return the full status of the model.

    :param str filters: Optional list of applications, units, or machines
        to include, which can use wildcards ('*').
    :param bool utc: Display time as UTC in RFC3339 format
    """
    facade = client.ClientFacade.from_connection(self.connection())
    return await facade.FullStatus(filters)
literal[string]
identifier[client_facade] = identifier[client] . identifier[ClientFacade] . identifier[from_connection] ( identifier[self] . identifier[connection] ())
keyword[return] keyword[await] identifier[client_facade] . identifier[FullStatus] ( identifier[filters] ) | async def get_status(self, filters=None, utc=False):
"""Return the status of the model.
:param str filters: Optional list of applications, units, or machines
to include, which can use wildcards ('*').
:param bool utc: Display time as UTC in RFC3339 format
"""
client_facade = client.ClientFacade.from_connection(self.connection())
return await client_facade.FullStatus(filters) |
def read(path, savedir):
    """Return the contents of *path*.

    ``http://`` URLs are downloaded and cached in *savedir* under their
    base name; later calls reuse the cached copy.  If the cache file
    cannot be written, the downloaded text is returned directly without
    caching.

    :param path: local file path or an ``http://`` URL
    :param savedir: directory used to cache downloaded files
    :return: the file or response contents
    """
    if path.startswith('http://'):
        save_path = op.join(savedir, op.basename(path))
        if not op.exists(save_path):
            # Close the HTTP response even if read() raises.
            response = urllib2.urlopen(path)
            try:
                src = response.read()
            finally:
                response.close()
            try:
                # Context manager guarantees the cache file is closed
                # (the original leaked both this handle and the reads).
                with open(save_path, 'w') as cache:
                    cache.write(src)
            except IOError:
                # Cache location not writable -- fall back to the
                # in-memory copy we already downloaded.
                return src
        path = save_path
    with open(path, 'r') as f:
        return f.read()
constant[ Read file from path ]
if call[name[path].startswith, parameter[constant[http://]]] begin[:]
variable[name] assign[=] call[name[op].basename, parameter[name[path]]]
variable[save_path] assign[=] call[name[op].join, parameter[name[savedir], name[name]]]
if <ast.UnaryOp object at 0x7da1b207c580> begin[:]
variable[src] assign[=] call[call[name[urllib2].urlopen, parameter[name[path]]].read, parameter[]]
<ast.Try object at 0x7da1b207eef0>
variable[path] assign[=] name[save_path]
return[call[call[name[open], parameter[name[path], constant[r]]].read, parameter[]]] | keyword[def] identifier[read] ( identifier[path] , identifier[savedir] ):
literal[string]
keyword[if] identifier[path] . identifier[startswith] ( literal[string] ):
identifier[name] = identifier[op] . identifier[basename] ( identifier[path] )
identifier[save_path] = identifier[op] . identifier[join] ( identifier[savedir] , identifier[name] )
keyword[if] keyword[not] identifier[op] . identifier[exists] ( identifier[save_path] ):
identifier[src] = identifier[urllib2] . identifier[urlopen] ( identifier[path] ). identifier[read] ()
keyword[try] :
identifier[open] ( identifier[save_path] , literal[string] ). identifier[write] ( identifier[src] )
keyword[except] identifier[IOError] :
keyword[return] identifier[src]
identifier[path] = identifier[save_path]
keyword[return] identifier[open] ( identifier[path] , literal[string] ). identifier[read] () | def read(path, savedir):
""" Read file from path """
if path.startswith('http://'):
name = op.basename(path)
save_path = op.join(savedir, name)
if not op.exists(save_path):
src = urllib2.urlopen(path).read()
try:
open(save_path, 'w').write(src) # depends on [control=['try'], data=[]]
except IOError:
return src # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
path = save_path # depends on [control=['if'], data=[]]
return open(path, 'r').read() |
def _clear_stats(self):
    """
    Resets every tracked broker statistic counter to zero
    """
    counters = (
        STAT_BYTES_RECEIVED,
        STAT_BYTES_SENT,
        STAT_MSG_RECEIVED,
        STAT_MSG_SENT,
        STAT_CLIENTS_MAXIMUM,
        STAT_CLIENTS_CONNECTED,
        STAT_CLIENTS_DISCONNECTED,
        STAT_PUBLISH_RECEIVED,
        STAT_PUBLISH_SENT,
    )
    # Zero all counters in one in-place update of the stats dict.
    self._stats.update(dict.fromkeys(counters, 0))
constant[
Initializes broker statistics data structures
]
for taget[name[stat]] in starred[tuple[[<ast.Name object at 0x7da18fe92fb0>, <ast.Name object at 0x7da18fe92cb0>, <ast.Name object at 0x7da18fe92290>, <ast.Name object at 0x7da18fe91b40>, <ast.Name object at 0x7da18fe907f0>, <ast.Name object at 0x7da18fe910f0>, <ast.Name object at 0x7da18fe93700>, <ast.Name object at 0x7da18fe91420>, <ast.Name object at 0x7da18fe90af0>]]] begin[:]
call[name[self]._stats][name[stat]] assign[=] constant[0] | keyword[def] identifier[_clear_stats] ( identifier[self] ):
literal[string]
keyword[for] identifier[stat] keyword[in] ( identifier[STAT_BYTES_RECEIVED] ,
identifier[STAT_BYTES_SENT] ,
identifier[STAT_MSG_RECEIVED] ,
identifier[STAT_MSG_SENT] ,
identifier[STAT_CLIENTS_MAXIMUM] ,
identifier[STAT_CLIENTS_CONNECTED] ,
identifier[STAT_CLIENTS_DISCONNECTED] ,
identifier[STAT_PUBLISH_RECEIVED] ,
identifier[STAT_PUBLISH_SENT] ):
identifier[self] . identifier[_stats] [ identifier[stat] ]= literal[int] | def _clear_stats(self):
"""
Initializes broker statistics data structures
"""
for stat in (STAT_BYTES_RECEIVED, STAT_BYTES_SENT, STAT_MSG_RECEIVED, STAT_MSG_SENT, STAT_CLIENTS_MAXIMUM, STAT_CLIENTS_CONNECTED, STAT_CLIENTS_DISCONNECTED, STAT_PUBLISH_RECEIVED, STAT_PUBLISH_SENT):
self._stats[stat] = 0 # depends on [control=['for'], data=['stat']] |
def memoize(fn):
    '''Cache the results of a function that only takes positional arguments.'''
    results = {}
    @wraps(fn)
    def wrapper(*args):
        # EAFP: a cache hit is the common case, so try the lookup first.
        try:
            return results[args]
        except KeyError:
            value = fn(*args)
            results[args] = value
            return value
    return wrapper
constant[Cache the results of a function that only takes positional arguments.]
variable[cache] assign[=] dictionary[[], []]
def function[wrapped_function, parameter[]]:
if compare[name[args] in name[cache]] begin[:]
return[call[name[cache]][name[args]]]
return[name[wrapped_function]] | keyword[def] identifier[memoize] ( identifier[fn] ):
literal[string]
identifier[cache] ={}
@ identifier[wraps] ( identifier[fn] )
keyword[def] identifier[wrapped_function] (* identifier[args] ):
keyword[if] identifier[args] keyword[in] identifier[cache] :
keyword[return] identifier[cache] [ identifier[args] ]
keyword[else] :
identifier[result] = identifier[fn] (* identifier[args] )
identifier[cache] [ identifier[args] ]= identifier[result]
keyword[return] identifier[result]
keyword[return] identifier[wrapped_function] | def memoize(fn):
"""Cache the results of a function that only takes positional arguments."""
cache = {}
@wraps(fn)
def wrapped_function(*args):
if args in cache:
return cache[args] # depends on [control=['if'], data=['args', 'cache']]
else:
result = fn(*args)
cache[args] = result
return result
return wrapped_function |
def set_source_google_finance(self):
    """
    Set data source to Google Finance
    """
    # NOTE(review): pandas.io.data was removed from pandas (it moved to
    # the separate pandas-datareader package); this attribute lookup
    # will fail on modern pandas -- confirm the pinned pandas version.
    self.data_worker = data_worker
    self.worker_args = {
        "function": pandas.io.data.DataReader,
        "input": self.input_queue,
        "output": self.output_map,
        "source": 'google',
    }
    self.source_name = "Google Finance"
constant[
Set data source to Google Finance
]
name[self].data_worker assign[=] name[data_worker]
name[self].worker_args assign[=] dictionary[[<ast.Constant object at 0x7da18c4cc0d0>, <ast.Constant object at 0x7da18c4ccd90>, <ast.Constant object at 0x7da18c4cfcd0>, <ast.Constant object at 0x7da18c4cdd80>], [<ast.Attribute object at 0x7da18c4ce8c0>, <ast.Attribute object at 0x7da18c4cfa90>, <ast.Attribute object at 0x7da18c4cd780>, <ast.Constant object at 0x7da18c4ccfd0>]]
name[self].source_name assign[=] constant[Google Finance] | keyword[def] identifier[set_source_google_finance] ( identifier[self] ):
literal[string]
identifier[self] . identifier[data_worker] = identifier[data_worker]
identifier[self] . identifier[worker_args] ={ literal[string] : identifier[pandas] . identifier[io] . identifier[data] . identifier[DataReader] , literal[string] : identifier[self] . identifier[input_queue] , literal[string] : identifier[self] . identifier[output_map] ,
literal[string] : literal[string] }
identifier[self] . identifier[source_name] = literal[string] | def set_source_google_finance(self):
"""
Set data source to Google Finance
"""
self.data_worker = data_worker
self.worker_args = {'function': pandas.io.data.DataReader, 'input': self.input_queue, 'output': self.output_map, 'source': 'google'}
self.source_name = 'Google Finance' |
def file_matches_extensions(self, fname, extensions):
    """
    True if *fname* ends with a dot followed by one of *extensions*.
    :param fname: file name to test
    :param extensions: a single extension or a list of extensions
    :return: bool
    """
    candidates = extensions if isinstance(extensions, list) else [extensions]
    return any(fname.endswith('.%s' % ext) for ext in candidates)
constant[
True if file matches one of extensions
:param fname:
:param extensions:
:return:
]
if <ast.UnaryOp object at 0x7da1b23466b0> begin[:]
variable[extensions] assign[=] list[[<ast.Name object at 0x7da1b2344f10>]]
for taget[name[ext]] in starred[name[extensions]] begin[:]
if call[name[fname].endswith, parameter[binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> name[ext]]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[file_matches_extensions] ( identifier[self] , identifier[fname] , identifier[extensions] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[extensions] , identifier[list] ):
identifier[extensions] =[ identifier[extensions] ]
keyword[for] identifier[ext] keyword[in] identifier[extensions] :
keyword[if] identifier[fname] . identifier[endswith] ( literal[string] % identifier[ext] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def file_matches_extensions(self, fname, extensions):
"""
True if file matches one of extensions
:param fname:
:param extensions:
:return:
"""
if not isinstance(extensions, list):
extensions = [extensions] # depends on [control=['if'], data=[]]
for ext in extensions:
if fname.endswith('.%s' % ext):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ext']]
return False |
def add_distributed_artifact(self, id, **kwargs):
    """
    Adds an artifact to the list of distributed artifacts for this product milestone
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_distributed_artifact(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: Product milestone id (required)
    :param ArtifactRest body:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # variant: with a callback it returns the request thread, otherwise
    # the response data -- so a single call covers both cases.
    return self.add_distributed_artifact_with_http_info(id, **kwargs)
constant[
Adds an artifact to the list of distributed artifacts for this product milestone
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_distributed_artifact(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product milestone id (required)
:param ArtifactRest body:
:return: None
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[callback]]] begin[:]
return[call[name[self].add_distributed_artifact_with_http_info, parameter[name[id]]]] | keyword[def] identifier[add_distributed_artifact] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[add_distributed_artifact_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[add_distributed_artifact_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[return] identifier[data] | def add_distributed_artifact(self, id, **kwargs):
"""
Adds an artifact to the list of distributed artifacts for this product milestone
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_distributed_artifact(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product milestone id (required)
:param ArtifactRest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_distributed_artifact_with_http_info(id, **kwargs) # depends on [control=['if'], data=[]]
else:
data = self.add_distributed_artifact_with_http_info(id, **kwargs)
return data |
def _rest(url, req, data=None):
"""Send a rest rest request to the server."""
if url.upper().startswith("HTTPS"):
print("Secure connection required: Please use HTTPS or https")
return ""
req = req.upper()
if req != "GET" and req != "PUT" and req != "POST" and req != "DELETE":
return ""
status, body = _api_action(url, req, data)
if (int(status) >= 200 and int(status) <= 226):
return body
else:
return body | def function[_rest, parameter[url, req, data]]:
constant[Send a rest rest request to the server.]
if call[call[name[url].upper, parameter[]].startswith, parameter[constant[HTTPS]]] begin[:]
call[name[print], parameter[constant[Secure connection required: Please use HTTPS or https]]]
return[constant[]]
variable[req] assign[=] call[name[req].upper, parameter[]]
if <ast.BoolOp object at 0x7da1b2512860> begin[:]
return[constant[]]
<ast.Tuple object at 0x7da1b2509db0> assign[=] call[name[_api_action], parameter[name[url], name[req], name[data]]]
if <ast.BoolOp object at 0x7da1b2508100> begin[:]
return[name[body]] | keyword[def] identifier[_rest] ( identifier[url] , identifier[req] , identifier[data] = keyword[None] ):
literal[string]
keyword[if] identifier[url] . identifier[upper] (). identifier[startswith] ( literal[string] ):
identifier[print] ( literal[string] )
keyword[return] literal[string]
identifier[req] = identifier[req] . identifier[upper] ()
keyword[if] identifier[req] != literal[string] keyword[and] identifier[req] != literal[string] keyword[and] identifier[req] != literal[string] keyword[and] identifier[req] != literal[string] :
keyword[return] literal[string]
identifier[status] , identifier[body] = identifier[_api_action] ( identifier[url] , identifier[req] , identifier[data] )
keyword[if] ( identifier[int] ( identifier[status] )>= literal[int] keyword[and] identifier[int] ( identifier[status] )<= literal[int] ):
keyword[return] identifier[body]
keyword[else] :
keyword[return] identifier[body] | def _rest(url, req, data=None):
"""Send a rest rest request to the server."""
if url.upper().startswith('HTTPS'):
print('Secure connection required: Please use HTTPS or https')
return '' # depends on [control=['if'], data=[]]
req = req.upper()
if req != 'GET' and req != 'PUT' and (req != 'POST') and (req != 'DELETE'):
return '' # depends on [control=['if'], data=[]]
(status, body) = _api_action(url, req, data)
if int(status) >= 200 and int(status) <= 226:
return body # depends on [control=['if'], data=[]]
else:
return body |
def get_notes(self):
    """Return a list of all of the project's notes.

    Syncs the owning user's state with the server first, so the result
    reflects the latest data, then filters the owner's notes down to the
    ones attached to this project.

    :return: A list of notes.
    :rtype: list of :class:`pytodoist.todoist.Note`

    >>> from pytodoist import todoist
    >>> user = todoist.login('john.doe@gmail.com', 'password')
    >>> project = user.get_project('PyTodoist')
    >>> notes = project.get_notes()
    """
    # Refresh the owner's cached state from the server.
    self.owner.sync()
    # All notes known to the owner, across every project.
    notes = self.owner.notes.values()
    # Keep only the notes that belong to this project.
    return [n for n in notes if n.project_id == self.id] | def function[get_notes, parameter[self]]:
constant[Return a list of all of the project's notes.
:return: A list of notes.
:rtype: list of :class:`pytodoist.todoist.Note`
>>> from pytodoist import todoist
>>> user = todoist.login('john.doe@gmail.com', 'password')
>>> project = user.get_project('PyTodoist')
>>> notes = project.get_notes()
]
call[name[self].owner.sync, parameter[]]
variable[notes] assign[=] call[name[self].owner.notes.values, parameter[]]
return[<ast.ListComp object at 0x7da1b0e72f80>] | keyword[def] identifier[get_notes] ( identifier[self] ):
literal[string]
identifier[self] . identifier[owner] . identifier[sync] ()
identifier[notes] = identifier[self] . identifier[owner] . identifier[notes] . identifier[values] ()
keyword[return] [ identifier[n] keyword[for] identifier[n] keyword[in] identifier[notes] keyword[if] identifier[n] . identifier[project_id] == identifier[self] . identifier[id] ] | def get_notes(self):
"""Return a list of all of the project's notes.
:return: A list of notes.
:rtype: list of :class:`pytodoist.todoist.Note`
>>> from pytodoist import todoist
>>> user = todoist.login('john.doe@gmail.com', 'password')
>>> project = user.get_project('PyTodoist')
>>> notes = project.get_notes()
"""
self.owner.sync()
notes = self.owner.notes.values()
return [n for n in notes if n.project_id == self.id] |
def draw_clusters(data, clusters, noise = [], marker_descr = '.', hide_axes = False, axes = None, display_result = True):
    """!
    @brief Displays clusters for data in 2D or 3D.
    @param[in] data (list): Points that are described by coordinates represented.
    @param[in] clusters (list): Clusters that are represented by lists of indexes where each index corresponds to point in data.
    @param[in] noise (list): Points that are regarded to noise.
    @param[in] marker_descr (string): Marker for displaying points.
    @param[in] hide_axes (bool): If True - axes is not displayed.
    @param[in] axes (ax) Matplotlib axes where clusters should be drawn, if it is not specified (None) then new plot will be created.
    @param[in] display_result (bool): If specified then matplotlib axes will be used for drawing and plot will not be shown.
    @return (ax) Matplotlib axes where drawn clusters are presented.
    """
    # NOTE(review): `noise=[]` is a mutable default argument; it is only
    # iterated (never mutated) here, so it is harmless in practice.
    # Get dimension: taken from the first raw data point when `data` is
    # given, otherwise from the first point of the first cluster (clusters
    # are then assumed to hold coordinates instead of indexes into `data`).
    dimension = 0;
    if ( (data is not None) and (clusters is not None) ):
        dimension = len(data[0]);
    elif ( (data is None) and (clusters is not None) ):
        dimension = len(clusters[0][0]);
    else:
        raise NameError('Data or clusters should be specified exactly.');
    "Draw clusters"
    # Fixed palette: exactly one color per cluster, so no more than
    # len(colors) clusters can be rendered.
    colors = [ 'red', 'blue', 'darkgreen', 'brown', 'violet',
               'deepskyblue', 'darkgrey', 'lightsalmon', 'deeppink', 'yellow',
               'black', 'mediumspringgreen', 'orange', 'darkviolet', 'darkblue',
               'silver', 'lime', 'pink', 'gold', 'bisque' ];
    if (len(clusters) > len(colors)):
        raise NameError('Impossible to represent clusters due to number of specified colors.');
    fig = plt.figure();
    if (axes is None):
        # Check for dimensions: 1D/2D share a flat subplot, 3D gets a
        # '3d'-projection axes; anything else is unsupported.
        if ((dimension) == 1 or (dimension == 2)):
            axes = fig.add_subplot(111);
        elif (dimension == 3):
            axes = fig.gca(projection='3d');
        else:
            raise NameError('Drawer supports only 2d and 3d data representation');
    color_index = 0;
    for cluster in clusters:
        color = colors[color_index];
        for item in cluster:
            # When `data` is None, `item` is the coordinate itself;
            # otherwise `item` is an index into `data`.
            if (dimension == 1):
                if (data is None):
                    axes.plot(item[0], 0.0, color = color, marker = marker_descr);
                else:
                    axes.plot(data[item][0], 0.0, color = color, marker = marker_descr);
            if (dimension == 2):
                if (data is None):
                    axes.plot(item[0], item[1], color = color, marker = marker_descr);
                else:
                    axes.plot(data[item][0], data[item][1], color = color, marker = marker_descr);
            elif (dimension == 3):
                if (data is None):
                    axes.scatter(item[0], item[1], item[2], c = color, marker = marker_descr);
                else:
                    axes.scatter(data[item][0], data[item][1], data[item][2], c = color, marker = marker_descr);
        color_index += 1;
    # Noise points are drawn in white ('w' + marker) with no dedicated color.
    for item in noise:
        if (dimension == 1):
            if (data is None):
                axes.plot(item[0], 0.0, 'w' + marker_descr);
            else:
                axes.plot(data[item][0], 0.0, 'w' + marker_descr);
        if (dimension == 2):
            if (data is None):
                axes.plot(item[0], item[1], 'w' + marker_descr);
            else:
                axes.plot(data[item][0], data[item][1], 'w' + marker_descr);
        elif (dimension == 3):
            if (data is None):
                axes.scatter(item[0], item[1], item[2], c = 'w', marker = marker_descr);
            else:
                axes.scatter(data[item][0], data[item][1], data[item][2], c = 'w', marker = marker_descr);
    axes.grid(True);
    # Optionally hide tick labels (the axes lines themselves stay visible).
    if (hide_axes is True):
        axes.xaxis.set_ticklabels([]);
        axes.yaxis.set_ticklabels([]);
        if (dimension == 3):
            axes.zaxis.set_ticklabels([]);
    if (display_result is True):
        plt.show();
    return axes; | def function[draw_clusters, parameter[data, clusters, noise, marker_descr, hide_axes, axes, display_result]]:
constant[!
@brief Displays clusters for data in 2D or 3D.
@param[in] data (list): Points that are described by coordinates represented.
@param[in] clusters (list): Clusters that are represented by lists of indexes where each index corresponds to point in data.
@param[in] noise (list): Points that are regarded to noise.
@param[in] marker_descr (string): Marker for displaying points.
@param[in] hide_axes (bool): If True - axes is not displayed.
@param[in] axes (ax) Matplotlib axes where clusters should be drawn, if it is not specified (None) then new plot will be created.
@param[in] display_result (bool): If specified then matplotlib axes will be used for drawing and plot will not be shown.
@return (ax) Matplotlib axes where drawn clusters are presented.
]
variable[dimension] assign[=] constant[0]
if <ast.BoolOp object at 0x7da1b01ba9b0> begin[:]
variable[dimension] assign[=] call[name[len], parameter[call[name[data]][constant[0]]]]
constant[Draw clusters]
variable[colors] assign[=] list[[<ast.Constant object at 0x7da1b014f850>, <ast.Constant object at 0x7da1b014f760>, <ast.Constant object at 0x7da1b014d450>, <ast.Constant object at 0x7da1b014dab0>, <ast.Constant object at 0x7da1b014f6a0>, <ast.Constant object at 0x7da1b014c820>, <ast.Constant object at 0x7da1b014d180>, <ast.Constant object at 0x7da1b014c640>, <ast.Constant object at 0x7da1b014fbb0>, <ast.Constant object at 0x7da1b014fb50>, <ast.Constant object at 0x7da1b014d3f0>, <ast.Constant object at 0x7da1b014d2a0>, <ast.Constant object at 0x7da1b014d750>, <ast.Constant object at 0x7da1b014cac0>, <ast.Constant object at 0x7da1b014ce20>, <ast.Constant object at 0x7da1b014f970>, <ast.Constant object at 0x7da1b014fac0>, <ast.Constant object at 0x7da1b014c700>, <ast.Constant object at 0x7da1b014e0e0>, <ast.Constant object at 0x7da1b014ff70>]]
if compare[call[name[len], parameter[name[clusters]]] greater[>] call[name[len], parameter[name[colors]]]] begin[:]
<ast.Raise object at 0x7da1b014f220>
variable[fig] assign[=] call[name[plt].figure, parameter[]]
if compare[name[axes] is constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b014e3b0> begin[:]
variable[axes] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
variable[color_index] assign[=] constant[0]
for taget[name[cluster]] in starred[name[clusters]] begin[:]
variable[color] assign[=] call[name[colors]][name[color_index]]
for taget[name[item]] in starred[name[cluster]] begin[:]
if compare[name[dimension] equal[==] constant[1]] begin[:]
if compare[name[data] is constant[None]] begin[:]
call[name[axes].plot, parameter[call[name[item]][constant[0]], constant[0.0]]]
if compare[name[dimension] equal[==] constant[2]] begin[:]
if compare[name[data] is constant[None]] begin[:]
call[name[axes].plot, parameter[call[name[item]][constant[0]], call[name[item]][constant[1]]]]
<ast.AugAssign object at 0x7da1b0198070>
for taget[name[item]] in starred[name[noise]] begin[:]
if compare[name[dimension] equal[==] constant[1]] begin[:]
if compare[name[data] is constant[None]] begin[:]
call[name[axes].plot, parameter[call[name[item]][constant[0]], constant[0.0], binary_operation[constant[w] + name[marker_descr]]]]
if compare[name[dimension] equal[==] constant[2]] begin[:]
if compare[name[data] is constant[None]] begin[:]
call[name[axes].plot, parameter[call[name[item]][constant[0]], call[name[item]][constant[1]], binary_operation[constant[w] + name[marker_descr]]]]
call[name[axes].grid, parameter[constant[True]]]
if compare[name[hide_axes] is constant[True]] begin[:]
call[name[axes].xaxis.set_ticklabels, parameter[list[[]]]]
call[name[axes].yaxis.set_ticklabels, parameter[list[[]]]]
if compare[name[dimension] equal[==] constant[3]] begin[:]
call[name[axes].zaxis.set_ticklabels, parameter[list[[]]]]
if compare[name[display_result] is constant[True]] begin[:]
call[name[plt].show, parameter[]]
return[name[axes]] | keyword[def] identifier[draw_clusters] ( identifier[data] , identifier[clusters] , identifier[noise] =[], identifier[marker_descr] = literal[string] , identifier[hide_axes] = keyword[False] , identifier[axes] = keyword[None] , identifier[display_result] = keyword[True] ):
literal[string]
identifier[dimension] = literal[int] ;
keyword[if] (( identifier[data] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[clusters] keyword[is] keyword[not] keyword[None] )):
identifier[dimension] = identifier[len] ( identifier[data] [ literal[int] ]);
keyword[elif] (( identifier[data] keyword[is] keyword[None] ) keyword[and] ( identifier[clusters] keyword[is] keyword[not] keyword[None] )):
identifier[dimension] = identifier[len] ( identifier[clusters] [ literal[int] ][ literal[int] ]);
keyword[else] :
keyword[raise] identifier[NameError] ( literal[string] );
literal[string]
identifier[colors] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ];
keyword[if] ( identifier[len] ( identifier[clusters] )> identifier[len] ( identifier[colors] )):
keyword[raise] identifier[NameError] ( literal[string] );
identifier[fig] = identifier[plt] . identifier[figure] ();
keyword[if] ( identifier[axes] keyword[is] keyword[None] ):
keyword[if] (( identifier[dimension] )== literal[int] keyword[or] ( identifier[dimension] == literal[int] )):
identifier[axes] = identifier[fig] . identifier[add_subplot] ( literal[int] );
keyword[elif] ( identifier[dimension] == literal[int] ):
identifier[axes] = identifier[fig] . identifier[gca] ( identifier[projection] = literal[string] );
keyword[else] :
keyword[raise] identifier[NameError] ( literal[string] );
identifier[color_index] = literal[int] ;
keyword[for] identifier[cluster] keyword[in] identifier[clusters] :
identifier[color] = identifier[colors] [ identifier[color_index] ];
keyword[for] identifier[item] keyword[in] identifier[cluster] :
keyword[if] ( identifier[dimension] == literal[int] ):
keyword[if] ( identifier[data] keyword[is] keyword[None] ):
identifier[axes] . identifier[plot] ( identifier[item] [ literal[int] ], literal[int] , identifier[color] = identifier[color] , identifier[marker] = identifier[marker_descr] );
keyword[else] :
identifier[axes] . identifier[plot] ( identifier[data] [ identifier[item] ][ literal[int] ], literal[int] , identifier[color] = identifier[color] , identifier[marker] = identifier[marker_descr] );
keyword[if] ( identifier[dimension] == literal[int] ):
keyword[if] ( identifier[data] keyword[is] keyword[None] ):
identifier[axes] . identifier[plot] ( identifier[item] [ literal[int] ], identifier[item] [ literal[int] ], identifier[color] = identifier[color] , identifier[marker] = identifier[marker_descr] );
keyword[else] :
identifier[axes] . identifier[plot] ( identifier[data] [ identifier[item] ][ literal[int] ], identifier[data] [ identifier[item] ][ literal[int] ], identifier[color] = identifier[color] , identifier[marker] = identifier[marker_descr] );
keyword[elif] ( identifier[dimension] == literal[int] ):
keyword[if] ( identifier[data] keyword[is] keyword[None] ):
identifier[axes] . identifier[scatter] ( identifier[item] [ literal[int] ], identifier[item] [ literal[int] ], identifier[item] [ literal[int] ], identifier[c] = identifier[color] , identifier[marker] = identifier[marker_descr] );
keyword[else] :
identifier[axes] . identifier[scatter] ( identifier[data] [ identifier[item] ][ literal[int] ], identifier[data] [ identifier[item] ][ literal[int] ], identifier[data] [ identifier[item] ][ literal[int] ], identifier[c] = identifier[color] , identifier[marker] = identifier[marker_descr] );
identifier[color_index] += literal[int] ;
keyword[for] identifier[item] keyword[in] identifier[noise] :
keyword[if] ( identifier[dimension] == literal[int] ):
keyword[if] ( identifier[data] keyword[is] keyword[None] ):
identifier[axes] . identifier[plot] ( identifier[item] [ literal[int] ], literal[int] , literal[string] + identifier[marker_descr] );
keyword[else] :
identifier[axes] . identifier[plot] ( identifier[data] [ identifier[item] ][ literal[int] ], literal[int] , literal[string] + identifier[marker_descr] );
keyword[if] ( identifier[dimension] == literal[int] ):
keyword[if] ( identifier[data] keyword[is] keyword[None] ):
identifier[axes] . identifier[plot] ( identifier[item] [ literal[int] ], identifier[item] [ literal[int] ], literal[string] + identifier[marker_descr] );
keyword[else] :
identifier[axes] . identifier[plot] ( identifier[data] [ identifier[item] ][ literal[int] ], identifier[data] [ identifier[item] ][ literal[int] ], literal[string] + identifier[marker_descr] );
keyword[elif] ( identifier[dimension] == literal[int] ):
keyword[if] ( identifier[data] keyword[is] keyword[None] ):
identifier[axes] . identifier[scatter] ( identifier[item] [ literal[int] ], identifier[item] [ literal[int] ], identifier[item] [ literal[int] ], identifier[c] = literal[string] , identifier[marker] = identifier[marker_descr] );
keyword[else] :
identifier[axes] . identifier[scatter] ( identifier[data] [ identifier[item] ][ literal[int] ], identifier[data] [ identifier[item] ][ literal[int] ], identifier[data] [ identifier[item] ][ literal[int] ], identifier[c] = literal[string] , identifier[marker] = identifier[marker_descr] );
identifier[axes] . identifier[grid] ( keyword[True] );
keyword[if] ( identifier[hide_axes] keyword[is] keyword[True] ):
identifier[axes] . identifier[xaxis] . identifier[set_ticklabels] ([]);
identifier[axes] . identifier[yaxis] . identifier[set_ticklabels] ([]);
keyword[if] ( identifier[dimension] == literal[int] ):
identifier[axes] . identifier[zaxis] . identifier[set_ticklabels] ([]);
keyword[if] ( identifier[display_result] keyword[is] keyword[True] ):
identifier[plt] . identifier[show] ();
keyword[return] identifier[axes] ; | def draw_clusters(data, clusters, noise=[], marker_descr='.', hide_axes=False, axes=None, display_result=True):
"""!
@brief Displays clusters for data in 2D or 3D.
@param[in] data (list): Points that are described by coordinates represented.
@param[in] clusters (list): Clusters that are represented by lists of indexes where each index corresponds to point in data.
@param[in] noise (list): Points that are regarded to noise.
@param[in] marker_descr (string): Marker for displaying points.
@param[in] hide_axes (bool): If True - axes is not displayed.
@param[in] axes (ax) Matplotlib axes where clusters should be drawn, if it is not specified (None) then new plot will be created.
@param[in] display_result (bool): If specified then matplotlib axes will be used for drawing and plot will not be shown.
@return (ax) Matplotlib axes where drawn clusters are presented.
""" # Get dimension
dimension = 0
if data is not None and clusters is not None:
dimension = len(data[0]) # depends on [control=['if'], data=[]]
elif data is None and clusters is not None:
dimension = len(clusters[0][0]) # depends on [control=['if'], data=[]]
else:
raise NameError('Data or clusters should be specified exactly.')
'Draw clusters'
colors = ['red', 'blue', 'darkgreen', 'brown', 'violet', 'deepskyblue', 'darkgrey', 'lightsalmon', 'deeppink', 'yellow', 'black', 'mediumspringgreen', 'orange', 'darkviolet', 'darkblue', 'silver', 'lime', 'pink', 'gold', 'bisque']
if len(clusters) > len(colors):
raise NameError('Impossible to represent clusters due to number of specified colors.') # depends on [control=['if'], data=[]]
fig = plt.figure()
if axes is None: # Check for dimensions
if dimension == 1 or dimension == 2:
axes = fig.add_subplot(111) # depends on [control=['if'], data=[]]
elif dimension == 3:
axes = fig.gca(projection='3d') # depends on [control=['if'], data=[]]
else:
raise NameError('Drawer supports only 2d and 3d data representation') # depends on [control=['if'], data=['axes']]
color_index = 0
for cluster in clusters:
color = colors[color_index]
for item in cluster:
if dimension == 1:
if data is None:
axes.plot(item[0], 0.0, color=color, marker=marker_descr) # depends on [control=['if'], data=[]]
else:
axes.plot(data[item][0], 0.0, color=color, marker=marker_descr) # depends on [control=['if'], data=[]]
if dimension == 2:
if data is None:
axes.plot(item[0], item[1], color=color, marker=marker_descr) # depends on [control=['if'], data=[]]
else:
axes.plot(data[item][0], data[item][1], color=color, marker=marker_descr) # depends on [control=['if'], data=[]]
elif dimension == 3:
if data is None:
axes.scatter(item[0], item[1], item[2], c=color, marker=marker_descr) # depends on [control=['if'], data=[]]
else:
axes.scatter(data[item][0], data[item][1], data[item][2], c=color, marker=marker_descr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
color_index += 1 # depends on [control=['for'], data=['cluster']]
for item in noise:
if dimension == 1:
if data is None:
axes.plot(item[0], 0.0, 'w' + marker_descr) # depends on [control=['if'], data=[]]
else:
axes.plot(data[item][0], 0.0, 'w' + marker_descr) # depends on [control=['if'], data=[]]
if dimension == 2:
if data is None:
axes.plot(item[0], item[1], 'w' + marker_descr) # depends on [control=['if'], data=[]]
else:
axes.plot(data[item][0], data[item][1], 'w' + marker_descr) # depends on [control=['if'], data=[]]
elif dimension == 3:
if data is None:
axes.scatter(item[0], item[1], item[2], c='w', marker=marker_descr) # depends on [control=['if'], data=[]]
else:
axes.scatter(data[item][0], data[item][1], data[item][2], c='w', marker=marker_descr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
axes.grid(True)
if hide_axes is True:
axes.xaxis.set_ticklabels([])
axes.yaxis.set_ticklabels([])
if dimension == 3:
axes.zaxis.set_ticklabels([]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if display_result is True:
plt.show() # depends on [control=['if'], data=[]]
return axes |
def touch(self):
    """
    Mark this update as complete.

    Lazily creates/binds the marker table on first use, then — inside a
    single transaction — inserts a marker row for
    (update_id, target_table), or refreshes the existing row's
    `inserted` timestamp when one is already present.
    """
    # Bind (and create, if needed) the marker table on first use.
    if self.marker_table_bound is None:
        self.create_marker_table()
    table = self.marker_table_bound
    # NOTE(review): existence is checked before the transaction opens; a
    # concurrent writer could insert between this check and the INSERT
    # below — confirm whether callers serialize access.
    id_exists = self.exists()
    with self.engine.begin() as conn:
        if not id_exists:
            ins = table.insert().values(update_id=self.update_id, target_table=self.target_table,
                                        inserted=datetime.datetime.now())
        else:
            # Row already present: refresh its `inserted` timestamp.
            ins = table.update().where(sqlalchemy.and_(table.c.update_id == self.update_id,
                                                       table.c.target_table == self.target_table)).\
                values(update_id=self.update_id, target_table=self.target_table,
                       inserted=datetime.datetime.now())
        conn.execute(ins)
    # Sanity check: the marker must now be visible.
    assert self.exists() | def function[touch, parameter[self]]:
constant[
Mark this update as complete.
]
if compare[name[self].marker_table_bound is constant[None]] begin[:]
call[name[self].create_marker_table, parameter[]]
variable[table] assign[=] name[self].marker_table_bound
variable[id_exists] assign[=] call[name[self].exists, parameter[]]
with call[name[self].engine.begin, parameter[]] begin[:]
if <ast.UnaryOp object at 0x7da1b1f5b670> begin[:]
variable[ins] assign[=] call[call[name[table].insert, parameter[]].values, parameter[]]
call[name[conn].execute, parameter[name[ins]]]
assert[call[name[self].exists, parameter[]]] | keyword[def] identifier[touch] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[marker_table_bound] keyword[is] keyword[None] :
identifier[self] . identifier[create_marker_table] ()
identifier[table] = identifier[self] . identifier[marker_table_bound]
identifier[id_exists] = identifier[self] . identifier[exists] ()
keyword[with] identifier[self] . identifier[engine] . identifier[begin] () keyword[as] identifier[conn] :
keyword[if] keyword[not] identifier[id_exists] :
identifier[ins] = identifier[table] . identifier[insert] (). identifier[values] ( identifier[update_id] = identifier[self] . identifier[update_id] , identifier[target_table] = identifier[self] . identifier[target_table] ,
identifier[inserted] = identifier[datetime] . identifier[datetime] . identifier[now] ())
keyword[else] :
identifier[ins] = identifier[table] . identifier[update] (). identifier[where] ( identifier[sqlalchemy] . identifier[and_] ( identifier[table] . identifier[c] . identifier[update_id] == identifier[self] . identifier[update_id] ,
identifier[table] . identifier[c] . identifier[target_table] == identifier[self] . identifier[target_table] )). identifier[values] ( identifier[update_id] = identifier[self] . identifier[update_id] , identifier[target_table] = identifier[self] . identifier[target_table] ,
identifier[inserted] = identifier[datetime] . identifier[datetime] . identifier[now] ())
identifier[conn] . identifier[execute] ( identifier[ins] )
keyword[assert] identifier[self] . identifier[exists] () | def touch(self):
"""
Mark this update as complete.
"""
if self.marker_table_bound is None:
self.create_marker_table() # depends on [control=['if'], data=[]]
table = self.marker_table_bound
id_exists = self.exists()
with self.engine.begin() as conn:
if not id_exists:
ins = table.insert().values(update_id=self.update_id, target_table=self.target_table, inserted=datetime.datetime.now()) # depends on [control=['if'], data=[]]
else:
ins = table.update().where(sqlalchemy.and_(table.c.update_id == self.update_id, table.c.target_table == self.target_table)).values(update_id=self.update_id, target_table=self.target_table, inserted=datetime.datetime.now())
conn.execute(ins) # depends on [control=['with'], data=['conn']]
assert self.exists() |
def timezone(utcoffset):
    '''
    Return a string representing the timezone offset.
    Remaining seconds are rounded to the nearest minute.
    >>> timezone(3600)
    '+01:00'
    >>> timezone(5400)
    '+01:30'
    >>> timezone(-28800)
    '-08:00'
    '''
    # Work on the magnitude; the sign is re-applied below.
    hours, seconds = divmod(abs(utcoffset), 3600)
    # NOTE(review): rounding can yield minutes == 60 without carrying into
    # hours (e.g. utcoffset=3599 -> '+00:60'); confirm whether inputs are
    # always whole minutes, otherwise carry the overflow into `hours`.
    minutes = round(float(seconds) / 60)
    if utcoffset >= 0:
        sign = '+'
    else:
        sign = '-'
    return '{0}{1:02d}:{2:02d}'.format(sign, int(hours), int(minutes)) | def function[timezone, parameter[utcoffset]]:
constant[
Return a string representing the timezone offset.
Remaining seconds are rounded to the nearest minute.
>>> timezone(3600)
'+01:00'
>>> timezone(5400)
'+01:30'
>>> timezone(-28800)
'-08:00'
]
<ast.Tuple object at 0x7da18f813370> assign[=] call[name[divmod], parameter[call[name[abs], parameter[name[utcoffset]]], constant[3600]]]
variable[minutes] assign[=] call[name[round], parameter[binary_operation[call[name[float], parameter[name[seconds]]] / constant[60]]]]
if compare[name[utcoffset] greater_or_equal[>=] constant[0]] begin[:]
variable[sign] assign[=] constant[+]
return[call[constant[{0}{1:02d}:{2:02d}].format, parameter[name[sign], call[name[int], parameter[name[hours]]], call[name[int], parameter[name[minutes]]]]]] | keyword[def] identifier[timezone] ( identifier[utcoffset] ):
literal[string]
identifier[hours] , identifier[seconds] = identifier[divmod] ( identifier[abs] ( identifier[utcoffset] ), literal[int] )
identifier[minutes] = identifier[round] ( identifier[float] ( identifier[seconds] )/ literal[int] )
keyword[if] identifier[utcoffset] >= literal[int] :
identifier[sign] = literal[string]
keyword[else] :
identifier[sign] = literal[string]
keyword[return] literal[string] . identifier[format] ( identifier[sign] , identifier[int] ( identifier[hours] ), identifier[int] ( identifier[minutes] )) | def timezone(utcoffset):
"""
Return a string representing the timezone offset.
Remaining seconds are rounded to the nearest minute.
>>> timezone(3600)
'+01:00'
>>> timezone(5400)
'+01:30'
>>> timezone(-28800)
'-08:00'
"""
(hours, seconds) = divmod(abs(utcoffset), 3600)
minutes = round(float(seconds) / 60)
if utcoffset >= 0:
sign = '+' # depends on [control=['if'], data=[]]
else:
sign = '-'
return '{0}{1:02d}:{2:02d}'.format(sign, int(hours), int(minutes)) |
def plot_baf_lrr(file_names, options):
    """Plot BAF and LRR for a list of files.
    :param file_names: contains the name of the input file for each sample.
    :param options: the options.
    :type file_names: dict
    :type options: argparse.Namespace
    Plots the BAF (B Allele Frequency) and LRR (Log R Ratio) of each samples.
    Only the sexual chromosome are shown.
    """
    # importing important stuff: force the non-interactive Agg backend
    # unless the user asked for an on-screen (X11) plot.
    import matplotlib as mpl
    if options.format != "X11" and mpl.get_backend() != "agg":
        mpl.use("Agg")
    import matplotlib.pyplot as plt
    if options.format != "X11":
        plt.ioff()
    # For each of the sample/files
    # NOTE(review): dict.iteritems is Python 2 only (use .items() in py3).
    for sample, file_name in file_names.iteritems():
        data = []
        # Reading the file (transparently handling gzip-compressed input)
        open_func = open
        if file_name.endswith(".gz"):
            open_func = gzip.open
        # NOTE(review): the file is opened in binary mode but lines are
        # rstrip'ed/split with str literals — Python 2 semantics; this
        # would raise under Python 3.
        with open_func(file_name, 'rb') as input_file:
            # Map column name -> column index from the tab-separated header.
            header_index = dict([
                (col_name, i)
                for i, col_name in
                enumerate(input_file.readline().rstrip("\r\n").split("\t"))
            ])
            # All four columns are mandatory.
            for col_name in {"Chr", "Position", "B Allele Freq",
                             "Log R Ratio"}:
                if col_name not in header_index:
                    msg = "{}: no column named {}".format(file_name, col_name)
                    raise ProgramError(msg)
            # Reading the dat
            for line in input_file:
                row = line.rstrip("\r\n").split("\t")
                # We only need X and Y chromosomes
                chromosome = encode_chromosome(row[header_index["Chr"]])
                if chromosome not in {"X", "Y"}:
                    continue
                # The position (must parse as an integer)
                position = row[header_index["Position"]]
                try:
                    position = int(position)
                except ValueError:
                    msg = "{}: impossible position {}".format(file_name,
                                                              position)
                    raise ProgramError(msg)
                # The BAF (must parse as a float)
                baf = row[header_index["B Allele Freq"]]
                try:
                    baf = float(baf)
                except ValueError:
                    msg = "{}: impossible baf {}".format(file_name, baf)
                    raise ProgramError(msg)
                # The LRR (must parse as a float)
                lrr = row[header_index["Log R Ratio"]]
                try:
                    lrr = float(lrr)
                except ValueError:
                    msg = "{}: impossible lrr {}".format(file_name, lrr)
                    raise ProgramError(msg)
                # Saving the data
                data.append((chromosome, position, lrr, baf))
        # Creating the numpy array (structured: chr, pos, lrr, baf)
        data = np.array(data, dtype=[("chr", "a1"), ("pos", int),
                                     ("lrr", float), ("baf", float)])
        # Creating the figure and axes: 2x2 grid, one figure per sample
        fig, axes = plt.subplots(2, 2, figsize=(20, 8))
        plt.subplots_adjust(left=0.05, right=0.97, wspace=0.15, hspace=0.3)
        fig.suptitle(sample, fontsize=16, weight="bold")
        # Setting subplot properties (open spines, outward-offset axes)
        for ax in axes.flatten():
            ax.xaxis.set_ticks_position("bottom")
            ax.yaxis.set_ticks_position("left")
            ax.spines["top"].set_visible(False)
            ax.spines["right"].set_visible(False)
            ax.spines["bottom"].set_position(("outward", 9))
            ax.spines["left"].set_position(("outward", 9))
        # Separating the axes, column-major: chromosome X in the left
        # column, Y in the right; LRR on top, BAF below.
        x_lrr_ax, x_baf_ax, y_lrr_ax, y_baf_ax = axes.flatten(order='F')
        # Printing the X chromosome (positions converted to Mb)
        curr_chr = data["chr"] == "X"
        x_lrr_ax.plot(data["pos"][curr_chr]/1000000.0, data["lrr"][curr_chr],
                      "o", ms=1, mec="#0099CC",
                      mfc="#0099CC")[0].set_clip_on(False)
        x_baf_ax.plot(data["pos"][curr_chr]/1000000.0, data["baf"][curr_chr],
                      "o", ms=1, mec="#669900",
                      mfc="#669900")[0].set_clip_on(False)
        x_lrr_ax.axhline(y=0, color="#000000", ls="--", lw=1.2)
        x_baf_ax.axhline(y=0.5, color="#000000", ls="--", lw=1.2)
        x_lrr_ax.set_ylabel("LRR", weight="bold")
        x_baf_ax.set_ylabel("BAF", weight="bold")
        x_baf_ax.set_xlabel("Position (Mb)", weight="bold")
        x_lrr_ax.set_title("Chromosome X", weight="bold")
        # Printing the Y chromosome (same layout as for X)
        curr_chr = data["chr"] == "Y"
        y_lrr_ax.plot(data["pos"][curr_chr]/1000000.0, data["lrr"][curr_chr],
                      "o", ms=1, mec="#0099CC",
                      mfc="#0099CC")[0].set_clip_on(False)
        y_baf_ax.plot(data["pos"][curr_chr]/1000000.0, data["baf"][curr_chr],
                      "o", ms=1, mec="#669900",
                      mfc="#669900")[0].set_clip_on(False)
        y_lrr_ax.axhline(y=0, color="#000000", ls="--", lw=1.2)
        y_baf_ax.axhline(y=0.5, color="#000000", ls="--", lw=1.2)
        y_lrr_ax.set_ylabel("LRR", weight="bold")
        y_baf_ax.set_ylabel("BAF", weight="bold")
        y_baf_ax.set_xlabel("Position (Mb)", weight="bold")
        y_lrr_ax.set_title("Chromosome Y", weight="bold")
        # Saving the figure (or showing it interactively for X11)
        if options.format == "X11":
            plt.show()
        else:
            plt.savefig(
                "{}_{}_lrr_baf.{}".format(options.out, sample, options.format),
                dpi=options.dpi,
            )
        # Closing the figure to free memory before the next sample
        plt.close(fig) | def function[plot_baf_lrr, parameter[file_names, options]]:
constant[Plot BAF and LRR for a list of files.
:param file_names: contains the name of the input file for each sample.
:param options: the options.
:type file_names: dict
:type options: argparse.Namespace
Plots the BAF (B Allele Frequency) and LRR (Log R Ratio) of each samples.
Only the sexual chromosome are shown.
]
import module[matplotlib] as alias[mpl]
if <ast.BoolOp object at 0x7da1b0966f50> begin[:]
call[name[mpl].use, parameter[constant[Agg]]]
import module[matplotlib.pyplot] as alias[plt]
if compare[name[options].format not_equal[!=] constant[X11]] begin[:]
call[name[plt].ioff, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0967430>, <ast.Name object at 0x7da1b09678e0>]]] in starred[call[name[file_names].iteritems, parameter[]]] begin[:]
variable[data] assign[=] list[[]]
variable[open_func] assign[=] name[open]
if call[name[file_name].endswith, parameter[constant[.gz]]] begin[:]
variable[open_func] assign[=] name[gzip].open
with call[name[open_func], parameter[name[file_name], constant[rb]]] begin[:]
variable[header_index] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da1b0966290>]]
for taget[name[col_name]] in starred[<ast.Set object at 0x7da1b0964220>] begin[:]
if compare[name[col_name] <ast.NotIn object at 0x7da2590d7190> name[header_index]] begin[:]
variable[msg] assign[=] call[constant[{}: no column named {}].format, parameter[name[file_name], name[col_name]]]
<ast.Raise object at 0x7da1b09670d0>
for taget[name[line]] in starred[name[input_file]] begin[:]
variable[row] assign[=] call[call[name[line].rstrip, parameter[constant[
]]].split, parameter[constant[ ]]]
variable[chromosome] assign[=] call[name[encode_chromosome], parameter[call[name[row]][call[name[header_index]][constant[Chr]]]]]
if compare[name[chromosome] <ast.NotIn object at 0x7da2590d7190> <ast.Set object at 0x7da1b0924070>] begin[:]
continue
variable[position] assign[=] call[name[row]][call[name[header_index]][constant[Position]]]
<ast.Try object at 0x7da1b09251e0>
variable[baf] assign[=] call[name[row]][call[name[header_index]][constant[B Allele Freq]]]
<ast.Try object at 0x7da1b0926ec0>
variable[lrr] assign[=] call[name[row]][call[name[header_index]][constant[Log R Ratio]]]
<ast.Try object at 0x7da1b09249d0>
call[name[data].append, parameter[tuple[[<ast.Name object at 0x7da1b0924e50>, <ast.Name object at 0x7da1b0926650>, <ast.Name object at 0x7da1b0926aa0>, <ast.Name object at 0x7da1b09253c0>]]]]
variable[data] assign[=] call[name[np].array, parameter[name[data]]]
<ast.Tuple object at 0x7da1b0924340> assign[=] call[name[plt].subplots, parameter[constant[2], constant[2]]]
call[name[plt].subplots_adjust, parameter[]]
call[name[fig].suptitle, parameter[name[sample]]]
for taget[name[ax]] in starred[call[name[axes].flatten, parameter[]]] begin[:]
call[name[ax].xaxis.set_ticks_position, parameter[constant[bottom]]]
call[name[ax].yaxis.set_ticks_position, parameter[constant[left]]]
call[call[name[ax].spines][constant[top]].set_visible, parameter[constant[False]]]
call[call[name[ax].spines][constant[right]].set_visible, parameter[constant[False]]]
call[call[name[ax].spines][constant[bottom]].set_position, parameter[tuple[[<ast.Constant object at 0x7da1b095eef0>, <ast.Constant object at 0x7da1b095f070>]]]]
call[call[name[ax].spines][constant[left]].set_position, parameter[tuple[[<ast.Constant object at 0x7da1b095d900>, <ast.Constant object at 0x7da1b095da20>]]]]
<ast.Tuple object at 0x7da1b095f7c0> assign[=] call[name[axes].flatten, parameter[]]
variable[curr_chr] assign[=] compare[call[name[data]][constant[chr]] equal[==] constant[X]]
call[call[call[name[x_lrr_ax].plot, parameter[binary_operation[call[call[name[data]][constant[pos]]][name[curr_chr]] / constant[1000000.0]], call[call[name[data]][constant[lrr]]][name[curr_chr]], constant[o]]]][constant[0]].set_clip_on, parameter[constant[False]]]
call[call[call[name[x_baf_ax].plot, parameter[binary_operation[call[call[name[data]][constant[pos]]][name[curr_chr]] / constant[1000000.0]], call[call[name[data]][constant[baf]]][name[curr_chr]], constant[o]]]][constant[0]].set_clip_on, parameter[constant[False]]]
call[name[x_lrr_ax].axhline, parameter[]]
call[name[x_baf_ax].axhline, parameter[]]
call[name[x_lrr_ax].set_ylabel, parameter[constant[LRR]]]
call[name[x_baf_ax].set_ylabel, parameter[constant[BAF]]]
call[name[x_baf_ax].set_xlabel, parameter[constant[Position (Mb)]]]
call[name[x_lrr_ax].set_title, parameter[constant[Chromosome X]]]
variable[curr_chr] assign[=] compare[call[name[data]][constant[chr]] equal[==] constant[Y]]
call[call[call[name[y_lrr_ax].plot, parameter[binary_operation[call[call[name[data]][constant[pos]]][name[curr_chr]] / constant[1000000.0]], call[call[name[data]][constant[lrr]]][name[curr_chr]], constant[o]]]][constant[0]].set_clip_on, parameter[constant[False]]]
call[call[call[name[y_baf_ax].plot, parameter[binary_operation[call[call[name[data]][constant[pos]]][name[curr_chr]] / constant[1000000.0]], call[call[name[data]][constant[baf]]][name[curr_chr]], constant[o]]]][constant[0]].set_clip_on, parameter[constant[False]]]
call[name[y_lrr_ax].axhline, parameter[]]
call[name[y_baf_ax].axhline, parameter[]]
call[name[y_lrr_ax].set_ylabel, parameter[constant[LRR]]]
call[name[y_baf_ax].set_ylabel, parameter[constant[BAF]]]
call[name[y_baf_ax].set_xlabel, parameter[constant[Position (Mb)]]]
call[name[y_lrr_ax].set_title, parameter[constant[Chromosome Y]]]
if compare[name[options].format equal[==] constant[X11]] begin[:]
call[name[plt].show, parameter[]]
call[name[plt].close, parameter[name[fig]]] | keyword[def] identifier[plot_baf_lrr] ( identifier[file_names] , identifier[options] ):
literal[string]
keyword[import] identifier[matplotlib] keyword[as] identifier[mpl]
keyword[if] identifier[options] . identifier[format] != literal[string] keyword[and] identifier[mpl] . identifier[get_backend] ()!= literal[string] :
identifier[mpl] . identifier[use] ( literal[string] )
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[if] identifier[options] . identifier[format] != literal[string] :
identifier[plt] . identifier[ioff] ()
keyword[for] identifier[sample] , identifier[file_name] keyword[in] identifier[file_names] . identifier[iteritems] ():
identifier[data] =[]
identifier[open_func] = identifier[open]
keyword[if] identifier[file_name] . identifier[endswith] ( literal[string] ):
identifier[open_func] = identifier[gzip] . identifier[open]
keyword[with] identifier[open_func] ( identifier[file_name] , literal[string] ) keyword[as] identifier[input_file] :
identifier[header_index] = identifier[dict] ([
( identifier[col_name] , identifier[i] )
keyword[for] identifier[i] , identifier[col_name] keyword[in]
identifier[enumerate] ( identifier[input_file] . identifier[readline] (). identifier[rstrip] ( literal[string] ). identifier[split] ( literal[string] ))
])
keyword[for] identifier[col_name] keyword[in] { literal[string] , literal[string] , literal[string] ,
literal[string] }:
keyword[if] identifier[col_name] keyword[not] keyword[in] identifier[header_index] :
identifier[msg] = literal[string] . identifier[format] ( identifier[file_name] , identifier[col_name] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[for] identifier[line] keyword[in] identifier[input_file] :
identifier[row] = identifier[line] . identifier[rstrip] ( literal[string] ). identifier[split] ( literal[string] )
identifier[chromosome] = identifier[encode_chromosome] ( identifier[row] [ identifier[header_index] [ literal[string] ]])
keyword[if] identifier[chromosome] keyword[not] keyword[in] { literal[string] , literal[string] }:
keyword[continue]
identifier[position] = identifier[row] [ identifier[header_index] [ literal[string] ]]
keyword[try] :
identifier[position] = identifier[int] ( identifier[position] )
keyword[except] identifier[ValueError] :
identifier[msg] = literal[string] . identifier[format] ( identifier[file_name] ,
identifier[position] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
identifier[baf] = identifier[row] [ identifier[header_index] [ literal[string] ]]
keyword[try] :
identifier[baf] = identifier[float] ( identifier[baf] )
keyword[except] identifier[ValueError] :
identifier[msg] = literal[string] . identifier[format] ( identifier[file_name] , identifier[baf] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
identifier[lrr] = identifier[row] [ identifier[header_index] [ literal[string] ]]
keyword[try] :
identifier[lrr] = identifier[float] ( identifier[lrr] )
keyword[except] identifier[ValueError] :
identifier[msg] = literal[string] . identifier[format] ( identifier[file_name] , identifier[lrr] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
identifier[data] . identifier[append] (( identifier[chromosome] , identifier[position] , identifier[lrr] , identifier[baf] ))
identifier[data] = identifier[np] . identifier[array] ( identifier[data] , identifier[dtype] =[( literal[string] , literal[string] ),( literal[string] , identifier[int] ),
( literal[string] , identifier[float] ),( literal[string] , identifier[float] )])
identifier[fig] , identifier[axes] = identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[figsize] =( literal[int] , literal[int] ))
identifier[plt] . identifier[subplots_adjust] ( identifier[left] = literal[int] , identifier[right] = literal[int] , identifier[wspace] = literal[int] , identifier[hspace] = literal[int] )
identifier[fig] . identifier[suptitle] ( identifier[sample] , identifier[fontsize] = literal[int] , identifier[weight] = literal[string] )
keyword[for] identifier[ax] keyword[in] identifier[axes] . identifier[flatten] ():
identifier[ax] . identifier[xaxis] . identifier[set_ticks_position] ( literal[string] )
identifier[ax] . identifier[yaxis] . identifier[set_ticks_position] ( literal[string] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_position] (( literal[string] , literal[int] ))
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_position] (( literal[string] , literal[int] ))
identifier[x_lrr_ax] , identifier[x_baf_ax] , identifier[y_lrr_ax] , identifier[y_baf_ax] = identifier[axes] . identifier[flatten] ( identifier[order] = literal[string] )
identifier[curr_chr] = identifier[data] [ literal[string] ]== literal[string]
identifier[x_lrr_ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[curr_chr] ]/ literal[int] , identifier[data] [ literal[string] ][ identifier[curr_chr] ],
literal[string] , identifier[ms] = literal[int] , identifier[mec] = literal[string] ,
identifier[mfc] = literal[string] )[ literal[int] ]. identifier[set_clip_on] ( keyword[False] )
identifier[x_baf_ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[curr_chr] ]/ literal[int] , identifier[data] [ literal[string] ][ identifier[curr_chr] ],
literal[string] , identifier[ms] = literal[int] , identifier[mec] = literal[string] ,
identifier[mfc] = literal[string] )[ literal[int] ]. identifier[set_clip_on] ( keyword[False] )
identifier[x_lrr_ax] . identifier[axhline] ( identifier[y] = literal[int] , identifier[color] = literal[string] , identifier[ls] = literal[string] , identifier[lw] = literal[int] )
identifier[x_baf_ax] . identifier[axhline] ( identifier[y] = literal[int] , identifier[color] = literal[string] , identifier[ls] = literal[string] , identifier[lw] = literal[int] )
identifier[x_lrr_ax] . identifier[set_ylabel] ( literal[string] , identifier[weight] = literal[string] )
identifier[x_baf_ax] . identifier[set_ylabel] ( literal[string] , identifier[weight] = literal[string] )
identifier[x_baf_ax] . identifier[set_xlabel] ( literal[string] , identifier[weight] = literal[string] )
identifier[x_lrr_ax] . identifier[set_title] ( literal[string] , identifier[weight] = literal[string] )
identifier[curr_chr] = identifier[data] [ literal[string] ]== literal[string]
identifier[y_lrr_ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[curr_chr] ]/ literal[int] , identifier[data] [ literal[string] ][ identifier[curr_chr] ],
literal[string] , identifier[ms] = literal[int] , identifier[mec] = literal[string] ,
identifier[mfc] = literal[string] )[ literal[int] ]. identifier[set_clip_on] ( keyword[False] )
identifier[y_baf_ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[curr_chr] ]/ literal[int] , identifier[data] [ literal[string] ][ identifier[curr_chr] ],
literal[string] , identifier[ms] = literal[int] , identifier[mec] = literal[string] ,
identifier[mfc] = literal[string] )[ literal[int] ]. identifier[set_clip_on] ( keyword[False] )
identifier[y_lrr_ax] . identifier[axhline] ( identifier[y] = literal[int] , identifier[color] = literal[string] , identifier[ls] = literal[string] , identifier[lw] = literal[int] )
identifier[y_baf_ax] . identifier[axhline] ( identifier[y] = literal[int] , identifier[color] = literal[string] , identifier[ls] = literal[string] , identifier[lw] = literal[int] )
identifier[y_lrr_ax] . identifier[set_ylabel] ( literal[string] , identifier[weight] = literal[string] )
identifier[y_baf_ax] . identifier[set_ylabel] ( literal[string] , identifier[weight] = literal[string] )
identifier[y_baf_ax] . identifier[set_xlabel] ( literal[string] , identifier[weight] = literal[string] )
identifier[y_lrr_ax] . identifier[set_title] ( literal[string] , identifier[weight] = literal[string] )
keyword[if] identifier[options] . identifier[format] == literal[string] :
identifier[plt] . identifier[show] ()
keyword[else] :
identifier[plt] . identifier[savefig] (
literal[string] . identifier[format] ( identifier[options] . identifier[out] , identifier[sample] , identifier[options] . identifier[format] ),
identifier[dpi] = identifier[options] . identifier[dpi] ,
)
identifier[plt] . identifier[close] ( identifier[fig] ) | def plot_baf_lrr(file_names, options):
"""Plot BAF and LRR for a list of files.
:param file_names: contains the name of the input file for each sample.
:param options: the options.
:type file_names: dict
:type options: argparse.Namespace
Plots the BAF (B Allele Frequency) and LRR (Log R Ratio) of each samples.
Only the sexual chromosome are shown.
"""
# importing important stuff
import matplotlib as mpl
if options.format != 'X11' and mpl.get_backend() != 'agg':
mpl.use('Agg') # depends on [control=['if'], data=[]]
import matplotlib.pyplot as plt
if options.format != 'X11':
plt.ioff() # depends on [control=['if'], data=[]]
# For each of the sample/files
for (sample, file_name) in file_names.iteritems():
data = []
# Reading the file
open_func = open
if file_name.endswith('.gz'):
open_func = gzip.open # depends on [control=['if'], data=[]]
with open_func(file_name, 'rb') as input_file:
header_index = dict([(col_name, i) for (i, col_name) in enumerate(input_file.readline().rstrip('\r\n').split('\t'))])
for col_name in {'Chr', 'Position', 'B Allele Freq', 'Log R Ratio'}:
if col_name not in header_index:
msg = '{}: no column named {}'.format(file_name, col_name)
raise ProgramError(msg) # depends on [control=['if'], data=['col_name']] # depends on [control=['for'], data=['col_name']]
# Reading the dat
for line in input_file:
row = line.rstrip('\r\n').split('\t')
# We only need X and Y chromosomes
chromosome = encode_chromosome(row[header_index['Chr']])
if chromosome not in {'X', 'Y'}:
continue # depends on [control=['if'], data=[]]
# The position
position = row[header_index['Position']]
try:
position = int(position) # depends on [control=['try'], data=[]]
except ValueError:
msg = '{}: impossible position {}'.format(file_name, position)
raise ProgramError(msg) # depends on [control=['except'], data=[]]
# The BAF
baf = row[header_index['B Allele Freq']]
try:
baf = float(baf) # depends on [control=['try'], data=[]]
except ValueError:
msg = '{}: impossible baf {}'.format(file_name, baf)
raise ProgramError(msg) # depends on [control=['except'], data=[]]
# The LRR
lrr = row[header_index['Log R Ratio']]
try:
lrr = float(lrr) # depends on [control=['try'], data=[]]
except ValueError:
msg = '{}: impossible lrr {}'.format(file_name, lrr)
raise ProgramError(msg) # depends on [control=['except'], data=[]]
# Saving the data
data.append((chromosome, position, lrr, baf)) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['input_file']]
# Creating the numpy array
data = np.array(data, dtype=[('chr', 'a1'), ('pos', int), ('lrr', float), ('baf', float)])
# Creating the figure and axes
(fig, axes) = plt.subplots(2, 2, figsize=(20, 8))
plt.subplots_adjust(left=0.05, right=0.97, wspace=0.15, hspace=0.3)
fig.suptitle(sample, fontsize=16, weight='bold')
# Setting subplot properties
for ax in axes.flatten():
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['bottom'].set_position(('outward', 9))
ax.spines['left'].set_position(('outward', 9)) # depends on [control=['for'], data=['ax']]
# Separating the axes
(x_lrr_ax, x_baf_ax, y_lrr_ax, y_baf_ax) = axes.flatten(order='F')
# Printing the X chromosome
curr_chr = data['chr'] == 'X'
x_lrr_ax.plot(data['pos'][curr_chr] / 1000000.0, data['lrr'][curr_chr], 'o', ms=1, mec='#0099CC', mfc='#0099CC')[0].set_clip_on(False)
x_baf_ax.plot(data['pos'][curr_chr] / 1000000.0, data['baf'][curr_chr], 'o', ms=1, mec='#669900', mfc='#669900')[0].set_clip_on(False)
x_lrr_ax.axhline(y=0, color='#000000', ls='--', lw=1.2)
x_baf_ax.axhline(y=0.5, color='#000000', ls='--', lw=1.2)
x_lrr_ax.set_ylabel('LRR', weight='bold')
x_baf_ax.set_ylabel('BAF', weight='bold')
x_baf_ax.set_xlabel('Position (Mb)', weight='bold')
x_lrr_ax.set_title('Chromosome X', weight='bold')
# Printing the X chromosome
curr_chr = data['chr'] == 'Y'
y_lrr_ax.plot(data['pos'][curr_chr] / 1000000.0, data['lrr'][curr_chr], 'o', ms=1, mec='#0099CC', mfc='#0099CC')[0].set_clip_on(False)
y_baf_ax.plot(data['pos'][curr_chr] / 1000000.0, data['baf'][curr_chr], 'o', ms=1, mec='#669900', mfc='#669900')[0].set_clip_on(False)
y_lrr_ax.axhline(y=0, color='#000000', ls='--', lw=1.2)
y_baf_ax.axhline(y=0.5, color='#000000', ls='--', lw=1.2)
y_lrr_ax.set_ylabel('LRR', weight='bold')
y_baf_ax.set_ylabel('BAF', weight='bold')
y_baf_ax.set_xlabel('Position (Mb)', weight='bold')
y_lrr_ax.set_title('Chromosome Y', weight='bold')
# Saving the figure
if options.format == 'X11':
plt.show() # depends on [control=['if'], data=[]]
else:
plt.savefig('{}_{}_lrr_baf.{}'.format(options.out, sample, options.format), dpi=options.dpi)
# Closing the figure
plt.close(fig) # depends on [control=['for'], data=[]] |
def omim(context, api_key, institute):
    """
    Refresh the automatically generated OMIM gene panel in the database.
    """
    LOG.info("Running scout update omim")
    db = context.obj['adapter']
    # Fall back to the API key configured in the context when none was given
    # on the command line.
    if not api_key:
        api_key = context.obj.get('omim_api_key')
    if not api_key:
        LOG.warning("Please provide a omim api key to load the omim gene panel")
        context.abort()
    # The panel must belong to an institute that already exists.
    if not db.institute(institute):
        LOG.info("Institute %s could not be found in database", institute)
        LOG.warning("Please specify an existing institute")
        context.abort()
    try:
        db.load_omim_panel(api_key, institute=institute)
    except Exception as err:
        LOG.error(err)
        context.abort()
constant[
Update the automate generated omim gene panel in the database.
]
call[name[LOG].info, parameter[constant[Running scout update omim]]]
variable[adapter] assign[=] call[name[context].obj][constant[adapter]]
variable[api_key] assign[=] <ast.BoolOp object at 0x7da2041db610>
if <ast.UnaryOp object at 0x7da2041dba60> begin[:]
call[name[LOG].warning, parameter[constant[Please provide a omim api key to load the omim gene panel]]]
call[name[context].abort, parameter[]]
variable[institute_obj] assign[=] call[name[adapter].institute, parameter[name[institute]]]
if <ast.UnaryOp object at 0x7da2041da650> begin[:]
call[name[LOG].info, parameter[constant[Institute %s could not be found in database], name[institute]]]
call[name[LOG].warning, parameter[constant[Please specify an existing institute]]]
call[name[context].abort, parameter[]]
<ast.Try object at 0x7da2041db6a0> | keyword[def] identifier[omim] ( identifier[context] , identifier[api_key] , identifier[institute] ):
literal[string]
identifier[LOG] . identifier[info] ( literal[string] )
identifier[adapter] = identifier[context] . identifier[obj] [ literal[string] ]
identifier[api_key] = identifier[api_key] keyword[or] identifier[context] . identifier[obj] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[api_key] :
identifier[LOG] . identifier[warning] ( literal[string] )
identifier[context] . identifier[abort] ()
identifier[institute_obj] = identifier[adapter] . identifier[institute] ( identifier[institute] )
keyword[if] keyword[not] identifier[institute_obj] :
identifier[LOG] . identifier[info] ( literal[string] , identifier[institute] )
identifier[LOG] . identifier[warning] ( literal[string] )
identifier[context] . identifier[abort] ()
keyword[try] :
identifier[adapter] . identifier[load_omim_panel] ( identifier[api_key] , identifier[institute] = identifier[institute] )
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[LOG] . identifier[error] ( identifier[err] )
identifier[context] . identifier[abort] () | def omim(context, api_key, institute):
"""
Update the automate generated omim gene panel in the database.
"""
LOG.info('Running scout update omim')
adapter = context.obj['adapter']
api_key = api_key or context.obj.get('omim_api_key')
if not api_key:
LOG.warning('Please provide a omim api key to load the omim gene panel')
context.abort() # depends on [control=['if'], data=[]]
institute_obj = adapter.institute(institute)
if not institute_obj:
LOG.info('Institute %s could not be found in database', institute)
LOG.warning('Please specify an existing institute')
context.abort() # depends on [control=['if'], data=[]]
try:
adapter.load_omim_panel(api_key, institute=institute) # depends on [control=['try'], data=[]]
except Exception as err:
LOG.error(err)
context.abort() # depends on [control=['except'], data=['err']] |
def _select_better_fit(self, matching_venvs):
"""Receive a list of matching venvs, and decide which one is the best fit."""
# keep the venvs in a separate array, to pick up the winner, and the (sorted, to compare
# each dependency with its equivalent) in other structure to later compare
venvs = []
to_compare = []
for matching, venv in matching_venvs:
to_compare.append(sorted(matching, key=lambda req: getattr(req, 'key', '')))
venvs.append(venv)
# compare each n-tuple of dependencies to see which one is bigger, and add score to the
# position of the winner
scores = [0] * len(venvs)
for dependencies in zip(*to_compare):
if not isinstance(dependencies[0], Distribution):
# only distribution URLs can be compared
continue
winner = dependencies.index(max(dependencies))
scores[winner] = scores[winner] + 1
# get the rightmost winner (in case of ties, to select the latest venv)
winner_pos = None
winner_score = -1
for i, score in enumerate(scores):
if score >= winner_score:
winner_score = score
winner_pos = i
return venvs[winner_pos] | def function[_select_better_fit, parameter[self, matching_venvs]]:
constant[Receive a list of matching venvs, and decide which one is the best fit.]
variable[venvs] assign[=] list[[]]
variable[to_compare] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0d12800>, <ast.Name object at 0x7da1b0d11060>]]] in starred[name[matching_venvs]] begin[:]
call[name[to_compare].append, parameter[call[name[sorted], parameter[name[matching]]]]]
call[name[venvs].append, parameter[name[venv]]]
variable[scores] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0d114e0>]] * call[name[len], parameter[name[venvs]]]]
for taget[name[dependencies]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da1b0d119c0>]]] begin[:]
if <ast.UnaryOp object at 0x7da1b0d12cb0> begin[:]
continue
variable[winner] assign[=] call[name[dependencies].index, parameter[call[name[max], parameter[name[dependencies]]]]]
call[name[scores]][name[winner]] assign[=] binary_operation[call[name[scores]][name[winner]] + constant[1]]
variable[winner_pos] assign[=] constant[None]
variable[winner_score] assign[=] <ast.UnaryOp object at 0x7da1b0d131f0>
for taget[tuple[[<ast.Name object at 0x7da1b0e4f340>, <ast.Name object at 0x7da1b0e4ef20>]]] in starred[call[name[enumerate], parameter[name[scores]]]] begin[:]
if compare[name[score] greater_or_equal[>=] name[winner_score]] begin[:]
variable[winner_score] assign[=] name[score]
variable[winner_pos] assign[=] name[i]
return[call[name[venvs]][name[winner_pos]]] | keyword[def] identifier[_select_better_fit] ( identifier[self] , identifier[matching_venvs] ):
literal[string]
identifier[venvs] =[]
identifier[to_compare] =[]
keyword[for] identifier[matching] , identifier[venv] keyword[in] identifier[matching_venvs] :
identifier[to_compare] . identifier[append] ( identifier[sorted] ( identifier[matching] , identifier[key] = keyword[lambda] identifier[req] : identifier[getattr] ( identifier[req] , literal[string] , literal[string] )))
identifier[venvs] . identifier[append] ( identifier[venv] )
identifier[scores] =[ literal[int] ]* identifier[len] ( identifier[venvs] )
keyword[for] identifier[dependencies] keyword[in] identifier[zip] (* identifier[to_compare] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[dependencies] [ literal[int] ], identifier[Distribution] ):
keyword[continue]
identifier[winner] = identifier[dependencies] . identifier[index] ( identifier[max] ( identifier[dependencies] ))
identifier[scores] [ identifier[winner] ]= identifier[scores] [ identifier[winner] ]+ literal[int]
identifier[winner_pos] = keyword[None]
identifier[winner_score] =- literal[int]
keyword[for] identifier[i] , identifier[score] keyword[in] identifier[enumerate] ( identifier[scores] ):
keyword[if] identifier[score] >= identifier[winner_score] :
identifier[winner_score] = identifier[score]
identifier[winner_pos] = identifier[i]
keyword[return] identifier[venvs] [ identifier[winner_pos] ] | def _select_better_fit(self, matching_venvs):
"""Receive a list of matching venvs, and decide which one is the best fit."""
# keep the venvs in a separate array, to pick up the winner, and the (sorted, to compare
# each dependency with its equivalent) in other structure to later compare
venvs = []
to_compare = []
for (matching, venv) in matching_venvs:
to_compare.append(sorted(matching, key=lambda req: getattr(req, 'key', '')))
venvs.append(venv) # depends on [control=['for'], data=[]]
# compare each n-tuple of dependencies to see which one is bigger, and add score to the
# position of the winner
scores = [0] * len(venvs)
for dependencies in zip(*to_compare):
if not isinstance(dependencies[0], Distribution):
# only distribution URLs can be compared
continue # depends on [control=['if'], data=[]]
winner = dependencies.index(max(dependencies))
scores[winner] = scores[winner] + 1 # depends on [control=['for'], data=['dependencies']]
# get the rightmost winner (in case of ties, to select the latest venv)
winner_pos = None
winner_score = -1
for (i, score) in enumerate(scores):
if score >= winner_score:
winner_score = score
winner_pos = i # depends on [control=['if'], data=['score', 'winner_score']] # depends on [control=['for'], data=[]]
return venvs[winner_pos] |
def filter(self, mask):
    """
    Build a SiteCollection restricted to a subset of this one's sites.

    :param mask:
        Boolean numpy array with one entry per site in this collection;
        ``True`` marks a site that should be kept.
    :returns:
        This very collection when every mask entry is ``True``; ``None``
        when none is; otherwise a new :class:`SiteCollection` holding only
        the sites whose mask entry is ``True``.
    """
    assert len(mask) == len(self), (len(mask), len(self))
    # NB: the all() check must come first so an empty mask over an empty
    # collection returns the collection itself rather than None.
    if mask.all():
        # every site passes the filter: reuse this collection unchanged
        return self
    if not mask.any():
        # no site passes the filter
        return None
    # positions of the True entries in the mask
    [indices] = mask.nonzero()
    return self.filtered(indices)
constant[
Create a SiteCollection with only a subset of sites.
:param mask:
Numpy array of boolean values of the same length as the site
collection. ``True`` values should indicate that site with that
index should be included into the filtered collection.
:returns:
A new :class:`SiteCollection` instance, unless all the
values in ``mask`` are ``True``, in which case this site collection
is returned, or if all the values in ``mask`` are ``False``,
in which case method returns ``None``. New collection has data
of only those sites that were marked for inclusion in the mask.
]
assert[compare[call[name[len], parameter[name[mask]]] equal[==] call[name[len], parameter[name[self]]]]]
if call[name[mask].all, parameter[]] begin[:]
return[name[self]]
if <ast.UnaryOp object at 0x7da20c794070> begin[:]
return[constant[None]]
<ast.Tuple object at 0x7da18dc045b0> assign[=] call[name[mask].nonzero, parameter[]]
return[call[name[self].filtered, parameter[name[indices]]]] | keyword[def] identifier[filter] ( identifier[self] , identifier[mask] ):
literal[string]
keyword[assert] identifier[len] ( identifier[mask] )== identifier[len] ( identifier[self] ),( identifier[len] ( identifier[mask] ), identifier[len] ( identifier[self] ))
keyword[if] identifier[mask] . identifier[all] ():
keyword[return] identifier[self]
keyword[if] keyword[not] identifier[mask] . identifier[any] ():
keyword[return] keyword[None]
identifier[indices] ,= identifier[mask] . identifier[nonzero] ()
keyword[return] identifier[self] . identifier[filtered] ( identifier[indices] ) | def filter(self, mask):
"""
Create a SiteCollection with only a subset of sites.
:param mask:
Numpy array of boolean values of the same length as the site
collection. ``True`` values should indicate that site with that
index should be included into the filtered collection.
:returns:
A new :class:`SiteCollection` instance, unless all the
values in ``mask`` are ``True``, in which case this site collection
is returned, or if all the values in ``mask`` are ``False``,
in which case method returns ``None``. New collection has data
of only those sites that were marked for inclusion in the mask.
"""
assert len(mask) == len(self), (len(mask), len(self))
if mask.all():
# all sites satisfy the filter, return
# this collection unchanged
return self # depends on [control=['if'], data=[]]
if not mask.any():
# no sites pass the filter, return None
return None # depends on [control=['if'], data=[]]
# extract indices of Trues from the mask
(indices,) = mask.nonzero()
return self.filtered(indices) |
def get_objective_bank(self, objective_bank_id=None):
    """Look up a single ObjectiveBank by its Id.

    In plenary mode an exact Id match is required, otherwise NotFound
    results.  In other modes the returned bank may carry a different Id
    than the one requested, e.g. when a duplicate Id was assigned to a
    bank and retained for compatibility.

    arg:    objective_bank_id (osid.id.Id): Id of the ObjectiveBank
    return: (osid.learning.ObjectiveBank) - the objective bank
    raise:  NotFound - objective_bank_id not found
    raise:  NullArgument - objective_bank_id is null
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    compliance: mandatory - This method must be implemented.
    """
    if objective_bank_id is None:
        raise NullArgument()
    # Build the REST path for this bank, fetch the raw payload, and
    # wrap it in an ObjectiveBank domain object.
    response = self._get_request(
        construct_url('objective_banks', bank_id=objective_bank_id))
    return objects.ObjectiveBank(response)
constant[Gets the ObjectiveBank specified by its Id.
In plenary mode, the exact Id is found or a NotFound results.
Otherwise, the returned ObjectiveBank may have a different Id
than requested, such as the case where a duplicate Id was
assigned to a ObjectiveBank and retained for compatility.
arg: objectiveBankId (osid.id.Id): Id of the ObjectiveBank
return: (osid.learning.ObjectiveBank) - the objective bank
raise: NotFound - objectiveBankId not found
raise: NullArgument - objectiveBankId is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method is must be implemented.
]
if compare[name[objective_bank_id] is constant[None]] begin[:]
<ast.Raise object at 0x7da2041dad40>
variable[url_path] assign[=] call[name[construct_url], parameter[constant[objective_banks]]]
return[call[name[objects].ObjectiveBank, parameter[call[name[self]._get_request, parameter[name[url_path]]]]]] | keyword[def] identifier[get_objective_bank] ( identifier[self] , identifier[objective_bank_id] = keyword[None] ):
literal[string]
keyword[if] identifier[objective_bank_id] keyword[is] keyword[None] :
keyword[raise] identifier[NullArgument] ()
identifier[url_path] = identifier[construct_url] ( literal[string] ,
identifier[bank_id] = identifier[objective_bank_id] )
keyword[return] identifier[objects] . identifier[ObjectiveBank] ( identifier[self] . identifier[_get_request] ( identifier[url_path] )) | def get_objective_bank(self, objective_bank_id=None):
"""Gets the ObjectiveBank specified by its Id.
In plenary mode, the exact Id is found or a NotFound results.
Otherwise, the returned ObjectiveBank may have a different Id
than requested, such as the case where a duplicate Id was
assigned to a ObjectiveBank and retained for compatility.
arg: objectiveBankId (osid.id.Id): Id of the ObjectiveBank
return: (osid.learning.ObjectiveBank) - the objective bank
raise: NotFound - objectiveBankId not found
raise: NullArgument - objectiveBankId is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method is must be implemented.
"""
if objective_bank_id is None:
raise NullArgument() # depends on [control=['if'], data=[]]
url_path = construct_url('objective_banks', bank_id=objective_bank_id)
return objects.ObjectiveBank(self._get_request(url_path)) |
def get_catalogs(self):
    """Return the catalog list produced by the search.

    The result list is single-use: it may be fetched only once, and a
    second call raises IllegalState.

    return: (osid.cataloging.CatalogList) - the catalogs list
    raise:  IllegalState - list has already been retrieved
    *compliance: mandatory -- This method must be implemented.*
    """
    if not self.retrieved:
        # mark the results as consumed before handing them out
        self.retrieved = True
        return objects.CatalogList(self._results, runtime=self._runtime)
    raise errors.IllegalState('List has already been retrieved.')
constant[Gets the catalog list resulting from the search.
return: (osid.cataloging.CatalogList) - the catalogs list
raise: IllegalState - list has already been retrieved
*compliance: mandatory -- This method must be implemented.*
]
if name[self].retrieved begin[:]
<ast.Raise object at 0x7da2054a6170>
name[self].retrieved assign[=] constant[True]
return[call[name[objects].CatalogList, parameter[name[self]._results]]] | keyword[def] identifier[get_catalogs] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[retrieved] :
keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] )
identifier[self] . identifier[retrieved] = keyword[True]
keyword[return] identifier[objects] . identifier[CatalogList] ( identifier[self] . identifier[_results] , identifier[runtime] = identifier[self] . identifier[_runtime] ) | def get_catalogs(self):
"""Gets the catalog list resulting from the search.
return: (osid.cataloging.CatalogList) - the catalogs list
raise: IllegalState - list has already been retrieved
*compliance: mandatory -- This method must be implemented.*
"""
if self.retrieved:
raise errors.IllegalState('List has already been retrieved.') # depends on [control=['if'], data=[]]
self.retrieved = True
return objects.CatalogList(self._results, runtime=self._runtime) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.