code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get_ids_by_program(self, program):
"""
Return a set containing the process IDs from rows whose
program string equals the given program.
"""
return set(row.process_id for row in self if row.program == program) | def function[get_ids_by_program, parameter[self, program]]:
constant[
Return a set containing the process IDs from rows whose
program string equals the given program.
]
return[call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b0b73700>]]] | keyword[def] identifier[get_ids_by_program] ( identifier[self] , identifier[program] ):
literal[string]
keyword[return] identifier[set] ( identifier[row] . identifier[process_id] keyword[for] identifier[row] keyword[in] identifier[self] keyword[if] identifier[row] . identifier[program] == identifier[program] ) | def get_ids_by_program(self, program):
"""
Return a set containing the process IDs from rows whose
program string equals the given program.
"""
return set((row.process_id for row in self if row.program == program)) |
def get_name(self):
"""
Tries to get WF name from 'process' or 'collobration' or 'pariticipant'
Returns:
str. WF name.
"""
ns = {'ns': '{%s}' % BPMN_MODEL_NS}
for path in ('.//{ns}process',
'.//{ns}collaboration',
'.//{ns}collaboration/{ns}participant/'):
tag = self.doc_xpath(path.format(**ns))
if tag:
name = tag[0].get('name')
if name:
return name
return self.get_id() | def function[get_name, parameter[self]]:
constant[
Tries to get WF name from 'process' or 'collobration' or 'pariticipant'
Returns:
str. WF name.
]
variable[ns] assign[=] dictionary[[<ast.Constant object at 0x7da207f9b2e0>], [<ast.BinOp object at 0x7da207f98880>]]
for taget[name[path]] in starred[tuple[[<ast.Constant object at 0x7da207f99030>, <ast.Constant object at 0x7da207f99c00>, <ast.Constant object at 0x7da207f99c30>]]] begin[:]
variable[tag] assign[=] call[name[self].doc_xpath, parameter[call[name[path].format, parameter[]]]]
if name[tag] begin[:]
variable[name] assign[=] call[call[name[tag]][constant[0]].get, parameter[constant[name]]]
if name[name] begin[:]
return[name[name]]
return[call[name[self].get_id, parameter[]]] | keyword[def] identifier[get_name] ( identifier[self] ):
literal[string]
identifier[ns] ={ literal[string] : literal[string] % identifier[BPMN_MODEL_NS] }
keyword[for] identifier[path] keyword[in] ( literal[string] ,
literal[string] ,
literal[string] ):
identifier[tag] = identifier[self] . identifier[doc_xpath] ( identifier[path] . identifier[format] (** identifier[ns] ))
keyword[if] identifier[tag] :
identifier[name] = identifier[tag] [ literal[int] ]. identifier[get] ( literal[string] )
keyword[if] identifier[name] :
keyword[return] identifier[name]
keyword[return] identifier[self] . identifier[get_id] () | def get_name(self):
"""
Tries to get WF name from 'process' or 'collobration' or 'pariticipant'
Returns:
str. WF name.
"""
ns = {'ns': '{%s}' % BPMN_MODEL_NS}
for path in ('.//{ns}process', './/{ns}collaboration', './/{ns}collaboration/{ns}participant/'):
tag = self.doc_xpath(path.format(**ns))
if tag:
name = tag[0].get('name')
if name:
return name # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']]
return self.get_id() |
def set_color_scheme(self, foreground_color, background_color):
"""Set color scheme of the console (foreground and background)."""
self.ansi_handler.set_color_scheme(foreground_color, background_color)
background_color = QColor(background_color)
foreground_color = QColor(foreground_color)
self.set_palette(background=background_color,
foreground=foreground_color)
self.set_pythonshell_font() | def function[set_color_scheme, parameter[self, foreground_color, background_color]]:
constant[Set color scheme of the console (foreground and background).]
call[name[self].ansi_handler.set_color_scheme, parameter[name[foreground_color], name[background_color]]]
variable[background_color] assign[=] call[name[QColor], parameter[name[background_color]]]
variable[foreground_color] assign[=] call[name[QColor], parameter[name[foreground_color]]]
call[name[self].set_palette, parameter[]]
call[name[self].set_pythonshell_font, parameter[]] | keyword[def] identifier[set_color_scheme] ( identifier[self] , identifier[foreground_color] , identifier[background_color] ):
literal[string]
identifier[self] . identifier[ansi_handler] . identifier[set_color_scheme] ( identifier[foreground_color] , identifier[background_color] )
identifier[background_color] = identifier[QColor] ( identifier[background_color] )
identifier[foreground_color] = identifier[QColor] ( identifier[foreground_color] )
identifier[self] . identifier[set_palette] ( identifier[background] = identifier[background_color] ,
identifier[foreground] = identifier[foreground_color] )
identifier[self] . identifier[set_pythonshell_font] () | def set_color_scheme(self, foreground_color, background_color):
"""Set color scheme of the console (foreground and background)."""
self.ansi_handler.set_color_scheme(foreground_color, background_color)
background_color = QColor(background_color)
foreground_color = QColor(foreground_color)
self.set_palette(background=background_color, foreground=foreground_color)
self.set_pythonshell_font() |
def getIncludedInBuilds(self):
"""Get all :class:`rtcclient.models.IncludedInBuild` objects that
have already included this workitem
WARNING: If one of the IncludedInBuilds is removed or cannot be
retrieved/found correctly, then 404 error will be raised.
:return: a :class:`list` contains all the
:class:`rtcclient.models.IncludedInBuild` objects
:rtype: list
"""
build_tag = ("rtc_cm:com.ibm.team.build.linktype.includedWorkItems."
"com.ibm.team.build.common.link.includedInBuilds")
return self.rtc_obj._get_paged_resources("IncludedInBuild",
workitem_id=self.identifier,
customized_attr=build_tag,
page_size="5") | def function[getIncludedInBuilds, parameter[self]]:
constant[Get all :class:`rtcclient.models.IncludedInBuild` objects that
have already included this workitem
WARNING: If one of the IncludedInBuilds is removed or cannot be
retrieved/found correctly, then 404 error will be raised.
:return: a :class:`list` contains all the
:class:`rtcclient.models.IncludedInBuild` objects
:rtype: list
]
variable[build_tag] assign[=] constant[rtc_cm:com.ibm.team.build.linktype.includedWorkItems.com.ibm.team.build.common.link.includedInBuilds]
return[call[name[self].rtc_obj._get_paged_resources, parameter[constant[IncludedInBuild]]]] | keyword[def] identifier[getIncludedInBuilds] ( identifier[self] ):
literal[string]
identifier[build_tag] =( literal[string]
literal[string] )
keyword[return] identifier[self] . identifier[rtc_obj] . identifier[_get_paged_resources] ( literal[string] ,
identifier[workitem_id] = identifier[self] . identifier[identifier] ,
identifier[customized_attr] = identifier[build_tag] ,
identifier[page_size] = literal[string] ) | def getIncludedInBuilds(self):
"""Get all :class:`rtcclient.models.IncludedInBuild` objects that
have already included this workitem
WARNING: If one of the IncludedInBuilds is removed or cannot be
retrieved/found correctly, then 404 error will be raised.
:return: a :class:`list` contains all the
:class:`rtcclient.models.IncludedInBuild` objects
:rtype: list
"""
build_tag = 'rtc_cm:com.ibm.team.build.linktype.includedWorkItems.com.ibm.team.build.common.link.includedInBuilds'
return self.rtc_obj._get_paged_resources('IncludedInBuild', workitem_id=self.identifier, customized_attr=build_tag, page_size='5') |
def is_valid_op(self, symmop):
"""
Check if a particular symmetry operation is a valid symmetry operation
for a molecule, i.e., the operation maps all atoms to another
equivalent atom.
Args:
symmop (SymmOp): Symmetry operation to test.
Returns:
(bool): Whether SymmOp is valid for Molecule.
"""
coords = self.centered_mol.cart_coords
for site in self.centered_mol:
coord = symmop.operate(site.coords)
ind = find_in_coord_list(coords, coord, self.tol)
if not (len(ind) == 1
and self.centered_mol[ind[0]].species
== site.species):
return False
return True | def function[is_valid_op, parameter[self, symmop]]:
constant[
Check if a particular symmetry operation is a valid symmetry operation
for a molecule, i.e., the operation maps all atoms to another
equivalent atom.
Args:
symmop (SymmOp): Symmetry operation to test.
Returns:
(bool): Whether SymmOp is valid for Molecule.
]
variable[coords] assign[=] name[self].centered_mol.cart_coords
for taget[name[site]] in starred[name[self].centered_mol] begin[:]
variable[coord] assign[=] call[name[symmop].operate, parameter[name[site].coords]]
variable[ind] assign[=] call[name[find_in_coord_list], parameter[name[coords], name[coord], name[self].tol]]
if <ast.UnaryOp object at 0x7da1b1c5a380> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[is_valid_op] ( identifier[self] , identifier[symmop] ):
literal[string]
identifier[coords] = identifier[self] . identifier[centered_mol] . identifier[cart_coords]
keyword[for] identifier[site] keyword[in] identifier[self] . identifier[centered_mol] :
identifier[coord] = identifier[symmop] . identifier[operate] ( identifier[site] . identifier[coords] )
identifier[ind] = identifier[find_in_coord_list] ( identifier[coords] , identifier[coord] , identifier[self] . identifier[tol] )
keyword[if] keyword[not] ( identifier[len] ( identifier[ind] )== literal[int]
keyword[and] identifier[self] . identifier[centered_mol] [ identifier[ind] [ literal[int] ]]. identifier[species]
== identifier[site] . identifier[species] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_valid_op(self, symmop):
"""
Check if a particular symmetry operation is a valid symmetry operation
for a molecule, i.e., the operation maps all atoms to another
equivalent atom.
Args:
symmop (SymmOp): Symmetry operation to test.
Returns:
(bool): Whether SymmOp is valid for Molecule.
"""
coords = self.centered_mol.cart_coords
for site in self.centered_mol:
coord = symmop.operate(site.coords)
ind = find_in_coord_list(coords, coord, self.tol)
if not (len(ind) == 1 and self.centered_mol[ind[0]].species == site.species):
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['site']]
return True |
def generateExecutable(self, outpath='.', signed=False):
"""
Generates the executable for this builder in the output path.
:param outpath | <str>
"""
if not (self.runtime() or self.specfile()):
return True
if not self.distributionPath():
return True
if os.path.exists(self.distributionPath()):
shutil.rmtree(self.distributionPath())
if os.path.isfile(self.sourcePath()):
basepath = os.path.normpath(os.path.dirname(self.sourcePath()))
else:
basepath = os.path.normpath(self.sourcePath())
# store the plugin table of contents
self.generatePlugins(basepath)
# generate the specfile if necessary
specfile = self.specfile()
# generate the spec file options
opts = {
'name': self.name(),
'exname': self.executableName(),
'product': self.productName(),
'runtime': self.runtime(),
'srcpath': self.sourcePath(),
'buildpath': self.buildPath(),
'hookpaths': ',\n'.join(wrap_str(self.hookPaths())),
'hiddenimports': ',\n'.join(wrap_str(self.hiddenImports())),
'distpath': self.distributionPath(),
'platform': sys.platform,
'excludes': ',\n'.join(wrap_str(self.executableExcludes()))
}
if not specfile:
datasets = []
for typ, data in self.executableData():
if typ == 'tree':
args = {
'path': data[0],
'prefix': data[1],
'excludes': ','.join(wrap_str(data[2]))
}
datasets.append(templ.SPECTREE.format(**args))
else:
args = {}
args.update(data)
args.setdefault('type', typ)
datasets.append(templ.SPECDATA.format(**args))
opts['datasets'] = '\n'.join(datasets)
opts.update(self._executableOptions)
if self.executableCliName():
opts['cliname'] = self.executableCliName()
opts['collect'] = templ.SPECFILE_CLI.format(**opts)
else:
opts['collect'] = templ.SPECFILE_COLLECT.format(**opts)
if opts['onefile']:
data = templ.SPECFILE_ONEFILE.format(**opts)
else:
data = templ.SPECFILE.format(**opts)
# generate the spec file for building
specfile = os.path.join(self.buildPath(), self.name() + '.spec')
f = open(specfile, 'w')
f.write(data)
f.close()
cmd = os.path.expandvars(self.executableOption('cmd'))
success = cmdexec(cmd.format(spec=specfile)) == 0
if signed:
binfile = os.path.join(opts['distpath'],
opts['product'],
opts['exname'] + '.exe')
self.sign(binfile)
return success | def function[generateExecutable, parameter[self, outpath, signed]]:
constant[
Generates the executable for this builder in the output path.
:param outpath | <str>
]
if <ast.UnaryOp object at 0x7da1b289bd90> begin[:]
return[constant[True]]
if <ast.UnaryOp object at 0x7da1b289a980> begin[:]
return[constant[True]]
if call[name[os].path.exists, parameter[call[name[self].distributionPath, parameter[]]]] begin[:]
call[name[shutil].rmtree, parameter[call[name[self].distributionPath, parameter[]]]]
if call[name[os].path.isfile, parameter[call[name[self].sourcePath, parameter[]]]] begin[:]
variable[basepath] assign[=] call[name[os].path.normpath, parameter[call[name[os].path.dirname, parameter[call[name[self].sourcePath, parameter[]]]]]]
call[name[self].generatePlugins, parameter[name[basepath]]]
variable[specfile] assign[=] call[name[self].specfile, parameter[]]
variable[opts] assign[=] dictionary[[<ast.Constant object at 0x7da1b2899cf0>, <ast.Constant object at 0x7da1b2899cc0>, <ast.Constant object at 0x7da1b2899c90>, <ast.Constant object at 0x7da1b2899c60>, <ast.Constant object at 0x7da1b2899c30>, <ast.Constant object at 0x7da1b2899c00>, <ast.Constant object at 0x7da1b2899bd0>, <ast.Constant object at 0x7da1b2899ba0>, <ast.Constant object at 0x7da1b2899b70>, <ast.Constant object at 0x7da1b2899b40>, <ast.Constant object at 0x7da1b2899b10>], [<ast.Call object at 0x7da1b2899ae0>, <ast.Call object at 0x7da1b2899a50>, <ast.Call object at 0x7da1b28999c0>, <ast.Call object at 0x7da1b2899930>, <ast.Call object at 0x7da1b28998a0>, <ast.Call object at 0x7da1b2899810>, <ast.Call object at 0x7da1b2898040>, <ast.Call object at 0x7da1b28981c0>, <ast.Call object at 0x7da1b2898340>, <ast.Attribute object at 0x7da1b28983d0>, <ast.Call object at 0x7da1b2898430>]]
if <ast.UnaryOp object at 0x7da1b28985e0> begin[:]
variable[datasets] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b2898730>, <ast.Name object at 0x7da1b2898760>]]] in starred[call[name[self].executableData, parameter[]]] begin[:]
if compare[name[typ] equal[==] constant[tree]] begin[:]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b2898970>, <ast.Constant object at 0x7da1b28989a0>, <ast.Constant object at 0x7da1b28989d0>], [<ast.Subscript object at 0x7da1b2898a00>, <ast.Subscript object at 0x7da1b2898a90>, <ast.Call object at 0x7da1b2898b20>]]
call[name[datasets].append, parameter[call[name[templ].SPECTREE.format, parameter[]]]]
call[name[opts]][constant[datasets]] assign[=] call[constant[
].join, parameter[name[datasets]]]
call[name[opts].update, parameter[name[self]._executableOptions]]
if call[name[self].executableCliName, parameter[]] begin[:]
call[name[opts]][constant[cliname]] assign[=] call[name[self].executableCliName, parameter[]]
call[name[opts]][constant[collect]] assign[=] call[name[templ].SPECFILE_CLI.format, parameter[]]
if call[name[opts]][constant[onefile]] begin[:]
variable[data] assign[=] call[name[templ].SPECFILE_ONEFILE.format, parameter[]]
variable[specfile] assign[=] call[name[os].path.join, parameter[call[name[self].buildPath, parameter[]], binary_operation[call[name[self].name, parameter[]] + constant[.spec]]]]
variable[f] assign[=] call[name[open], parameter[name[specfile], constant[w]]]
call[name[f].write, parameter[name[data]]]
call[name[f].close, parameter[]]
variable[cmd] assign[=] call[name[os].path.expandvars, parameter[call[name[self].executableOption, parameter[constant[cmd]]]]]
variable[success] assign[=] compare[call[name[cmdexec], parameter[call[name[cmd].format, parameter[]]]] equal[==] constant[0]]
if name[signed] begin[:]
variable[binfile] assign[=] call[name[os].path.join, parameter[call[name[opts]][constant[distpath]], call[name[opts]][constant[product]], binary_operation[call[name[opts]][constant[exname]] + constant[.exe]]]]
call[name[self].sign, parameter[name[binfile]]]
return[name[success]] | keyword[def] identifier[generateExecutable] ( identifier[self] , identifier[outpath] = literal[string] , identifier[signed] = keyword[False] ):
literal[string]
keyword[if] keyword[not] ( identifier[self] . identifier[runtime] () keyword[or] identifier[self] . identifier[specfile] ()):
keyword[return] keyword[True]
keyword[if] keyword[not] identifier[self] . identifier[distributionPath] ():
keyword[return] keyword[True]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[distributionPath] ()):
identifier[shutil] . identifier[rmtree] ( identifier[self] . identifier[distributionPath] ())
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[sourcePath] ()):
identifier[basepath] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[self] . identifier[sourcePath] ()))
keyword[else] :
identifier[basepath] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[self] . identifier[sourcePath] ())
identifier[self] . identifier[generatePlugins] ( identifier[basepath] )
identifier[specfile] = identifier[self] . identifier[specfile] ()
identifier[opts] ={
literal[string] : identifier[self] . identifier[name] (),
literal[string] : identifier[self] . identifier[executableName] (),
literal[string] : identifier[self] . identifier[productName] (),
literal[string] : identifier[self] . identifier[runtime] (),
literal[string] : identifier[self] . identifier[sourcePath] (),
literal[string] : identifier[self] . identifier[buildPath] (),
literal[string] : literal[string] . identifier[join] ( identifier[wrap_str] ( identifier[self] . identifier[hookPaths] ())),
literal[string] : literal[string] . identifier[join] ( identifier[wrap_str] ( identifier[self] . identifier[hiddenImports] ())),
literal[string] : identifier[self] . identifier[distributionPath] (),
literal[string] : identifier[sys] . identifier[platform] ,
literal[string] : literal[string] . identifier[join] ( identifier[wrap_str] ( identifier[self] . identifier[executableExcludes] ()))
}
keyword[if] keyword[not] identifier[specfile] :
identifier[datasets] =[]
keyword[for] identifier[typ] , identifier[data] keyword[in] identifier[self] . identifier[executableData] ():
keyword[if] identifier[typ] == literal[string] :
identifier[args] ={
literal[string] : identifier[data] [ literal[int] ],
literal[string] : identifier[data] [ literal[int] ],
literal[string] : literal[string] . identifier[join] ( identifier[wrap_str] ( identifier[data] [ literal[int] ]))
}
identifier[datasets] . identifier[append] ( identifier[templ] . identifier[SPECTREE] . identifier[format] (** identifier[args] ))
keyword[else] :
identifier[args] ={}
identifier[args] . identifier[update] ( identifier[data] )
identifier[args] . identifier[setdefault] ( literal[string] , identifier[typ] )
identifier[datasets] . identifier[append] ( identifier[templ] . identifier[SPECDATA] . identifier[format] (** identifier[args] ))
identifier[opts] [ literal[string] ]= literal[string] . identifier[join] ( identifier[datasets] )
identifier[opts] . identifier[update] ( identifier[self] . identifier[_executableOptions] )
keyword[if] identifier[self] . identifier[executableCliName] ():
identifier[opts] [ literal[string] ]= identifier[self] . identifier[executableCliName] ()
identifier[opts] [ literal[string] ]= identifier[templ] . identifier[SPECFILE_CLI] . identifier[format] (** identifier[opts] )
keyword[else] :
identifier[opts] [ literal[string] ]= identifier[templ] . identifier[SPECFILE_COLLECT] . identifier[format] (** identifier[opts] )
keyword[if] identifier[opts] [ literal[string] ]:
identifier[data] = identifier[templ] . identifier[SPECFILE_ONEFILE] . identifier[format] (** identifier[opts] )
keyword[else] :
identifier[data] = identifier[templ] . identifier[SPECFILE] . identifier[format] (** identifier[opts] )
identifier[specfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[buildPath] (), identifier[self] . identifier[name] ()+ literal[string] )
identifier[f] = identifier[open] ( identifier[specfile] , literal[string] )
identifier[f] . identifier[write] ( identifier[data] )
identifier[f] . identifier[close] ()
identifier[cmd] = identifier[os] . identifier[path] . identifier[expandvars] ( identifier[self] . identifier[executableOption] ( literal[string] ))
identifier[success] = identifier[cmdexec] ( identifier[cmd] . identifier[format] ( identifier[spec] = identifier[specfile] ))== literal[int]
keyword[if] identifier[signed] :
identifier[binfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[opts] [ literal[string] ],
identifier[opts] [ literal[string] ],
identifier[opts] [ literal[string] ]+ literal[string] )
identifier[self] . identifier[sign] ( identifier[binfile] )
keyword[return] identifier[success] | def generateExecutable(self, outpath='.', signed=False):
"""
Generates the executable for this builder in the output path.
:param outpath | <str>
"""
if not (self.runtime() or self.specfile()):
return True # depends on [control=['if'], data=[]]
if not self.distributionPath():
return True # depends on [control=['if'], data=[]]
if os.path.exists(self.distributionPath()):
shutil.rmtree(self.distributionPath()) # depends on [control=['if'], data=[]]
if os.path.isfile(self.sourcePath()):
basepath = os.path.normpath(os.path.dirname(self.sourcePath())) # depends on [control=['if'], data=[]]
else:
basepath = os.path.normpath(self.sourcePath())
# store the plugin table of contents
self.generatePlugins(basepath)
# generate the specfile if necessary
specfile = self.specfile()
# generate the spec file options
opts = {'name': self.name(), 'exname': self.executableName(), 'product': self.productName(), 'runtime': self.runtime(), 'srcpath': self.sourcePath(), 'buildpath': self.buildPath(), 'hookpaths': ',\n'.join(wrap_str(self.hookPaths())), 'hiddenimports': ',\n'.join(wrap_str(self.hiddenImports())), 'distpath': self.distributionPath(), 'platform': sys.platform, 'excludes': ',\n'.join(wrap_str(self.executableExcludes()))}
if not specfile:
datasets = []
for (typ, data) in self.executableData():
if typ == 'tree':
args = {'path': data[0], 'prefix': data[1], 'excludes': ','.join(wrap_str(data[2]))}
datasets.append(templ.SPECTREE.format(**args)) # depends on [control=['if'], data=[]]
else:
args = {}
args.update(data)
args.setdefault('type', typ)
datasets.append(templ.SPECDATA.format(**args)) # depends on [control=['for'], data=[]]
opts['datasets'] = '\n'.join(datasets)
opts.update(self._executableOptions)
if self.executableCliName():
opts['cliname'] = self.executableCliName()
opts['collect'] = templ.SPECFILE_CLI.format(**opts) # depends on [control=['if'], data=[]]
else:
opts['collect'] = templ.SPECFILE_COLLECT.format(**opts)
if opts['onefile']:
data = templ.SPECFILE_ONEFILE.format(**opts) # depends on [control=['if'], data=[]]
else:
data = templ.SPECFILE.format(**opts)
# generate the spec file for building
specfile = os.path.join(self.buildPath(), self.name() + '.spec')
f = open(specfile, 'w')
f.write(data)
f.close() # depends on [control=['if'], data=[]]
cmd = os.path.expandvars(self.executableOption('cmd'))
success = cmdexec(cmd.format(spec=specfile)) == 0
if signed:
binfile = os.path.join(opts['distpath'], opts['product'], opts['exname'] + '.exe')
self.sign(binfile) # depends on [control=['if'], data=[]]
return success |
def account_info(self):
""" Certain attributes have a user's account information
associated with it such as a gifted or crafted item.
A dict with two keys: 'persona' and 'id64'.
None if the attribute has no account information attached to it. """
account_info = self._attribute.get("account_info")
if account_info:
return {"persona": account_info.get("personaname", ""),
"id64": account_info["steamid"]}
else:
return None | def function[account_info, parameter[self]]:
constant[ Certain attributes have a user's account information
associated with it such as a gifted or crafted item.
A dict with two keys: 'persona' and 'id64'.
None if the attribute has no account information attached to it. ]
variable[account_info] assign[=] call[name[self]._attribute.get, parameter[constant[account_info]]]
if name[account_info] begin[:]
return[dictionary[[<ast.Constant object at 0x7da1b0f1bc40>, <ast.Constant object at 0x7da1b0f18ac0>], [<ast.Call object at 0x7da1b0f18a30>, <ast.Subscript object at 0x7da1b0f195a0>]]] | keyword[def] identifier[account_info] ( identifier[self] ):
literal[string]
identifier[account_info] = identifier[self] . identifier[_attribute] . identifier[get] ( literal[string] )
keyword[if] identifier[account_info] :
keyword[return] { literal[string] : identifier[account_info] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[account_info] [ literal[string] ]}
keyword[else] :
keyword[return] keyword[None] | def account_info(self):
""" Certain attributes have a user's account information
associated with it such as a gifted or crafted item.
A dict with two keys: 'persona' and 'id64'.
None if the attribute has no account information attached to it. """
account_info = self._attribute.get('account_info')
if account_info:
return {'persona': account_info.get('personaname', ''), 'id64': account_info['steamid']} # depends on [control=['if'], data=[]]
else:
return None |
def p_x_commalist(self,t):
"""commalist : commalist ',' expression
| expression
"""
if len(t) == 2: t[0] = CommaX([t[1]])
elif len(t) == 4: t[0] = CommaX(t[1].children+[t[3]])
else: raise NotImplementedError('unk_len',len(t)) # pragma: no cover | def function[p_x_commalist, parameter[self, t]]:
constant[commalist : commalist ',' expression
| expression
]
if compare[call[name[len], parameter[name[t]]] equal[==] constant[2]] begin[:]
call[name[t]][constant[0]] assign[=] call[name[CommaX], parameter[list[[<ast.Subscript object at 0x7da1b1403f70>]]]] | keyword[def] identifier[p_x_commalist] ( identifier[self] , identifier[t] ):
literal[string]
keyword[if] identifier[len] ( identifier[t] )== literal[int] : identifier[t] [ literal[int] ]= identifier[CommaX] ([ identifier[t] [ literal[int] ]])
keyword[elif] identifier[len] ( identifier[t] )== literal[int] : identifier[t] [ literal[int] ]= identifier[CommaX] ( identifier[t] [ literal[int] ]. identifier[children] +[ identifier[t] [ literal[int] ]])
keyword[else] : keyword[raise] identifier[NotImplementedError] ( literal[string] , identifier[len] ( identifier[t] )) | def p_x_commalist(self, t):
"""commalist : commalist ',' expression
| expression
"""
if len(t) == 2:
t[0] = CommaX([t[1]]) # depends on [control=['if'], data=[]]
elif len(t) == 4:
t[0] = CommaX(t[1].children + [t[3]]) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('unk_len', len(t)) # pragma: no cover |
def attach_socket(self, container, params=None, ws=False):
"""
Like ``attach``, but returns the underlying socket-like object for the
HTTP request.
Args:
container (str): The container to attach to.
params (dict): Dictionary of request parameters (e.g. ``stdout``,
``stderr``, ``stream``).
For ``detachKeys``, ~/.docker/config.json is used by default.
ws (bool): Use websockets instead of raw HTTP.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
if params is None:
params = {
'stdout': 1,
'stderr': 1,
'stream': 1
}
if 'detachKeys' not in params \
and 'detachKeys' in self._general_configs:
params['detachKeys'] = self._general_configs['detachKeys']
if ws:
return self._attach_websocket(container, params)
headers = {
'Connection': 'Upgrade',
'Upgrade': 'tcp'
}
u = self._url("/containers/{0}/attach", container)
return self._get_raw_response_socket(
self.post(
u, None, params=self._attach_params(params), stream=True,
headers=headers
)
) | def function[attach_socket, parameter[self, container, params, ws]]:
constant[
Like ``attach``, but returns the underlying socket-like object for the
HTTP request.
Args:
container (str): The container to attach to.
params (dict): Dictionary of request parameters (e.g. ``stdout``,
``stderr``, ``stream``).
For ``detachKeys``, ~/.docker/config.json is used by default.
ws (bool): Use websockets instead of raw HTTP.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
]
if compare[name[params] is constant[None]] begin[:]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18dc98220>, <ast.Constant object at 0x7da1b1c78ee0>, <ast.Constant object at 0x7da1b1c79510>], [<ast.Constant object at 0x7da1b1c795d0>, <ast.Constant object at 0x7da1b1c795a0>, <ast.Constant object at 0x7da1b1c79600>]]
if <ast.BoolOp object at 0x7da1b1c79660> begin[:]
call[name[params]][constant[detachKeys]] assign[=] call[name[self]._general_configs][constant[detachKeys]]
if name[ws] begin[:]
return[call[name[self]._attach_websocket, parameter[name[container], name[params]]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c7b700>, <ast.Constant object at 0x7da1b1c7b6a0>], [<ast.Constant object at 0x7da1b1c79960>, <ast.Constant object at 0x7da1b1c79a50>]]
variable[u] assign[=] call[name[self]._url, parameter[constant[/containers/{0}/attach], name[container]]]
return[call[name[self]._get_raw_response_socket, parameter[call[name[self].post, parameter[name[u], constant[None]]]]]] | keyword[def] identifier[attach_socket] ( identifier[self] , identifier[container] , identifier[params] = keyword[None] , identifier[ws] = keyword[False] ):
literal[string]
keyword[if] identifier[params] keyword[is] keyword[None] :
identifier[params] ={
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int]
}
keyword[if] literal[string] keyword[not] keyword[in] identifier[params] keyword[and] literal[string] keyword[in] identifier[self] . identifier[_general_configs] :
identifier[params] [ literal[string] ]= identifier[self] . identifier[_general_configs] [ literal[string] ]
keyword[if] identifier[ws] :
keyword[return] identifier[self] . identifier[_attach_websocket] ( identifier[container] , identifier[params] )
identifier[headers] ={
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[u] = identifier[self] . identifier[_url] ( literal[string] , identifier[container] )
keyword[return] identifier[self] . identifier[_get_raw_response_socket] (
identifier[self] . identifier[post] (
identifier[u] , keyword[None] , identifier[params] = identifier[self] . identifier[_attach_params] ( identifier[params] ), identifier[stream] = keyword[True] ,
identifier[headers] = identifier[headers]
)
) | def attach_socket(self, container, params=None, ws=False):
"""
Like ``attach``, but returns the underlying socket-like object for the
HTTP request.
Args:
container (str): The container to attach to.
params (dict): Dictionary of request parameters (e.g. ``stdout``,
``stderr``, ``stream``).
For ``detachKeys``, ~/.docker/config.json is used by default.
ws (bool): Use websockets instead of raw HTTP.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
if params is None:
params = {'stdout': 1, 'stderr': 1, 'stream': 1} # depends on [control=['if'], data=['params']]
if 'detachKeys' not in params and 'detachKeys' in self._general_configs:
params['detachKeys'] = self._general_configs['detachKeys'] # depends on [control=['if'], data=[]]
if ws:
return self._attach_websocket(container, params) # depends on [control=['if'], data=[]]
headers = {'Connection': 'Upgrade', 'Upgrade': 'tcp'}
u = self._url('/containers/{0}/attach', container)
return self._get_raw_response_socket(self.post(u, None, params=self._attach_params(params), stream=True, headers=headers)) |
def open(self, mode=MODE_READ):
    """
    Opens this repo in the specified mode.

    :param mode: either ``MODE_READ`` (the default) or ``MODE_WRITE``.
    :raises ValueError: if ``mode`` is not one of the supported modes.

    TODO: figure out the correct semantics of this and document
    the intended future behaviour as well as the current
    transitional behaviour.
    """
    if mode not in [MODE_READ, MODE_WRITE]:
        error = "Open mode must be '{}' or '{}'".format(
            MODE_READ, MODE_WRITE)
        raise ValueError(error)
    self._openMode = mode
    # The two read-mode steps below were previously guarded by two
    # identical consecutive ``if`` statements; they are merged here.
    if mode == MODE_READ:
        self.assertExists()
        # This is part of the transitional behaviour where
        # we load the whole DB into memory to get access to
        # the data model.
        self.load()
constant[
Opens this repo in the specified mode.
TODO: figure out the correct semantics of this and document
the intended future behaviour as well as the current
transitional behaviour.
]
if compare[name[mode] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Name object at 0x7da20c76d9f0>, <ast.Name object at 0x7da20c76ce80>]]] begin[:]
variable[error] assign[=] call[constant[Open mode must be '{}' or '{}'].format, parameter[name[MODE_READ], name[MODE_WRITE]]]
<ast.Raise object at 0x7da20c76c520>
name[self]._openMode assign[=] name[mode]
if compare[name[mode] equal[==] name[MODE_READ]] begin[:]
call[name[self].assertExists, parameter[]]
if compare[name[mode] equal[==] name[MODE_READ]] begin[:]
call[name[self].load, parameter[]] | keyword[def] identifier[open] ( identifier[self] , identifier[mode] = identifier[MODE_READ] ):
literal[string]
keyword[if] identifier[mode] keyword[not] keyword[in] [ identifier[MODE_READ] , identifier[MODE_WRITE] ]:
identifier[error] = literal[string] . identifier[format] (
identifier[MODE_READ] , identifier[MODE_WRITE] )
keyword[raise] identifier[ValueError] ( identifier[error] )
identifier[self] . identifier[_openMode] = identifier[mode]
keyword[if] identifier[mode] == identifier[MODE_READ] :
identifier[self] . identifier[assertExists] ()
keyword[if] identifier[mode] == identifier[MODE_READ] :
identifier[self] . identifier[load] () | def open(self, mode=MODE_READ):
"""
Opens this repo in the specified mode.
TODO: figure out the correct semantics of this and document
the intended future behaviour as well as the current
transitional behaviour.
"""
if mode not in [MODE_READ, MODE_WRITE]:
error = "Open mode must be '{}' or '{}'".format(MODE_READ, MODE_WRITE)
raise ValueError(error) # depends on [control=['if'], data=[]]
self._openMode = mode
if mode == MODE_READ:
self.assertExists() # depends on [control=['if'], data=[]]
if mode == MODE_READ:
# This is part of the transitional behaviour where
# we load the whole DB into memory to get access to
# the data model.
self.load() # depends on [control=['if'], data=[]] |
def is_name_valid(fqn):
    """
    Is a fully-qualified name acceptable?
    Return True if so
    Return False if not
    >>> is_name_valid('abcd')
    False
    >>> is_name_valid('abcd.')
    False
    >>> is_name_valid('.abcd')
    False
    >>> is_name_valid('Abcd.abcd')
    False
    >>> is_name_valid('abcd.abc.d')
    False
    >>> is_name_valid('abcd.abc+d')
    False
    >>> is_name_valid('a.b.c')
    False
    >>> is_name_valid(True)
    False
    >>> is_name_valid(123)
    False
    >>> is_name_valid(None)
    False
    >>> is_name_valid('')
    False
    >>> is_name_valid('abcdabcdabcdabcdabcdabcdabcdabcda.bcd')
    True
    >>> is_name_valid('abcdabcdabcdabcdabcdabcdabcdabcdab.bcd')
    False
    >>> is_name_valid('abcdabcdabcdabcdabcdabcdabcdabcdabc.d')
    True
    >>> is_name_valid('a+b.c')
    False
    >>> is_name_valid('a_b.c')
    True
    """
    # Only string-like values can be names at all.
    if not isinstance(fqn, (str, unicode)):
        return False
    # A fully-qualified name has exactly one separator: "<name>.<namespace>".
    if fqn.count(".") != 1:
        return False
    name, namespace_id = fqn.split(".")
    # Both halves must be non-empty.
    if not (name and namespace_id):
        return False
    # The name part may not contain forbidden characters and must be base-40.
    if "+" in name or "." in name or not is_b40(name):
        return False
    if not is_namespace_valid(namespace_id):
        return False
    # Finally, enforce the maximum allowed length for the whole name.
    return len(fqn) <= LENGTHS['blockchain_id_name']
constant[
Is a fully-qualified name acceptable?
Return True if so
Return False if not
>>> is_name_valid('abcd')
False
>>> is_name_valid('abcd.')
False
>>> is_name_valid('.abcd')
False
>>> is_name_valid('Abcd.abcd')
False
>>> is_name_valid('abcd.abc.d')
False
>>> is_name_valid('abcd.abc+d')
False
>>> is_name_valid('a.b.c')
False
>>> is_name_valid(True)
False
>>> is_name_valid(123)
False
>>> is_name_valid(None)
False
>>> is_name_valid('')
False
>>> is_name_valid('abcdabcdabcdabcdabcdabcdabcdabcda.bcd')
True
>>> is_name_valid('abcdabcdabcdabcdabcdabcdabcdabcdab.bcd')
False
>>> is_name_valid('abcdabcdabcdabcdabcdabcdabcdabcdabc.d')
True
>>> is_name_valid('a+b.c')
False
>>> is_name_valid('a_b.c')
True
]
if <ast.UnaryOp object at 0x7da1b17d7ac0> begin[:]
return[constant[False]]
if compare[call[name[fqn].count, parameter[constant[.]]] not_equal[!=] constant[1]] begin[:]
return[constant[False]]
<ast.Tuple object at 0x7da1b17d56c0> assign[=] call[name[fqn].split, parameter[constant[.]]]
if <ast.BoolOp object at 0x7da1b17d7070> begin[:]
return[constant[False]]
if <ast.BoolOp object at 0x7da1b17d44c0> begin[:]
return[constant[False]]
if <ast.UnaryOp object at 0x7da1b17d7dc0> begin[:]
return[constant[False]]
if compare[call[name[len], parameter[name[fqn]]] greater[>] call[name[LENGTHS]][constant[blockchain_id_name]]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[is_name_valid] ( identifier[fqn] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[fqn] ,( identifier[str] , identifier[unicode] )):
keyword[return] keyword[False]
keyword[if] identifier[fqn] . identifier[count] ( literal[string] )!= literal[int] :
keyword[return] keyword[False]
identifier[name] , identifier[namespace_id] = identifier[fqn] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[name] )== literal[int] keyword[or] identifier[len] ( identifier[namespace_id] )== literal[int] :
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[is_b40] ( identifier[name] ) keyword[or] literal[string] keyword[in] identifier[name] keyword[or] literal[string] keyword[in] identifier[name] :
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[is_namespace_valid] ( identifier[namespace_id] ):
keyword[return] keyword[False]
keyword[if] identifier[len] ( identifier[fqn] )> identifier[LENGTHS] [ literal[string] ]:
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_name_valid(fqn):
"""
Is a fully-qualified name acceptable?
Return True if so
Return False if not
>>> is_name_valid('abcd')
False
>>> is_name_valid('abcd.')
False
>>> is_name_valid('.abcd')
False
>>> is_name_valid('Abcd.abcd')
False
>>> is_name_valid('abcd.abc.d')
False
>>> is_name_valid('abcd.abc+d')
False
>>> is_name_valid('a.b.c')
False
>>> is_name_valid(True)
False
>>> is_name_valid(123)
False
>>> is_name_valid(None)
False
>>> is_name_valid('')
False
>>> is_name_valid('abcdabcdabcdabcdabcdabcdabcdabcda.bcd')
True
>>> is_name_valid('abcdabcdabcdabcdabcdabcdabcdabcdab.bcd')
False
>>> is_name_valid('abcdabcdabcdabcdabcdabcdabcdabcdabc.d')
True
>>> is_name_valid('a+b.c')
False
>>> is_name_valid('a_b.c')
True
"""
if not isinstance(fqn, (str, unicode)):
return False # depends on [control=['if'], data=[]]
if fqn.count('.') != 1:
return False # depends on [control=['if'], data=[]]
(name, namespace_id) = fqn.split('.')
if len(name) == 0 or len(namespace_id) == 0:
return False # depends on [control=['if'], data=[]]
if not is_b40(name) or '+' in name or '.' in name:
return False # depends on [control=['if'], data=[]]
if not is_namespace_valid(namespace_id):
return False # depends on [control=['if'], data=[]]
if len(fqn) > LENGTHS['blockchain_id_name']:
# too long
return False # depends on [control=['if'], data=[]]
return True |
def merge_entity(self, entity, if_match='*'):
    '''
    Queues a merge entity operation in this batch. See
    :func:`~azure.storage.table.tableservice.TableService.merge_entity` for more
    information on merges.
    Nothing is sent to the service until the batch is committed.
    :param entity:
        The entity to merge. Could be a dict or an entity object.
        Must contain a PartitionKey and a RowKey.
    :type entity: dict or :class:`~azure.storage.table.models.Entity`
    :param str if_match:
        The client may specify the ETag for the entity on the
        request in order to compare to the ETag maintained by the service
        for the purpose of optimistic concurrency. The merge operation
        will be performed only if the ETag sent by the client matches the
        value maintained by the server, indicating that the entity has
        not been modified since it was retrieved by the client. To force
        an unconditional merge, set If-Match to the wildcard character (*).
    '''
    # Build the merge request first, then register it under the entity's keys.
    merge_request = _merge_entity(entity, if_match,
                                  self._require_encryption,
                                  self._key_encryption_key)
    self._add_to_batch(entity['PartitionKey'], entity['RowKey'], merge_request)
constant[
Adds a merge entity operation to the batch. See
:func:`~azure.storage.table.tableservice.TableService.merge_entity` for more
information on merges.
The operation will not be executed until the batch is committed.
:param entity:
The entity to merge. Could be a dict or an entity object.
Must contain a PartitionKey and a RowKey.
:type entity: dict or :class:`~azure.storage.table.models.Entity`
:param str if_match:
The client may specify the ETag for the entity on the
request in order to compare to the ETag maintained by the service
for the purpose of optimistic concurrency. The merge operation
will be performed only if the ETag sent by the client matches the
value maintained by the server, indicating that the entity has
not been modified since it was retrieved by the client. To force
an unconditional merge, set If-Match to the wildcard character (*).
]
variable[request] assign[=] call[name[_merge_entity], parameter[name[entity], name[if_match], name[self]._require_encryption, name[self]._key_encryption_key]]
call[name[self]._add_to_batch, parameter[call[name[entity]][constant[PartitionKey]], call[name[entity]][constant[RowKey]], name[request]]] | keyword[def] identifier[merge_entity] ( identifier[self] , identifier[entity] , identifier[if_match] = literal[string] ):
literal[string]
identifier[request] = identifier[_merge_entity] ( identifier[entity] , identifier[if_match] , identifier[self] . identifier[_require_encryption] ,
identifier[self] . identifier[_key_encryption_key] )
identifier[self] . identifier[_add_to_batch] ( identifier[entity] [ literal[string] ], identifier[entity] [ literal[string] ], identifier[request] ) | def merge_entity(self, entity, if_match='*'):
"""
Adds a merge entity operation to the batch. See
:func:`~azure.storage.table.tableservice.TableService.merge_entity` for more
information on merges.
The operation will not be executed until the batch is committed.
:param entity:
The entity to merge. Could be a dict or an entity object.
Must contain a PartitionKey and a RowKey.
:type entity: dict or :class:`~azure.storage.table.models.Entity`
:param str if_match:
The client may specify the ETag for the entity on the
request in order to compare to the ETag maintained by the service
for the purpose of optimistic concurrency. The merge operation
will be performed only if the ETag sent by the client matches the
value maintained by the server, indicating that the entity has
not been modified since it was retrieved by the client. To force
an unconditional merge, set If-Match to the wildcard character (*).
"""
request = _merge_entity(entity, if_match, self._require_encryption, self._key_encryption_key)
self._add_to_batch(entity['PartitionKey'], entity['RowKey'], request) |
def installed(name,
              pkgs=None,
              pip_bin=None,
              requirements=None,
              bin_env=None,
              use_wheel=False,
              no_use_wheel=False,
              log=None,
              proxy=None,
              timeout=None,
              repo=None,
              editable=None,
              find_links=None,
              index_url=None,
              extra_index_url=None,
              no_index=False,
              mirrors=None,
              build=None,
              target=None,
              download=None,
              download_cache=None,
              source=None,
              upgrade=False,
              force_reinstall=False,
              ignore_installed=False,
              exists_action=None,
              no_deps=False,
              no_install=False,
              no_download=False,
              install_options=None,
              global_options=None,
              user=None,
              cwd=None,
              pre_releases=False,
              cert=None,
              allow_all_external=False,
              allow_external=None,
              allow_unverified=None,
              process_dependency_links=False,
              env_vars=None,
              use_vt=False,
              trusted_host=None,
              no_cache_dir=False,
              cache_dir=None,
              no_binary=None,
              extra_args=None,
              **kwargs):
    '''
    Make sure the package is installed
    name
        The name of the python package to install. You can also specify version
        numbers here using the standard operators ``==, >=, <=``. If
        ``requirements`` is given, this parameter will be ignored.
        Example:
        .. code-block:: yaml
            django:
              pip.installed:
                - name: django >= 1.6, <= 1.7
                - require:
                  - pkg: python-pip
        This will install the latest Django version greater than 1.6 but less
        than 1.7.
    requirements
        Path to a pip requirements file. If the path begins with salt://
        the file will be transferred from the master file server.
    user
        The user under which to run pip
    use_wheel : False
        Prefer wheel archives (requires pip>=1.4)
    no_use_wheel : False
        Force to not use wheel archives (requires pip>=1.4)
    no_binary
        Force to not use binary packages (requires pip >= 7.0.0)
        Accepts either :all: to disable all binary packages, :none: to empty the set,
        or a list of one or more packages
        Example:
        .. code-block:: yaml
            django:
              pip.installed:
                - no_binary: ':all:'
            flask:
              pip.installed:
                - no_binary:
                  - itsdangerous
                  - click
    log
        Log file where a complete (maximum verbosity) record will be kept
    proxy
        Specify a proxy in the form
        user:passwd@proxy.server:port. Note that the
        user:password@ is optional and required only if you
        are behind an authenticated proxy. If you provide
        user@proxy.server:port then you will be prompted for a
        password.
    timeout
        Set the socket timeout (default 15 seconds)
    editable
        install something editable (i.e.
        git+https://github.com/worldcompany/djangoembed.git#egg=djangoembed)
    find_links
        URL to look for packages at
    index_url
        Base URL of Python Package Index
    extra_index_url
        Extra URLs of package indexes to use in addition to ``index_url``
    no_index
        Ignore package index
    mirrors
        Specific mirror URL(s) to query (automatically adds --use-mirrors)
    build
        Unpack packages into ``build`` dir
    target
        Install packages into ``target`` dir
    download
        Download packages into ``download`` instead of installing them
    download_cache
        Cache downloaded packages in ``download_cache`` dir
    source
        Check out ``editable`` packages into ``source`` dir
    upgrade
        Upgrade all packages to the newest available version
    force_reinstall
        When upgrading, reinstall all packages even if they are already
        up-to-date.
    ignore_installed
        Ignore the installed packages (reinstalling instead)
    exists_action
        Default action when a path already exists: (s)witch, (i)gnore, (w)ipe,
        (b)ackup
    no_deps
        Ignore package dependencies
    no_install
        Download and unpack all packages, but don't actually install them
    no_cache_dir:
        Disable the cache.
    cwd
        Current working directory to run pip from
    pre_releases
        Include pre-releases in the available versions
    cert
        Provide a path to an alternate CA bundle
    allow_all_external
        Allow the installation of all externally hosted files
    allow_external
        Allow the installation of externally hosted files (comma separated list)
    allow_unverified
        Allow the installation of insecure and unverifiable files (comma separated list)
    process_dependency_links
        Enable the processing of dependency links
    bin_env : None
        Absolute path to a virtual environment directory or absolute path to
        a pip executable. The example below assumes a virtual environment
        has been created at ``/foo/.virtualenvs/bar``.
    env_vars
        Add or modify environment variables. Useful for tweaking build steps,
        such as specifying INCLUDE or LIBRARY paths in Makefiles, build scripts or
        compiler calls. This must be in the form of a dictionary or a mapping.
        Example:
        .. code-block:: yaml
            django:
              pip.installed:
                - name: django_app
                - env_vars:
                    CUSTOM_PATH: /opt/django_app
                    VERBOSE: True
    use_vt
        Use VT terminal emulation (see output while installing)
    trusted_host
        Mark this host as trusted, even though it does not have valid or any
        HTTPS.
    Example:
    .. code-block:: yaml
        django:
          pip.installed:
            - name: django >= 1.6, <= 1.7
            - bin_env: /foo/.virtualenvs/bar
            - require:
              - pkg: python-pip
    Or
    Example:
    .. code-block:: yaml
        django:
          pip.installed:
            - name: django >= 1.6, <= 1.7
            - bin_env: /foo/.virtualenvs/bar/bin/pip
            - require:
              - pkg: python-pip
    .. admonition:: Attention
        The following arguments are deprecated, do not use.
    pip_bin : None
        Deprecated, use ``bin_env``
    .. versionchanged:: 0.17.0
        ``use_wheel`` option added.
    install_options
        Extra arguments to be supplied to the setup.py install command.
        If you are using an option with a directory path, be sure to use
        absolute path.
        Example:
        .. code-block:: yaml
            django:
              pip.installed:
                - name: django
                - install_options:
                  - --prefix=/blah
                - require:
                  - pkg: python-pip
    global_options
        Extra global options to be supplied to the setup.py call before the
        install command.
        .. versionadded:: 2014.1.3
    .. admonition:: Attention
        As of Salt 0.17.0 the pip state **needs** an importable pip module.
        This usually means having the system's pip package installed or running
        Salt from an active `virtualenv`_.
        The reason for this requirement is because ``pip`` already does a
        pretty good job parsing its own requirements. It makes no sense for
        Salt to do ``pip`` requirements parsing and validation before passing
        them to the ``pip`` library. It's functionality duplication and it's
        more error prone.
    .. admonition:: Attention
        Please set ``reload_modules: True`` to have the salt minion
        import this module after installation.
    Example:
    .. code-block:: yaml
        pyopenssl:
          pip.installed:
            - name: pyOpenSSL
            - reload_modules: True
            - exists_action: i
    extra_args
        pip keyword and positional arguments not yet implemented in salt
        .. code-block:: yaml
            pandas:
              pip.installed:
                - name: pandas
                - extra_args:
                  - --latest-pip-kwarg: param
                  - --latest-pip-arg
        .. warning::
            If unsupported options are passed here that are not supported in a
            minion's version of pip, a `No such option error` will be thrown.
    .. _`virtualenv`: http://www.virtualenv.org/en/latest/
    '''
    # Honour the deprecated pip_bin parameter as an alias for bin_env.
    if pip_bin and not bin_env:
        bin_env = pip_bin
    # If pkgs is present, ignore name
    if pkgs:
        if not isinstance(pkgs, list):
            return {'name': name,
                    'result': False,
                    'changes': {},
                    'comment': 'pkgs argument must be formatted as a list'}
    else:
        pkgs = [name]
    # Assumption: If `pkg` is not an `string`, it's a `collections.OrderedDict`
    # prepro = lambda pkg: pkg if type(pkg) == str else \
    #     ' '.join((pkg.items()[0][0], pkg.items()[0][1].replace(',', ';')))
    # pkgs = ','.join([prepro(pkg) for pkg in pkgs])
    # NOTE(review): six.iteritems() returns an iterator, which does not
    # support indexing; `six.iteritems(pkg)[0]` would raise TypeError for
    # dict-form pkg entries — confirm (likely meant list(six.iteritems(pkg))[0]).
    prepro = lambda pkg: pkg if isinstance(pkg, six.string_types) else \
        ' '.join((six.iteritems(pkg)[0][0], six.iteritems(pkg)[0][1]))
    pkgs = [prepro(pkg) for pkg in pkgs]
    # Standard Salt state return dict; 'result' None means "not run / dry run".
    ret = {'name': ';'.join(pkgs), 'result': None,
           'comment': '', 'changes': {}}
    # Determine the pip version up front so option support can be validated.
    try:
        cur_version = __salt__['pip.version'](bin_env)
    except (CommandNotFoundError, CommandExecutionError) as err:
        ret['result'] = None
        ret['comment'] = 'Error installing \'{0}\': {1}'.format(name, err)
        return ret
    # Check that the pip binary supports the 'use_wheel' option
    if use_wheel:
        min_version = '1.4'
        max_version = '9.0.3'
        too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
        too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
        if too_low or too_high:
            ret['result'] = False
            ret['comment'] = ('The \'use_wheel\' option is only supported in '
                              'pip between {0} and {1}. The version of pip detected '
                              'was {2}.').format(min_version, max_version, cur_version)
            return ret
    # Check that the pip binary supports the 'no_use_wheel' option
    if no_use_wheel:
        min_version = '1.4'
        max_version = '9.0.3'
        too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
        too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
        if too_low or too_high:
            ret['result'] = False
            ret['comment'] = ('The \'no_use_wheel\' option is only supported in '
                              'pip between {0} and {1}. The version of pip detected '
                              'was {2}.').format(min_version, max_version, cur_version)
            return ret
    # Check that the pip binary supports the 'no_binary' option
    if no_binary:
        min_version = '7.0.0'
        too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
        if too_low:
            ret['result'] = False
            ret['comment'] = ('The \'no_binary\' option is only supported in '
                              'pip {0} and newer. The version of pip detected '
                              'was {1}.').format(min_version, cur_version)
            return ret
    # Get the packages parsed name and version from the pip library.
    # This only is done when there is no requirements or editable parameter.
    pkgs_details = []
    if pkgs and not (requirements or editable):
        comments = []
        for pkg in iter(pkgs):
            out = _check_pkg_version_format(pkg)
            if out['result'] is False:
                ret['result'] = False
                comments.append(out['comment'])
            elif out['result'] is True:
                pkgs_details.append((out['prefix'], pkg, out['version_spec']))
        if ret['result'] is False:
            ret['comment'] = '\n'.join(comments)
            return ret
    # If a requirements file is specified, only install the contents of the
    # requirements file. Similarly, using the --editable flag with pip should
    # also ignore the "name" and "pkgs" parameters.
    target_pkgs = []
    already_installed_comments = []
    if requirements or editable:
        comments = []
        # Append comments if this is a dry run.
        if __opts__['test']:
            ret['result'] = None
            if requirements:
                # TODO: Check requirements file against currently-installed
                # packages to provide more accurate state output.
                comments.append('Requirements file \'{0}\' will be '
                                'processed.'.format(requirements))
            if editable:
                comments.append(
                    'Package will be installed in editable mode (i.e. '
                    'setuptools "develop mode") from {0}.'.format(editable)
                )
            ret['comment'] = ' '.join(comments)
            return ret
    # No requirements case.
    # Check pre-existence of the requested packages.
    else:
        # Attempt to pre-cache a the current pip list
        try:
            pip_list = __salt__['pip.list'](bin_env=bin_env, user=user, cwd=cwd)
        # If we fail, then just send False, and we'll try again in the next function call
        except Exception as exc:
            # NOTE(review): the `log` parameter (a log-file path, default None)
            # shadows what is presumably the module-level logger here; calling
            # .exception() on it would fail — confirm intended name.
            log.exception(exc)
            pip_list = False
        for prefix, state_pkg_name, version_spec in pkgs_details:
            if prefix:
                # NOTE(review): the two self-assignments below are no-ops,
                # likely vestigial from an earlier refactor.
                state_pkg_name = state_pkg_name
                version_spec = version_spec
                out = _check_if_installed(prefix, state_pkg_name, version_spec,
                                          ignore_installed, force_reinstall,
                                          upgrade, user, cwd, bin_env, env_vars,
                                          index_url, extra_index_url, pip_list,
                                          **kwargs)
                # If _check_if_installed result is None, something went wrong with
                # the command running. This way we keep stateful output.
                if out['result'] is None:
                    ret['result'] = False
                    ret['comment'] = out['comment']
                    return ret
            else:
                out = {'result': False, 'comment': None}
            result = out['result']
            # The package is not present. Add it to the pkgs to install.
            if result is False:
                # Replace commas (used for version ranges) with semicolons
                # (which are not supported) in name so it does not treat
                # them as multiple packages.
                target_pkgs.append((prefix, state_pkg_name.replace(',', ';')))
                # Append comments if this is a dry run.
                if __opts__['test']:
                    msg = 'Python package {0} is set to be installed'
                    ret['result'] = None
                    ret['comment'] = msg.format(state_pkg_name)
                    return ret
            # The package is already present and will not be reinstalled.
            elif result is True:
                # Append comment stating its presence
                already_installed_comments.append(out['comment'])
            # The command pip.list failed. Abort.
            elif result is None:
                ret['result'] = None
                ret['comment'] = out['comment']
                return ret
    # No packages to install.
    if not target_pkgs:
        ret['result'] = True
        aicomms = '\n'.join(already_installed_comments)
        last_line = 'All specified packages are already installed' + (' and up-to-date' if upgrade else '')
        ret['comment'] = aicomms + ('\n' if aicomms else '') + last_line
        return ret
    # Construct the string that will get passed to the install call
    pkgs_str = ','.join([state_name for _, state_name in target_pkgs])
    # Call to install the package. Actual installation takes place here
    pip_install_call = __salt__['pip.install'](
        pkgs='{0}'.format(pkgs_str) if pkgs_str else '',
        requirements=requirements,
        bin_env=bin_env,
        use_wheel=use_wheel,
        no_use_wheel=no_use_wheel,
        no_binary=no_binary,
        log=log,
        proxy=proxy,
        timeout=timeout,
        editable=editable,
        find_links=find_links,
        index_url=index_url,
        extra_index_url=extra_index_url,
        no_index=no_index,
        mirrors=mirrors,
        build=build,
        target=target,
        download=download,
        download_cache=download_cache,
        source=source,
        upgrade=upgrade,
        force_reinstall=force_reinstall,
        ignore_installed=ignore_installed,
        exists_action=exists_action,
        no_deps=no_deps,
        no_install=no_install,
        no_download=no_download,
        install_options=install_options,
        global_options=global_options,
        user=user,
        cwd=cwd,
        pre_releases=pre_releases,
        cert=cert,
        allow_all_external=allow_all_external,
        allow_external=allow_external,
        allow_unverified=allow_unverified,
        process_dependency_links=process_dependency_links,
        saltenv=__env__,
        env_vars=env_vars,
        use_vt=use_vt,
        trusted_host=trusted_host,
        no_cache_dir=no_cache_dir,
        extra_args=extra_args,
        **kwargs
    )
    if pip_install_call and pip_install_call.get('retcode', 1) == 0:
        ret['result'] = True
        if requirements or editable:
            comments = []
            if requirements:
                # Any stdout line NOT starting with one of these markers is
                # taken as evidence that pip actually changed something.
                PIP_REQUIREMENTS_NOCHANGE = [
                    'Requirement already satisfied',
                    'Requirement already up-to-date',
                    'Requirement not upgraded',
                    'Collecting',
                    'Cloning',
                    'Cleaning up...',
                ]
                for line in pip_install_call.get('stdout', '').split('\n'):
                    if not any(
                        [
                            line.strip().startswith(x)
                            for x in PIP_REQUIREMENTS_NOCHANGE
                        ]
                    ):
                        ret['changes']['requirements'] = True
                if ret['changes'].get('requirements'):
                    comments.append('Successfully processed requirements file '
                                    '{0}.'.format(requirements))
                else:
                    comments.append('Requirements were already installed.')
            if editable:
                comments.append('Package successfully installed from VCS '
                                'checkout {0}.'.format(editable))
                ret['changes']['editable'] = True
            ret['comment'] = ' '.join(comments)
        else:
            # Check that the packages set to be installed were installed.
            # Create comments reporting success and failures
            pkg_404_comms = []
            already_installed_packages = set()
            for line in pip_install_call.get('stdout', '').split('\n'):
                # Output for already installed packages:
                # 'Requirement already up-to-date: jinja2 in /usr/local/lib/python2.7/dist-packages\nCleaning up...'
                if line.startswith('Requirement already up-to-date: '):
                    package = line.split(':', 1)[1].split()[0]
                    already_installed_packages.add(package.lower())
            for prefix, state_name in target_pkgs:
                # Case for packages that are not an URL
                if prefix:
                    pipsearch = salt.utils.data.CaseInsensitiveDict(
                        __salt__['pip.list'](prefix, bin_env,
                                             user=user, cwd=cwd,
                                             env_vars=env_vars,
                                             **kwargs)
                    )
                    # If we didn't find the package in the system after
                    # installing it report it
                    if not pipsearch:
                        # NOTE(review): 'pkg' here is a leftover loop variable
                        # from the earlier 'for pkg in iter(pkgs)' loop; the
                        # message likely should reference 'prefix' instead.
                        pkg_404_comms.append(
                            'There was no error installing package \'{0}\' '
                            'although it does not show when calling '
                            '\'pip.freeze\'.'.format(pkg)
                        )
                    else:
                        if prefix in pipsearch \
                                and prefix.lower() not in already_installed_packages:
                            ver = pipsearch[prefix]
                            ret['changes']['{0}=={1}'.format(prefix, ver)] = 'Installed'
                # Case for packages that are an URL
                else:
                    ret['changes']['{0}==???'.format(state_name)] = 'Installed'
            # Set comments
            aicomms = '\n'.join(already_installed_comments)
            succ_comm = 'All packages were successfully installed'\
                if not pkg_404_comms else '\n'.join(pkg_404_comms)
            ret['comment'] = aicomms + ('\n' if aicomms else '') + succ_comm
        return ret
    elif pip_install_call:
        ret['result'] = False
        if 'stdout' in pip_install_call:
            error = 'Error: {0} {1}'.format(pip_install_call['stdout'],
                                            pip_install_call['stderr'])
        else:
            error = 'Error: {0}'.format(pip_install_call['comment'])
        if requirements or editable:
            comments = []
            if requirements:
                comments.append('Unable to process requirements file '
                                '"{0}".'.format(requirements))
            if editable:
                comments.append('Unable to install from VCS checkout'
                                '{0}.'.format(editable))
            comments.append(error)
            ret['comment'] = ' '.join(comments)
        else:
            pkgs_str = ', '.join([state_name for _, state_name in target_pkgs])
            aicomms = '\n'.join(already_installed_comments)
            error_comm = ('Failed to install packages: {0}. '
                          '{1}'.format(pkgs_str, error))
            ret['comment'] = aicomms + ('\n' if aicomms else '') + error_comm
    else:
        ret['result'] = False
        ret['comment'] = 'Could not install package'
    return ret
constant[
Make sure the package is installed
name
The name of the python package to install. You can also specify version
numbers here using the standard operators ``==, >=, <=``. If
``requirements`` is given, this parameter will be ignored.
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django >= 1.6, <= 1.7
- require:
- pkg: python-pip
This will install the latest Django version greater than 1.6 but less
than 1.7.
requirements
Path to a pip requirements file. If the path begins with salt://
the file will be transferred from the master file server.
user
The user under which to run pip
use_wheel : False
Prefer wheel archives (requires pip>=1.4)
no_use_wheel : False
Force to not use wheel archives (requires pip>=1.4)
no_binary
Force to not use binary packages (requires pip >= 7.0.0)
Accepts either :all: to disable all binary packages, :none: to empty the set,
or a list of one or more packages
Example:
.. code-block:: yaml
django:
pip.installed:
- no_binary: ':all:'
flask:
pip.installed:
- no_binary:
- itsdangerous
- click
log
Log file where a complete (maximum verbosity) record will be kept
proxy
Specify a proxy in the form
user:passwd@proxy.server:port. Note that the
user:password@ is optional and required only if you
are behind an authenticated proxy. If you provide
user@proxy.server:port then you will be prompted for a
password.
timeout
Set the socket timeout (default 15 seconds)
editable
install something editable (i.e.
git+https://github.com/worldcompany/djangoembed.git#egg=djangoembed)
find_links
URL to look for packages at
index_url
Base URL of Python Package Index
extra_index_url
Extra URLs of package indexes to use in addition to ``index_url``
no_index
Ignore package index
mirrors
Specific mirror URL(s) to query (automatically adds --use-mirrors)
build
Unpack packages into ``build`` dir
target
Install packages into ``target`` dir
download
Download packages into ``download`` instead of installing them
download_cache
Cache downloaded packages in ``download_cache`` dir
source
Check out ``editable`` packages into ``source`` dir
upgrade
Upgrade all packages to the newest available version
force_reinstall
When upgrading, reinstall all packages even if they are already
up-to-date.
ignore_installed
Ignore the installed packages (reinstalling instead)
exists_action
Default action when a path already exists: (s)witch, (i)gnore, (w)ipe,
(b)ackup
no_deps
Ignore package dependencies
no_install
Download and unpack all packages, but don't actually install them
no_cache_dir:
Disable the cache.
cwd
Current working directory to run pip from
pre_releases
Include pre-releases in the available versions
cert
Provide a path to an alternate CA bundle
allow_all_external
Allow the installation of all externally hosted files
allow_external
Allow the installation of externally hosted files (comma separated list)
allow_unverified
Allow the installation of insecure and unverifiable files (comma separated list)
process_dependency_links
Enable the processing of dependency links
bin_env : None
Absolute path to a virtual environment directory or absolute path to
a pip executable. The example below assumes a virtual environment
has been created at ``/foo/.virtualenvs/bar``.
env_vars
Add or modify environment variables. Useful for tweaking build steps,
such as specifying INCLUDE or LIBRARY paths in Makefiles, build scripts or
compiler calls. This must be in the form of a dictionary or a mapping.
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django_app
- env_vars:
CUSTOM_PATH: /opt/django_app
VERBOSE: True
use_vt
Use VT terminal emulation (see output while installing)
trusted_host
Mark this host as trusted, even though it does not have valid or any
HTTPS.
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django >= 1.6, <= 1.7
- bin_env: /foo/.virtualenvs/bar
- require:
- pkg: python-pip
Or
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django >= 1.6, <= 1.7
- bin_env: /foo/.virtualenvs/bar/bin/pip
- require:
- pkg: python-pip
.. admonition:: Attention
The following arguments are deprecated, do not use.
pip_bin : None
Deprecated, use ``bin_env``
.. versionchanged:: 0.17.0
``use_wheel`` option added.
install_options
Extra arguments to be supplied to the setup.py install command.
If you are using an option with a directory path, be sure to use
absolute path.
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django
- install_options:
- --prefix=/blah
- require:
- pkg: python-pip
global_options
Extra global options to be supplied to the setup.py call before the
install command.
.. versionadded:: 2014.1.3
.. admonition:: Attention
As of Salt 0.17.0 the pip state **needs** an importable pip module.
This usually means having the system's pip package installed or running
Salt from an active `virtualenv`_.
The reason for this requirement is because ``pip`` already does a
pretty good job parsing its own requirements. It makes no sense for
Salt to do ``pip`` requirements parsing and validation before passing
them to the ``pip`` library. It's functionality duplication and it's
more error prone.
.. admonition:: Attention
Please set ``reload_modules: True`` to have the salt minion
import this module after installation.
Example:
.. code-block:: yaml
pyopenssl:
pip.installed:
- name: pyOpenSSL
- reload_modules: True
- exists_action: i
extra_args
pip keyword and positional arguments not yet implemented in salt
.. code-block:: yaml
pandas:
pip.installed:
- name: pandas
- extra_args:
- --latest-pip-kwarg: param
- --latest-pip-arg
.. warning::
If unsupported options are passed here that are not supported in a
minion's version of pip, a `No such option error` will be thrown.
.. _`virtualenv`: http://www.virtualenv.org/en/latest/
]
if <ast.BoolOp object at 0x7da20c6c4340> begin[:]
variable[bin_env] assign[=] name[pip_bin]
if name[pkgs] begin[:]
if <ast.UnaryOp object at 0x7da20c6c53c0> begin[:]
return[dictionary[[<ast.Constant object at 0x7da20c6c6f50>, <ast.Constant object at 0x7da20c6c5720>, <ast.Constant object at 0x7da20c6c6c50>, <ast.Constant object at 0x7da20c6c5600>], [<ast.Name object at 0x7da20c6c7310>, <ast.Constant object at 0x7da20c6c6380>, <ast.Dict object at 0x7da20c6c62f0>, <ast.Constant object at 0x7da20c6c5150>]]]
variable[prepro] assign[=] <ast.Lambda object at 0x7da20c6c6f80>
variable[pkgs] assign[=] <ast.ListComp object at 0x7da18f58f070>
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c70a0>, <ast.Constant object at 0x7da20c6c5960>, <ast.Constant object at 0x7da20c6c75b0>, <ast.Constant object at 0x7da20c6c4760>], [<ast.Call object at 0x7da20c6c7670>, <ast.Constant object at 0x7da20c6c5ea0>, <ast.Constant object at 0x7da20c6c7940>, <ast.Dict object at 0x7da20c6c7550>]]
<ast.Try object at 0x7da20c6c5cc0>
if name[use_wheel] begin[:]
variable[min_version] assign[=] constant[1.4]
variable[max_version] assign[=] constant[9.0.3]
variable[too_low] assign[=] call[name[salt].utils.versions.compare, parameter[]]
variable[too_high] assign[=] call[name[salt].utils.versions.compare, parameter[]]
if <ast.BoolOp object at 0x7da20c6c4d90> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[The 'use_wheel' option is only supported in pip between {0} and {1}. The version of pip detected was {2}.].format, parameter[name[min_version], name[max_version], name[cur_version]]]
return[name[ret]]
if name[no_use_wheel] begin[:]
variable[min_version] assign[=] constant[1.4]
variable[max_version] assign[=] constant[9.0.3]
variable[too_low] assign[=] call[name[salt].utils.versions.compare, parameter[]]
variable[too_high] assign[=] call[name[salt].utils.versions.compare, parameter[]]
if <ast.BoolOp object at 0x7da20c6c6aa0> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[The 'no_use_wheel' option is only supported in pip between {0} and {1}. The version of pip detected was {2}.].format, parameter[name[min_version], name[max_version], name[cur_version]]]
return[name[ret]]
if name[no_binary] begin[:]
variable[min_version] assign[=] constant[7.0.0]
variable[too_low] assign[=] call[name[salt].utils.versions.compare, parameter[]]
if name[too_low] begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[The 'no_binary' option is only supported in pip {0} and newer. The version of pip detected was {1}.].format, parameter[name[min_version], name[cur_version]]]
return[name[ret]]
variable[pkgs_details] assign[=] list[[]]
if <ast.BoolOp object at 0x7da20c6c7610> begin[:]
variable[comments] assign[=] list[[]]
for taget[name[pkg]] in starred[call[name[iter], parameter[name[pkgs]]]] begin[:]
variable[out] assign[=] call[name[_check_pkg_version_format], parameter[name[pkg]]]
if compare[call[name[out]][constant[result]] is constant[False]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[comments].append, parameter[call[name[out]][constant[comment]]]]
if compare[call[name[ret]][constant[result]] is constant[False]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[
].join, parameter[name[comments]]]
return[name[ret]]
variable[target_pkgs] assign[=] list[[]]
variable[already_installed_comments] assign[=] list[[]]
if <ast.BoolOp object at 0x7da2044c3ac0> begin[:]
variable[comments] assign[=] list[[]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[None]
if name[requirements] begin[:]
call[name[comments].append, parameter[call[constant[Requirements file '{0}' will be processed.].format, parameter[name[requirements]]]]]
if name[editable] begin[:]
call[name[comments].append, parameter[call[constant[Package will be installed in editable mode (i.e. setuptools "develop mode") from {0}.].format, parameter[name[editable]]]]]
call[name[ret]][constant[comment]] assign[=] call[constant[ ].join, parameter[name[comments]]]
return[name[ret]]
variable[pkgs_str] assign[=] call[constant[,].join, parameter[<ast.ListComp object at 0x7da2044c0850>]]
variable[pip_install_call] assign[=] call[call[name[__salt__]][constant[pip.install]], parameter[]]
if <ast.BoolOp object at 0x7da20c76ef20> begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
if <ast.BoolOp object at 0x7da20c76ef80> begin[:]
variable[comments] assign[=] list[[]]
if name[requirements] begin[:]
variable[PIP_REQUIREMENTS_NOCHANGE] assign[=] list[[<ast.Constant object at 0x7da20c76fbe0>, <ast.Constant object at 0x7da20c76e710>, <ast.Constant object at 0x7da20c76e8f0>, <ast.Constant object at 0x7da20c76f3d0>, <ast.Constant object at 0x7da20c76e1d0>, <ast.Constant object at 0x7da20c76f370>]]
for taget[name[line]] in starred[call[call[name[pip_install_call].get, parameter[constant[stdout], constant[]]].split, parameter[constant[
]]]] begin[:]
if <ast.UnaryOp object at 0x7da20c76fdf0> begin[:]
call[call[name[ret]][constant[changes]]][constant[requirements]] assign[=] constant[True]
if call[call[name[ret]][constant[changes]].get, parameter[constant[requirements]]] begin[:]
call[name[comments].append, parameter[call[constant[Successfully processed requirements file {0}.].format, parameter[name[requirements]]]]]
if name[editable] begin[:]
call[name[comments].append, parameter[call[constant[Package successfully installed from VCS checkout {0}.].format, parameter[name[editable]]]]]
call[call[name[ret]][constant[changes]]][constant[editable]] assign[=] constant[True]
call[name[ret]][constant[comment]] assign[=] call[constant[ ].join, parameter[name[comments]]]
return[name[ret]] | keyword[def] identifier[installed] ( identifier[name] ,
identifier[pkgs] = keyword[None] ,
identifier[pip_bin] = keyword[None] ,
identifier[requirements] = keyword[None] ,
identifier[bin_env] = keyword[None] ,
identifier[use_wheel] = keyword[False] ,
identifier[no_use_wheel] = keyword[False] ,
identifier[log] = keyword[None] ,
identifier[proxy] = keyword[None] ,
identifier[timeout] = keyword[None] ,
identifier[repo] = keyword[None] ,
identifier[editable] = keyword[None] ,
identifier[find_links] = keyword[None] ,
identifier[index_url] = keyword[None] ,
identifier[extra_index_url] = keyword[None] ,
identifier[no_index] = keyword[False] ,
identifier[mirrors] = keyword[None] ,
identifier[build] = keyword[None] ,
identifier[target] = keyword[None] ,
identifier[download] = keyword[None] ,
identifier[download_cache] = keyword[None] ,
identifier[source] = keyword[None] ,
identifier[upgrade] = keyword[False] ,
identifier[force_reinstall] = keyword[False] ,
identifier[ignore_installed] = keyword[False] ,
identifier[exists_action] = keyword[None] ,
identifier[no_deps] = keyword[False] ,
identifier[no_install] = keyword[False] ,
identifier[no_download] = keyword[False] ,
identifier[install_options] = keyword[None] ,
identifier[global_options] = keyword[None] ,
identifier[user] = keyword[None] ,
identifier[cwd] = keyword[None] ,
identifier[pre_releases] = keyword[False] ,
identifier[cert] = keyword[None] ,
identifier[allow_all_external] = keyword[False] ,
identifier[allow_external] = keyword[None] ,
identifier[allow_unverified] = keyword[None] ,
identifier[process_dependency_links] = keyword[False] ,
identifier[env_vars] = keyword[None] ,
identifier[use_vt] = keyword[False] ,
identifier[trusted_host] = keyword[None] ,
identifier[no_cache_dir] = keyword[False] ,
identifier[cache_dir] = keyword[None] ,
identifier[no_binary] = keyword[None] ,
identifier[extra_args] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
keyword[if] identifier[pip_bin] keyword[and] keyword[not] identifier[bin_env] :
identifier[bin_env] = identifier[pip_bin]
keyword[if] identifier[pkgs] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[pkgs] , identifier[list] ):
keyword[return] { literal[string] : identifier[name] ,
literal[string] : keyword[False] ,
literal[string] :{},
literal[string] : literal[string] }
keyword[else] :
identifier[pkgs] =[ identifier[name] ]
identifier[prepro] = keyword[lambda] identifier[pkg] : identifier[pkg] keyword[if] identifier[isinstance] ( identifier[pkg] , identifier[six] . identifier[string_types] ) keyword[else] literal[string] . identifier[join] (( identifier[six] . identifier[iteritems] ( identifier[pkg] )[ literal[int] ][ literal[int] ], identifier[six] . identifier[iteritems] ( identifier[pkg] )[ literal[int] ][ literal[int] ]))
identifier[pkgs] =[ identifier[prepro] ( identifier[pkg] ) keyword[for] identifier[pkg] keyword[in] identifier[pkgs] ]
identifier[ret] ={ literal[string] : literal[string] . identifier[join] ( identifier[pkgs] ), literal[string] : keyword[None] ,
literal[string] : literal[string] , literal[string] :{}}
keyword[try] :
identifier[cur_version] = identifier[__salt__] [ literal[string] ]( identifier[bin_env] )
keyword[except] ( identifier[CommandNotFoundError] , identifier[CommandExecutionError] ) keyword[as] identifier[err] :
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] , identifier[err] )
keyword[return] identifier[ret]
keyword[if] identifier[use_wheel] :
identifier[min_version] = literal[string]
identifier[max_version] = literal[string]
identifier[too_low] = identifier[salt] . identifier[utils] . identifier[versions] . identifier[compare] ( identifier[ver1] = identifier[cur_version] , identifier[oper] = literal[string] , identifier[ver2] = identifier[min_version] )
identifier[too_high] = identifier[salt] . identifier[utils] . identifier[versions] . identifier[compare] ( identifier[ver1] = identifier[cur_version] , identifier[oper] = literal[string] , identifier[ver2] = identifier[max_version] )
keyword[if] identifier[too_low] keyword[or] identifier[too_high] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]=( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[min_version] , identifier[max_version] , identifier[cur_version] )
keyword[return] identifier[ret]
keyword[if] identifier[no_use_wheel] :
identifier[min_version] = literal[string]
identifier[max_version] = literal[string]
identifier[too_low] = identifier[salt] . identifier[utils] . identifier[versions] . identifier[compare] ( identifier[ver1] = identifier[cur_version] , identifier[oper] = literal[string] , identifier[ver2] = identifier[min_version] )
identifier[too_high] = identifier[salt] . identifier[utils] . identifier[versions] . identifier[compare] ( identifier[ver1] = identifier[cur_version] , identifier[oper] = literal[string] , identifier[ver2] = identifier[max_version] )
keyword[if] identifier[too_low] keyword[or] identifier[too_high] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]=( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[min_version] , identifier[max_version] , identifier[cur_version] )
keyword[return] identifier[ret]
keyword[if] identifier[no_binary] :
identifier[min_version] = literal[string]
identifier[too_low] = identifier[salt] . identifier[utils] . identifier[versions] . identifier[compare] ( identifier[ver1] = identifier[cur_version] , identifier[oper] = literal[string] , identifier[ver2] = identifier[min_version] )
keyword[if] identifier[too_low] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]=( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[min_version] , identifier[cur_version] )
keyword[return] identifier[ret]
identifier[pkgs_details] =[]
keyword[if] identifier[pkgs] keyword[and] keyword[not] ( identifier[requirements] keyword[or] identifier[editable] ):
identifier[comments] =[]
keyword[for] identifier[pkg] keyword[in] identifier[iter] ( identifier[pkgs] ):
identifier[out] = identifier[_check_pkg_version_format] ( identifier[pkg] )
keyword[if] identifier[out] [ literal[string] ] keyword[is] keyword[False] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[comments] . identifier[append] ( identifier[out] [ literal[string] ])
keyword[elif] identifier[out] [ literal[string] ] keyword[is] keyword[True] :
identifier[pkgs_details] . identifier[append] (( identifier[out] [ literal[string] ], identifier[pkg] , identifier[out] [ literal[string] ]))
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[False] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ( identifier[comments] )
keyword[return] identifier[ret]
identifier[target_pkgs] =[]
identifier[already_installed_comments] =[]
keyword[if] identifier[requirements] keyword[or] identifier[editable] :
identifier[comments] =[]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[None]
keyword[if] identifier[requirements] :
identifier[comments] . identifier[append] ( literal[string]
literal[string] . identifier[format] ( identifier[requirements] ))
keyword[if] identifier[editable] :
identifier[comments] . identifier[append] (
literal[string]
literal[string] . identifier[format] ( identifier[editable] )
)
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ( identifier[comments] )
keyword[return] identifier[ret]
keyword[else] :
keyword[try] :
identifier[pip_list] = identifier[__salt__] [ literal[string] ]( identifier[bin_env] = identifier[bin_env] , identifier[user] = identifier[user] , identifier[cwd] = identifier[cwd] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[log] . identifier[exception] ( identifier[exc] )
identifier[pip_list] = keyword[False]
keyword[for] identifier[prefix] , identifier[state_pkg_name] , identifier[version_spec] keyword[in] identifier[pkgs_details] :
keyword[if] identifier[prefix] :
identifier[state_pkg_name] = identifier[state_pkg_name]
identifier[version_spec] = identifier[version_spec]
identifier[out] = identifier[_check_if_installed] ( identifier[prefix] , identifier[state_pkg_name] , identifier[version_spec] ,
identifier[ignore_installed] , identifier[force_reinstall] ,
identifier[upgrade] , identifier[user] , identifier[cwd] , identifier[bin_env] , identifier[env_vars] ,
identifier[index_url] , identifier[extra_index_url] , identifier[pip_list] ,
** identifier[kwargs] )
keyword[if] identifier[out] [ literal[string] ] keyword[is] keyword[None] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= identifier[out] [ literal[string] ]
keyword[return] identifier[ret]
keyword[else] :
identifier[out] ={ literal[string] : keyword[False] , literal[string] : keyword[None] }
identifier[result] = identifier[out] [ literal[string] ]
keyword[if] identifier[result] keyword[is] keyword[False] :
identifier[target_pkgs] . identifier[append] (( identifier[prefix] , identifier[state_pkg_name] . identifier[replace] ( literal[string] , literal[string] )))
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[msg] = literal[string]
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= identifier[msg] . identifier[format] ( identifier[state_pkg_name] )
keyword[return] identifier[ret]
keyword[elif] identifier[result] keyword[is] keyword[True] :
identifier[already_installed_comments] . identifier[append] ( identifier[out] [ literal[string] ])
keyword[elif] identifier[result] keyword[is] keyword[None] :
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= identifier[out] [ literal[string] ]
keyword[return] identifier[ret]
keyword[if] keyword[not] identifier[target_pkgs] :
identifier[ret] [ literal[string] ]= keyword[True]
identifier[aicomms] = literal[string] . identifier[join] ( identifier[already_installed_comments] )
identifier[last_line] = literal[string] +( literal[string] keyword[if] identifier[upgrade] keyword[else] literal[string] )
identifier[ret] [ literal[string] ]= identifier[aicomms] +( literal[string] keyword[if] identifier[aicomms] keyword[else] literal[string] )+ identifier[last_line]
keyword[return] identifier[ret]
identifier[pkgs_str] = literal[string] . identifier[join] ([ identifier[state_name] keyword[for] identifier[_] , identifier[state_name] keyword[in] identifier[target_pkgs] ])
identifier[pip_install_call] = identifier[__salt__] [ literal[string] ](
identifier[pkgs] = literal[string] . identifier[format] ( identifier[pkgs_str] ) keyword[if] identifier[pkgs_str] keyword[else] literal[string] ,
identifier[requirements] = identifier[requirements] ,
identifier[bin_env] = identifier[bin_env] ,
identifier[use_wheel] = identifier[use_wheel] ,
identifier[no_use_wheel] = identifier[no_use_wheel] ,
identifier[no_binary] = identifier[no_binary] ,
identifier[log] = identifier[log] ,
identifier[proxy] = identifier[proxy] ,
identifier[timeout] = identifier[timeout] ,
identifier[editable] = identifier[editable] ,
identifier[find_links] = identifier[find_links] ,
identifier[index_url] = identifier[index_url] ,
identifier[extra_index_url] = identifier[extra_index_url] ,
identifier[no_index] = identifier[no_index] ,
identifier[mirrors] = identifier[mirrors] ,
identifier[build] = identifier[build] ,
identifier[target] = identifier[target] ,
identifier[download] = identifier[download] ,
identifier[download_cache] = identifier[download_cache] ,
identifier[source] = identifier[source] ,
identifier[upgrade] = identifier[upgrade] ,
identifier[force_reinstall] = identifier[force_reinstall] ,
identifier[ignore_installed] = identifier[ignore_installed] ,
identifier[exists_action] = identifier[exists_action] ,
identifier[no_deps] = identifier[no_deps] ,
identifier[no_install] = identifier[no_install] ,
identifier[no_download] = identifier[no_download] ,
identifier[install_options] = identifier[install_options] ,
identifier[global_options] = identifier[global_options] ,
identifier[user] = identifier[user] ,
identifier[cwd] = identifier[cwd] ,
identifier[pre_releases] = identifier[pre_releases] ,
identifier[cert] = identifier[cert] ,
identifier[allow_all_external] = identifier[allow_all_external] ,
identifier[allow_external] = identifier[allow_external] ,
identifier[allow_unverified] = identifier[allow_unverified] ,
identifier[process_dependency_links] = identifier[process_dependency_links] ,
identifier[saltenv] = identifier[__env__] ,
identifier[env_vars] = identifier[env_vars] ,
identifier[use_vt] = identifier[use_vt] ,
identifier[trusted_host] = identifier[trusted_host] ,
identifier[no_cache_dir] = identifier[no_cache_dir] ,
identifier[extra_args] = identifier[extra_args] ,
** identifier[kwargs]
)
keyword[if] identifier[pip_install_call] keyword[and] identifier[pip_install_call] . identifier[get] ( literal[string] , literal[int] )== literal[int] :
identifier[ret] [ literal[string] ]= keyword[True]
keyword[if] identifier[requirements] keyword[or] identifier[editable] :
identifier[comments] =[]
keyword[if] identifier[requirements] :
identifier[PIP_REQUIREMENTS_NOCHANGE] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
keyword[for] identifier[line] keyword[in] identifier[pip_install_call] . identifier[get] ( literal[string] , literal[string] ). identifier[split] ( literal[string] ):
keyword[if] keyword[not] identifier[any] (
[
identifier[line] . identifier[strip] (). identifier[startswith] ( identifier[x] )
keyword[for] identifier[x] keyword[in] identifier[PIP_REQUIREMENTS_NOCHANGE]
]
):
identifier[ret] [ literal[string] ][ literal[string] ]= keyword[True]
keyword[if] identifier[ret] [ literal[string] ]. identifier[get] ( literal[string] ):
identifier[comments] . identifier[append] ( literal[string]
literal[string] . identifier[format] ( identifier[requirements] ))
keyword[else] :
identifier[comments] . identifier[append] ( literal[string] )
keyword[if] identifier[editable] :
identifier[comments] . identifier[append] ( literal[string]
literal[string] . identifier[format] ( identifier[editable] ))
identifier[ret] [ literal[string] ][ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ( identifier[comments] )
keyword[else] :
identifier[pkg_404_comms] =[]
identifier[already_installed_packages] = identifier[set] ()
keyword[for] identifier[line] keyword[in] identifier[pip_install_call] . identifier[get] ( literal[string] , literal[string] ). identifier[split] ( literal[string] ):
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[package] = identifier[line] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[split] ()[ literal[int] ]
identifier[already_installed_packages] . identifier[add] ( identifier[package] . identifier[lower] ())
keyword[for] identifier[prefix] , identifier[state_name] keyword[in] identifier[target_pkgs] :
keyword[if] identifier[prefix] :
identifier[pipsearch] = identifier[salt] . identifier[utils] . identifier[data] . identifier[CaseInsensitiveDict] (
identifier[__salt__] [ literal[string] ]( identifier[prefix] , identifier[bin_env] ,
identifier[user] = identifier[user] , identifier[cwd] = identifier[cwd] ,
identifier[env_vars] = identifier[env_vars] ,
** identifier[kwargs] )
)
keyword[if] keyword[not] identifier[pipsearch] :
identifier[pkg_404_comms] . identifier[append] (
literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[pkg] )
)
keyword[else] :
keyword[if] identifier[prefix] keyword[in] identifier[pipsearch] keyword[and] identifier[prefix] . identifier[lower] () keyword[not] keyword[in] identifier[already_installed_packages] :
identifier[ver] = identifier[pipsearch] [ identifier[prefix] ]
identifier[ret] [ literal[string] ][ literal[string] . identifier[format] ( identifier[prefix] , identifier[ver] )]= literal[string]
keyword[else] :
identifier[ret] [ literal[string] ][ literal[string] . identifier[format] ( identifier[state_name] )]= literal[string]
identifier[aicomms] = literal[string] . identifier[join] ( identifier[already_installed_comments] )
identifier[succ_comm] = literal[string] keyword[if] keyword[not] identifier[pkg_404_comms] keyword[else] literal[string] . identifier[join] ( identifier[pkg_404_comms] )
identifier[ret] [ literal[string] ]= identifier[aicomms] +( literal[string] keyword[if] identifier[aicomms] keyword[else] literal[string] )+ identifier[succ_comm]
keyword[return] identifier[ret]
keyword[elif] identifier[pip_install_call] :
identifier[ret] [ literal[string] ]= keyword[False]
keyword[if] literal[string] keyword[in] identifier[pip_install_call] :
identifier[error] = literal[string] . identifier[format] ( identifier[pip_install_call] [ literal[string] ],
identifier[pip_install_call] [ literal[string] ])
keyword[else] :
identifier[error] = literal[string] . identifier[format] ( identifier[pip_install_call] [ literal[string] ])
keyword[if] identifier[requirements] keyword[or] identifier[editable] :
identifier[comments] =[]
keyword[if] identifier[requirements] :
identifier[comments] . identifier[append] ( literal[string]
literal[string] . identifier[format] ( identifier[requirements] ))
keyword[if] identifier[editable] :
identifier[comments] . identifier[append] ( literal[string]
literal[string] . identifier[format] ( identifier[editable] ))
identifier[comments] . identifier[append] ( identifier[error] )
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ( identifier[comments] )
keyword[else] :
identifier[pkgs_str] = literal[string] . identifier[join] ([ identifier[state_name] keyword[for] identifier[_] , identifier[state_name] keyword[in] identifier[target_pkgs] ])
identifier[aicomms] = literal[string] . identifier[join] ( identifier[already_installed_comments] )
identifier[error_comm] =( literal[string]
literal[string] . identifier[format] ( identifier[pkgs_str] , identifier[error] ))
identifier[ret] [ literal[string] ]= identifier[aicomms] +( literal[string] keyword[if] identifier[aicomms] keyword[else] literal[string] )+ identifier[error_comm]
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret] | def installed(name, pkgs=None, pip_bin=None, requirements=None, bin_env=None, use_wheel=False, no_use_wheel=False, log=None, proxy=None, timeout=None, repo=None, editable=None, find_links=None, index_url=None, extra_index_url=None, no_index=False, mirrors=None, build=None, target=None, download=None, download_cache=None, source=None, upgrade=False, force_reinstall=False, ignore_installed=False, exists_action=None, no_deps=False, no_install=False, no_download=False, install_options=None, global_options=None, user=None, cwd=None, pre_releases=False, cert=None, allow_all_external=False, allow_external=None, allow_unverified=None, process_dependency_links=False, env_vars=None, use_vt=False, trusted_host=None, no_cache_dir=False, cache_dir=None, no_binary=None, extra_args=None, **kwargs):
"""
Make sure the package is installed
name
The name of the python package to install. You can also specify version
numbers here using the standard operators ``==, >=, <=``. If
``requirements`` is given, this parameter will be ignored.
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django >= 1.6, <= 1.7
- require:
- pkg: python-pip
This will install the latest Django version greater than 1.6 but less
than 1.7.
requirements
Path to a pip requirements file. If the path begins with salt://
the file will be transferred from the master file server.
user
The user under which to run pip
use_wheel : False
Prefer wheel archives (requires pip>=1.4)
no_use_wheel : False
Force to not use wheel archives (requires pip>=1.4)
no_binary
Force to not use binary packages (requires pip >= 7.0.0)
Accepts either :all: to disable all binary packages, :none: to empty the set,
or a list of one or more packages
Example:
.. code-block:: yaml
django:
pip.installed:
- no_binary: ':all:'
flask:
pip.installed:
- no_binary:
- itsdangerous
- click
log
Log file where a complete (maximum verbosity) record will be kept
proxy
Specify a proxy in the form
user:passwd@proxy.server:port. Note that the
user:password@ is optional and required only if you
are behind an authenticated proxy. If you provide
user@proxy.server:port then you will be prompted for a
password.
timeout
Set the socket timeout (default 15 seconds)
editable
install something editable (i.e.
git+https://github.com/worldcompany/djangoembed.git#egg=djangoembed)
find_links
URL to look for packages at
index_url
Base URL of Python Package Index
extra_index_url
Extra URLs of package indexes to use in addition to ``index_url``
no_index
Ignore package index
mirrors
Specific mirror URL(s) to query (automatically adds --use-mirrors)
build
Unpack packages into ``build`` dir
target
Install packages into ``target`` dir
download
Download packages into ``download`` instead of installing them
download_cache
Cache downloaded packages in ``download_cache`` dir
source
Check out ``editable`` packages into ``source`` dir
upgrade
Upgrade all packages to the newest available version
force_reinstall
When upgrading, reinstall all packages even if they are already
up-to-date.
ignore_installed
Ignore the installed packages (reinstalling instead)
exists_action
Default action when a path already exists: (s)witch, (i)gnore, (w)ipe,
(b)ackup
no_deps
Ignore package dependencies
no_install
Download and unpack all packages, but don't actually install them
no_cache_dir:
Disable the cache.
cwd
Current working directory to run pip from
pre_releases
Include pre-releases in the available versions
cert
Provide a path to an alternate CA bundle
allow_all_external
Allow the installation of all externally hosted files
allow_external
Allow the installation of externally hosted files (comma separated list)
allow_unverified
Allow the installation of insecure and unverifiable files (comma separated list)
process_dependency_links
Enable the processing of dependency links
bin_env : None
Absolute path to a virtual environment directory or absolute path to
a pip executable. The example below assumes a virtual environment
has been created at ``/foo/.virtualenvs/bar``.
env_vars
Add or modify environment variables. Useful for tweaking build steps,
such as specifying INCLUDE or LIBRARY paths in Makefiles, build scripts or
compiler calls. This must be in the form of a dictionary or a mapping.
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django_app
- env_vars:
CUSTOM_PATH: /opt/django_app
VERBOSE: True
use_vt
Use VT terminal emulation (see output while installing)
trusted_host
Mark this host as trusted, even though it does not have valid or any
HTTPS.
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django >= 1.6, <= 1.7
- bin_env: /foo/.virtualenvs/bar
- require:
- pkg: python-pip
Or
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django >= 1.6, <= 1.7
- bin_env: /foo/.virtualenvs/bar/bin/pip
- require:
- pkg: python-pip
.. admonition:: Attention
The following arguments are deprecated, do not use.
pip_bin : None
Deprecated, use ``bin_env``
.. versionchanged:: 0.17.0
``use_wheel`` option added.
install_options
Extra arguments to be supplied to the setup.py install command.
If you are using an option with a directory path, be sure to use
absolute path.
Example:
.. code-block:: yaml
django:
pip.installed:
- name: django
- install_options:
- --prefix=/blah
- require:
- pkg: python-pip
global_options
Extra global options to be supplied to the setup.py call before the
install command.
.. versionadded:: 2014.1.3
.. admonition:: Attention
As of Salt 0.17.0 the pip state **needs** an importable pip module.
This usually means having the system's pip package installed or running
Salt from an active `virtualenv`_.
The reason for this requirement is because ``pip`` already does a
pretty good job parsing its own requirements. It makes no sense for
Salt to do ``pip`` requirements parsing and validation before passing
them to the ``pip`` library. It's functionality duplication and it's
more error prone.
.. admonition:: Attention
Please set ``reload_modules: True`` to have the salt minion
import this module after installation.
Example:
.. code-block:: yaml
pyopenssl:
pip.installed:
- name: pyOpenSSL
- reload_modules: True
- exists_action: i
extra_args
pip keyword and positional arguments not yet implemented in salt
.. code-block:: yaml
pandas:
pip.installed:
- name: pandas
- extra_args:
- --latest-pip-kwarg: param
- --latest-pip-arg
.. warning::
If unsupported options are passed here that are not supported in a
minion's version of pip, a `No such option error` will be thrown.
.. _`virtualenv`: http://www.virtualenv.org/en/latest/
"""
if pip_bin and (not bin_env):
bin_env = pip_bin # depends on [control=['if'], data=[]]
# If pkgs is present, ignore name
if pkgs:
if not isinstance(pkgs, list):
return {'name': name, 'result': False, 'changes': {}, 'comment': 'pkgs argument must be formatted as a list'} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
pkgs = [name]
# Assumption: If `pkg` is not an `string`, it's a `collections.OrderedDict`
# prepro = lambda pkg: pkg if type(pkg) == str else \
# ' '.join((pkg.items()[0][0], pkg.items()[0][1].replace(',', ';')))
# pkgs = ','.join([prepro(pkg) for pkg in pkgs])
prepro = lambda pkg: pkg if isinstance(pkg, six.string_types) else ' '.join((six.iteritems(pkg)[0][0], six.iteritems(pkg)[0][1]))
pkgs = [prepro(pkg) for pkg in pkgs]
ret = {'name': ';'.join(pkgs), 'result': None, 'comment': '', 'changes': {}}
try:
cur_version = __salt__['pip.version'](bin_env) # depends on [control=['try'], data=[]]
except (CommandNotFoundError, CommandExecutionError) as err:
ret['result'] = None
ret['comment'] = "Error installing '{0}': {1}".format(name, err)
return ret # depends on [control=['except'], data=['err']]
# Check that the pip binary supports the 'use_wheel' option
if use_wheel:
min_version = '1.4'
max_version = '9.0.3'
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
if too_low or too_high:
ret['result'] = False
ret['comment'] = "The 'use_wheel' option is only supported in pip between {0} and {1}. The version of pip detected was {2}.".format(min_version, max_version, cur_version)
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Check that the pip binary supports the 'no_use_wheel' option
if no_use_wheel:
min_version = '1.4'
max_version = '9.0.3'
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
if too_low or too_high:
ret['result'] = False
ret['comment'] = "The 'no_use_wheel' option is only supported in pip between {0} and {1}. The version of pip detected was {2}.".format(min_version, max_version, cur_version)
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Check that the pip binary supports the 'no_binary' option
if no_binary:
min_version = '7.0.0'
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
if too_low:
ret['result'] = False
ret['comment'] = "The 'no_binary' option is only supported in pip {0} and newer. The version of pip detected was {1}.".format(min_version, cur_version)
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Get the packages parsed name and version from the pip library.
# This only is done when there is no requirements or editable parameter.
pkgs_details = []
if pkgs and (not (requirements or editable)):
comments = []
for pkg in iter(pkgs):
out = _check_pkg_version_format(pkg)
if out['result'] is False:
ret['result'] = False
comments.append(out['comment']) # depends on [control=['if'], data=[]]
elif out['result'] is True:
pkgs_details.append((out['prefix'], pkg, out['version_spec'])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pkg']]
if ret['result'] is False:
ret['comment'] = '\n'.join(comments)
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# If a requirements file is specified, only install the contents of the
# requirements file. Similarly, using the --editable flag with pip should
# also ignore the "name" and "pkgs" parameters.
target_pkgs = []
already_installed_comments = []
if requirements or editable:
comments = []
# Append comments if this is a dry run.
if __opts__['test']:
ret['result'] = None
if requirements:
# TODO: Check requirements file against currently-installed
# packages to provide more accurate state output.
comments.append("Requirements file '{0}' will be processed.".format(requirements)) # depends on [control=['if'], data=[]]
if editable:
comments.append('Package will be installed in editable mode (i.e. setuptools "develop mode") from {0}.'.format(editable)) # depends on [control=['if'], data=[]]
ret['comment'] = ' '.join(comments)
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# No requirements case.
# Check pre-existence of the requested packages.
# Attempt to pre-cache a the current pip list
try:
pip_list = __salt__['pip.list'](bin_env=bin_env, user=user, cwd=cwd) # depends on [control=['try'], data=[]]
# If we fail, then just send False, and we'll try again in the next function call
except Exception as exc:
log.exception(exc)
pip_list = False # depends on [control=['except'], data=['exc']]
for (prefix, state_pkg_name, version_spec) in pkgs_details:
if prefix:
state_pkg_name = state_pkg_name
version_spec = version_spec
out = _check_if_installed(prefix, state_pkg_name, version_spec, ignore_installed, force_reinstall, upgrade, user, cwd, bin_env, env_vars, index_url, extra_index_url, pip_list, **kwargs)
# If _check_if_installed result is None, something went wrong with
# the command running. This way we keep stateful output.
if out['result'] is None:
ret['result'] = False
ret['comment'] = out['comment']
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
out = {'result': False, 'comment': None}
result = out['result']
# The package is not present. Add it to the pkgs to install.
if result is False:
# Replace commas (used for version ranges) with semicolons
# (which are not supported) in name so it does not treat
# them as multiple packages.
target_pkgs.append((prefix, state_pkg_name.replace(',', ';')))
# Append comments if this is a dry run.
if __opts__['test']:
msg = 'Python package {0} is set to be installed'
ret['result'] = None
ret['comment'] = msg.format(state_pkg_name)
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# The package is already present and will not be reinstalled.
elif result is True:
# Append comment stating its presence
already_installed_comments.append(out['comment']) # depends on [control=['if'], data=[]]
# The command pip.list failed. Abort.
elif result is None:
ret['result'] = None
ret['comment'] = out['comment']
return ret # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# No packages to install.
if not target_pkgs:
ret['result'] = True
aicomms = '\n'.join(already_installed_comments)
last_line = 'All specified packages are already installed' + (' and up-to-date' if upgrade else '')
ret['comment'] = aicomms + ('\n' if aicomms else '') + last_line
return ret # depends on [control=['if'], data=[]]
# Construct the string that will get passed to the install call
pkgs_str = ','.join([state_name for (_, state_name) in target_pkgs])
# Call to install the package. Actual installation takes place here
pip_install_call = __salt__['pip.install'](pkgs='{0}'.format(pkgs_str) if pkgs_str else '', requirements=requirements, bin_env=bin_env, use_wheel=use_wheel, no_use_wheel=no_use_wheel, no_binary=no_binary, log=log, proxy=proxy, timeout=timeout, editable=editable, find_links=find_links, index_url=index_url, extra_index_url=extra_index_url, no_index=no_index, mirrors=mirrors, build=build, target=target, download=download, download_cache=download_cache, source=source, upgrade=upgrade, force_reinstall=force_reinstall, ignore_installed=ignore_installed, exists_action=exists_action, no_deps=no_deps, no_install=no_install, no_download=no_download, install_options=install_options, global_options=global_options, user=user, cwd=cwd, pre_releases=pre_releases, cert=cert, allow_all_external=allow_all_external, allow_external=allow_external, allow_unverified=allow_unverified, process_dependency_links=process_dependency_links, saltenv=__env__, env_vars=env_vars, use_vt=use_vt, trusted_host=trusted_host, no_cache_dir=no_cache_dir, extra_args=extra_args, **kwargs)
if pip_install_call and pip_install_call.get('retcode', 1) == 0:
ret['result'] = True
if requirements or editable:
comments = []
if requirements:
PIP_REQUIREMENTS_NOCHANGE = ['Requirement already satisfied', 'Requirement already up-to-date', 'Requirement not upgraded', 'Collecting', 'Cloning', 'Cleaning up...']
for line in pip_install_call.get('stdout', '').split('\n'):
if not any([line.strip().startswith(x) for x in PIP_REQUIREMENTS_NOCHANGE]):
ret['changes']['requirements'] = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
if ret['changes'].get('requirements'):
comments.append('Successfully processed requirements file {0}.'.format(requirements)) # depends on [control=['if'], data=[]]
else:
comments.append('Requirements were already installed.') # depends on [control=['if'], data=[]]
if editable:
comments.append('Package successfully installed from VCS checkout {0}.'.format(editable))
ret['changes']['editable'] = True # depends on [control=['if'], data=[]]
ret['comment'] = ' '.join(comments) # depends on [control=['if'], data=[]]
else:
# Check that the packages set to be installed were installed.
# Create comments reporting success and failures
pkg_404_comms = []
already_installed_packages = set()
for line in pip_install_call.get('stdout', '').split('\n'):
# Output for already installed packages:
# 'Requirement already up-to-date: jinja2 in /usr/local/lib/python2.7/dist-packages\nCleaning up...'
if line.startswith('Requirement already up-to-date: '):
package = line.split(':', 1)[1].split()[0]
already_installed_packages.add(package.lower()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
for (prefix, state_name) in target_pkgs:
# Case for packages that are not an URL
if prefix:
pipsearch = salt.utils.data.CaseInsensitiveDict(__salt__['pip.list'](prefix, bin_env, user=user, cwd=cwd, env_vars=env_vars, **kwargs))
# If we didn't find the package in the system after
# installing it report it
if not pipsearch:
pkg_404_comms.append("There was no error installing package '{0}' although it does not show when calling 'pip.freeze'.".format(pkg)) # depends on [control=['if'], data=[]]
elif prefix in pipsearch and prefix.lower() not in already_installed_packages:
ver = pipsearch[prefix]
ret['changes']['{0}=={1}'.format(prefix, ver)] = 'Installed' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Case for packages that are an URL
ret['changes']['{0}==???'.format(state_name)] = 'Installed' # depends on [control=['for'], data=[]]
# Set comments
aicomms = '\n'.join(already_installed_comments)
succ_comm = 'All packages were successfully installed' if not pkg_404_comms else '\n'.join(pkg_404_comms)
ret['comment'] = aicomms + ('\n' if aicomms else '') + succ_comm
return ret # depends on [control=['if'], data=[]]
elif pip_install_call:
ret['result'] = False
if 'stdout' in pip_install_call:
error = 'Error: {0} {1}'.format(pip_install_call['stdout'], pip_install_call['stderr']) # depends on [control=['if'], data=['pip_install_call']]
else:
error = 'Error: {0}'.format(pip_install_call['comment'])
if requirements or editable:
comments = []
if requirements:
comments.append('Unable to process requirements file "{0}".'.format(requirements)) # depends on [control=['if'], data=[]]
if editable:
comments.append('Unable to install from VCS checkout{0}.'.format(editable)) # depends on [control=['if'], data=[]]
comments.append(error)
ret['comment'] = ' '.join(comments) # depends on [control=['if'], data=[]]
else:
pkgs_str = ', '.join([state_name for (_, state_name) in target_pkgs])
aicomms = '\n'.join(already_installed_comments)
error_comm = 'Failed to install packages: {0}. {1}'.format(pkgs_str, error)
ret['comment'] = aicomms + ('\n' if aicomms else '') + error_comm # depends on [control=['if'], data=[]]
else:
ret['result'] = False
ret['comment'] = 'Could not install package'
return ret |
def update_result_ctrl(self, event):
    """Refresh the result control with output from the main window.

    ``event`` may carry a ``msg`` attribute (normal script output) and/or
    an ``err`` attribute (error text, rendered in red).  When no error is
    reported and the user has already pressed OK, the dialog is closed.
    """
    # The macro window may have been destroyed while this event was queued.
    if not self:
        return
    self.result_ctrl.SetValue('')
    output_length = 0
    if hasattr(event, 'msg'):
        # Plain output of the executed script (print statements, etc.).
        self.result_ctrl.AppendText(event.msg)
        output_length = len(event.msg)
    if hasattr(event, 'err'):
        # Error text is appended after the normal output and styled red.
        red_style = wx.TextAttr(wx.RED)
        self.result_ctrl.AppendText(event.err)
        self.result_ctrl.SetStyle(output_length,
                                  output_length + len(event.err),
                                  red_style)
    if not hasattr(event, 'err') or event.err == '':
        # Success path: honour a pending OK press by closing the dialog.
        if self._ok_pressed:
            self.Destroy()
        self._ok_pressed = False
constant[Update event result following execution by main window]
if <ast.UnaryOp object at 0x7da1b1544d00> begin[:]
return[None]
variable[printLen] assign[=] constant[0]
call[name[self].result_ctrl.SetValue, parameter[constant[]]]
if call[name[hasattr], parameter[name[event], constant[msg]]] begin[:]
call[name[self].result_ctrl.AppendText, parameter[name[event].msg]]
variable[printLen] assign[=] call[name[len], parameter[name[event].msg]]
if call[name[hasattr], parameter[name[event], constant[err]]] begin[:]
variable[errLen] assign[=] call[name[len], parameter[name[event].err]]
variable[errStyle] assign[=] call[name[wx].TextAttr, parameter[name[wx].RED]]
call[name[self].result_ctrl.AppendText, parameter[name[event].err]]
call[name[self].result_ctrl.SetStyle, parameter[name[printLen], binary_operation[name[printLen] + name[errLen]], name[errStyle]]]
if <ast.BoolOp object at 0x7da1b15457b0> begin[:]
if name[self]._ok_pressed begin[:]
call[name[self].Destroy, parameter[]]
name[self]._ok_pressed assign[=] constant[False] | keyword[def] identifier[update_result_ctrl] ( identifier[self] , identifier[event] ):
literal[string]
keyword[if] keyword[not] identifier[self] :
keyword[return]
identifier[printLen] = literal[int]
identifier[self] . identifier[result_ctrl] . identifier[SetValue] ( literal[string] )
keyword[if] identifier[hasattr] ( identifier[event] , literal[string] ):
identifier[self] . identifier[result_ctrl] . identifier[AppendText] ( identifier[event] . identifier[msg] )
identifier[printLen] = identifier[len] ( identifier[event] . identifier[msg] )
keyword[if] identifier[hasattr] ( identifier[event] , literal[string] ):
identifier[errLen] = identifier[len] ( identifier[event] . identifier[err] )
identifier[errStyle] = identifier[wx] . identifier[TextAttr] ( identifier[wx] . identifier[RED] )
identifier[self] . identifier[result_ctrl] . identifier[AppendText] ( identifier[event] . identifier[err] )
identifier[self] . identifier[result_ctrl] . identifier[SetStyle] ( identifier[printLen] , identifier[printLen] + identifier[errLen] , identifier[errStyle] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[event] , literal[string] ) keyword[or] identifier[event] . identifier[err] == literal[string] :
keyword[if] identifier[self] . identifier[_ok_pressed] :
identifier[self] . identifier[Destroy] ()
identifier[self] . identifier[_ok_pressed] = keyword[False] | def update_result_ctrl(self, event):
"""Update event result following execution by main window"""
# Check to see if macro window still exists
if not self:
return # depends on [control=['if'], data=[]]
printLen = 0
self.result_ctrl.SetValue('')
if hasattr(event, 'msg'):
# Output of script (from print statements, for example)
self.result_ctrl.AppendText(event.msg)
printLen = len(event.msg) # depends on [control=['if'], data=[]]
if hasattr(event, 'err'):
# Error messages
errLen = len(event.err)
errStyle = wx.TextAttr(wx.RED)
self.result_ctrl.AppendText(event.err)
self.result_ctrl.SetStyle(printLen, printLen + errLen, errStyle) # depends on [control=['if'], data=[]]
if not hasattr(event, 'err') or event.err == '':
# No error passed. Close dialog if user requested it.
if self._ok_pressed:
self.Destroy() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self._ok_pressed = False |
def send_message(self, msg, stats=True):
    """ Serialize ``msg`` to JSON and send (or queue) it.

    @param msg: Message to send
    @param stats: If set to True, will update statistics after operation
                  completes
    """
    encoded = proto.json_encode(msg)
    self.send_jsonified(encoded, stats)
constant[ Send or queue outgoing message
@param msg: Message to send
@param stats: If set to True, will update statistics after operation
completes
]
call[name[self].send_jsonified, parameter[call[name[proto].json_encode, parameter[name[msg]]], name[stats]]] | keyword[def] identifier[send_message] ( identifier[self] , identifier[msg] , identifier[stats] = keyword[True] ):
literal[string]
identifier[self] . identifier[send_jsonified] ( identifier[proto] . identifier[json_encode] ( identifier[msg] ), identifier[stats] ) | def send_message(self, msg, stats=True):
""" Send or queue outgoing message
@param msg: Message to send
@param stats: If set to True, will update statistics after operation
completes
"""
self.send_jsonified(proto.json_encode(msg), stats) |
def write_to(self, out):
    """ Serialize this record to the given stream.

    The header is converted to ``bytes`` and written first, followed by
    the raw record payload.

    Parameters:
    ----------
    out : {file object}
        The output stream
    """
    header_bytes = bytes(self.header)
    out.write(header_bytes)
    out.write(self.record_data)
constant[ Write the raw header content to the out stream
Parameters:
----------
out : {file object}
The output stream
]
call[name[out].write, parameter[call[name[bytes], parameter[name[self].header]]]]
call[name[out].write, parameter[name[self].record_data]] | keyword[def] identifier[write_to] ( identifier[self] , identifier[out] ):
literal[string]
identifier[out] . identifier[write] ( identifier[bytes] ( identifier[self] . identifier[header] ))
identifier[out] . identifier[write] ( identifier[self] . identifier[record_data] ) | def write_to(self, out):
""" Write the raw header content to the out stream
Parameters:
----------
out : {file object}
The output stream
"""
out.write(bytes(self.header))
out.write(self.record_data) |
def services(self):
    """Fetch all instances of services for this EP.

    Resolves every reference listed under ``self['services']`` via
    ``Service.fetch`` and returns the fetched objects as a list.
    """
    # Single pass over the references; the original intermediate list
    # also shadowed the builtin ``id`` with its loop variable.
    return [Service.fetch(ref['id']) for ref in self['services']]
constant[Fetch all instances of services for this EP.]
variable[ids] assign[=] <ast.ListComp object at 0x7da1b07fab60>
return[<ast.ListComp object at 0x7da1b07fa3b0>] | keyword[def] identifier[services] ( identifier[self] ):
literal[string]
identifier[ids] =[ identifier[ref] [ literal[string] ] keyword[for] identifier[ref] keyword[in] identifier[self] [ literal[string] ]]
keyword[return] [ identifier[Service] . identifier[fetch] ( identifier[id] ) keyword[for] identifier[id] keyword[in] identifier[ids] ] | def services(self):
"""Fetch all instances of services for this EP."""
ids = [ref['id'] for ref in self['services']]
return [Service.fetch(id) for id in ids] |
def copy(self, names=None, dtype=None, levels=None, codes=None,
         deep=False, _set_identity=False, **kwargs):
    """
    Return a copy of this object.

    Any of ``names``, ``dtype``, ``levels`` or ``codes`` supplied here
    are set on the new copy in place of the current values.

    Parameters
    ----------
    names : sequence, optional
    dtype : numpy dtype or pandas type, optional
    levels : sequence, optional
    codes : sequence, optional

    Returns
    -------
    copy : MultiIndex

    Notes
    -----
    In most cases, there should be no functional difference from using
    ``deep``, but if ``deep`` is passed it will attempt to deepcopy,
    which can be expensive on large MultiIndex objects.
    """
    names = self._validate_names(name=kwargs.get('name'), names=names,
                                 deep=deep)
    if deep:
        # Deep copy only the attributes the caller did not override.
        from copy import deepcopy
        levels = deepcopy(self.levels) if levels is None else levels
        codes = deepcopy(self.codes) if codes is None else codes
    else:
        # Shallow copy: reuse the existing level/code objects.
        levels = self.levels if levels is None else levels
        codes = self.codes if codes is None else codes
    return MultiIndex(levels=levels, codes=codes, names=names,
                      sortorder=self.sortorder, verify_integrity=False,
                      _set_identity=_set_identity)
constant[
Make a copy of this object. Names, dtype, levels and codes can be
passed and will be set on new copy.
Parameters
----------
names : sequence, optional
dtype : numpy dtype or pandas type, optional
levels : sequence, optional
codes : sequence, optional
Returns
-------
copy : MultiIndex
Notes
-----
In most cases, there should be no functional difference from using
``deep``, but if ``deep`` is passed it will attempt to deepcopy.
This could be potentially expensive on large MultiIndex objects.
]
variable[name] assign[=] call[name[kwargs].get, parameter[constant[name]]]
variable[names] assign[=] call[name[self]._validate_names, parameter[]]
if name[deep] begin[:]
from relative_module[copy] import module[deepcopy]
if compare[name[levels] is constant[None]] begin[:]
variable[levels] assign[=] call[name[deepcopy], parameter[name[self].levels]]
if compare[name[codes] is constant[None]] begin[:]
variable[codes] assign[=] call[name[deepcopy], parameter[name[self].codes]]
return[call[name[MultiIndex], parameter[]]] | keyword[def] identifier[copy] ( identifier[self] , identifier[names] = keyword[None] , identifier[dtype] = keyword[None] , identifier[levels] = keyword[None] , identifier[codes] = keyword[None] ,
identifier[deep] = keyword[False] , identifier[_set_identity] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[name] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[names] = identifier[self] . identifier[_validate_names] ( identifier[name] = identifier[name] , identifier[names] = identifier[names] , identifier[deep] = identifier[deep] )
keyword[if] identifier[deep] :
keyword[from] identifier[copy] keyword[import] identifier[deepcopy]
keyword[if] identifier[levels] keyword[is] keyword[None] :
identifier[levels] = identifier[deepcopy] ( identifier[self] . identifier[levels] )
keyword[if] identifier[codes] keyword[is] keyword[None] :
identifier[codes] = identifier[deepcopy] ( identifier[self] . identifier[codes] )
keyword[else] :
keyword[if] identifier[levels] keyword[is] keyword[None] :
identifier[levels] = identifier[self] . identifier[levels]
keyword[if] identifier[codes] keyword[is] keyword[None] :
identifier[codes] = identifier[self] . identifier[codes]
keyword[return] identifier[MultiIndex] ( identifier[levels] = identifier[levels] , identifier[codes] = identifier[codes] , identifier[names] = identifier[names] ,
identifier[sortorder] = identifier[self] . identifier[sortorder] , identifier[verify_integrity] = keyword[False] ,
identifier[_set_identity] = identifier[_set_identity] ) | def copy(self, names=None, dtype=None, levels=None, codes=None, deep=False, _set_identity=False, **kwargs):
"""
Make a copy of this object. Names, dtype, levels and codes can be
passed and will be set on new copy.
Parameters
----------
names : sequence, optional
dtype : numpy dtype or pandas type, optional
levels : sequence, optional
codes : sequence, optional
Returns
-------
copy : MultiIndex
Notes
-----
In most cases, there should be no functional difference from using
``deep``, but if ``deep`` is passed it will attempt to deepcopy.
This could be potentially expensive on large MultiIndex objects.
"""
name = kwargs.get('name')
names = self._validate_names(name=name, names=names, deep=deep)
if deep:
from copy import deepcopy
if levels is None:
levels = deepcopy(self.levels) # depends on [control=['if'], data=['levels']]
if codes is None:
codes = deepcopy(self.codes) # depends on [control=['if'], data=['codes']] # depends on [control=['if'], data=[]]
else:
if levels is None:
levels = self.levels # depends on [control=['if'], data=['levels']]
if codes is None:
codes = self.codes # depends on [control=['if'], data=['codes']]
return MultiIndex(levels=levels, codes=codes, names=names, sortorder=self.sortorder, verify_integrity=False, _set_identity=_set_identity) |
def process_dataset(dataset, models, **kargs):
    """ Convert ``dataset`` to processed data using ``models``.

    :class:`gvar.dataset.Dataset` (or similar dictionary) object
    ``dataset`` is processed by each model in list ``models``,
    and the results collected into a new dictionary for use in
    :meth:`MultiFitter.lsqfit` and :meth:`MultiFitter.chained_lsqft`.
    Assumes that the models have defined method
    :meth:`MultiFitterModel.builddataset`. Keyword arguments
    ``kargs`` are passed on to :func:`gvar.dataset.avg_data` when
    averaging the data.
    """
    processed = collections.OrderedDict()
    for model in MultiFitter.flatten_models(models):
        built = model.builddataset(dataset)
        # Coarse-grain the per-model data when the model requests it.
        if model.ncg > 1:
            built = MultiFitter.coarse_grain(built, ncg=model.ncg)
        processed[model.datatag] = built
    return gvar.dataset.avg_data(processed, **kargs)
constant[ Convert ``dataset`` to processed data using ``models``.
:class:`gvar.dataset.Dataset` (or similar dictionary) object
``dataset`` is processed by each model in list ``models``,
and the results collected into a new dictionary ``pdata`` for use in
:meth:`MultiFitter.lsqfit` and :meth:`MultiFitter.chained_lsqft`.
Assumes that the models have defined method
:meth:`MultiFitterModel.builddataset`. Keyword arguments
``kargs`` are passed on to :func:`gvar.dataset.avg_data` when
averaging the data.
]
variable[dset] assign[=] call[name[collections].OrderedDict, parameter[]]
for taget[name[m]] in starred[call[name[MultiFitter].flatten_models, parameter[name[models]]]] begin[:]
call[name[dset]][name[m].datatag] assign[=] <ast.IfExp object at 0x7da2044c0340>
return[call[name[gvar].dataset.avg_data, parameter[name[dset]]]] | keyword[def] identifier[process_dataset] ( identifier[dataset] , identifier[models] ,** identifier[kargs] ):
literal[string]
identifier[dset] = identifier[collections] . identifier[OrderedDict] ()
keyword[for] identifier[m] keyword[in] identifier[MultiFitter] . identifier[flatten_models] ( identifier[models] ):
identifier[dset] [ identifier[m] . identifier[datatag] ]=(
identifier[m] . identifier[builddataset] ( identifier[dataset] ) keyword[if] identifier[m] . identifier[ncg] <= literal[int] keyword[else]
identifier[MultiFitter] . identifier[coarse_grain] ( identifier[m] . identifier[builddataset] ( identifier[dataset] ), identifier[ncg] = identifier[m] . identifier[ncg] )
)
keyword[return] identifier[gvar] . identifier[dataset] . identifier[avg_data] ( identifier[dset] ,** identifier[kargs] ) | def process_dataset(dataset, models, **kargs):
""" Convert ``dataset`` to processed data using ``models``.
:class:`gvar.dataset.Dataset` (or similar dictionary) object
``dataset`` is processed by each model in list ``models``,
and the results collected into a new dictionary ``pdata`` for use in
:meth:`MultiFitter.lsqfit` and :meth:`MultiFitter.chained_lsqft`.
Assumes that the models have defined method
:meth:`MultiFitterModel.builddataset`. Keyword arguments
``kargs`` are passed on to :func:`gvar.dataset.avg_data` when
averaging the data.
"""
dset = collections.OrderedDict()
for m in MultiFitter.flatten_models(models):
dset[m.datatag] = m.builddataset(dataset) if m.ncg <= 1 else MultiFitter.coarse_grain(m.builddataset(dataset), ncg=m.ncg) # depends on [control=['for'], data=['m']]
return gvar.dataset.avg_data(dset, **kargs) |
def send_command(self, command):
    '''Send a command to a device.

    Posts ``command`` to the commands endpoint for this device and wraps
    the response in a :class:`Command`.  On an :class:`APIError` the
    error details are printed and ``None`` is returned implicitly.
    '''
    data = {"command": command, "device_id": self.device_id}
    try:
        response = self.api_iface._api_post("/api/v2/commands", data)
        return Command(response, self)
    except APIError as e:
        print("API error: ")
        # Fix: the original iterated ``e.data.iteritems`` -- a bound
        # method referenced without calling it (TypeError), and a
        # Python-2-only API.  Iterate the items view instead.
        for key, value in e.data.items():
            print(str(key) + ": " + str(value))
constant[Send a command to a device]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da20c992380>, <ast.Constant object at 0x7da20c991f30>], [<ast.Name object at 0x7da20c9902e0>, <ast.Attribute object at 0x7da20c990f10>]]
<ast.Try object at 0x7da20c992740> | keyword[def] identifier[send_command] ( identifier[self] , identifier[command] ):
literal[string]
identifier[data] ={ literal[string] : identifier[command] , literal[string] : identifier[self] . identifier[device_id] }
keyword[try] :
identifier[response] = identifier[self] . identifier[api_iface] . identifier[_api_post] ( literal[string] , identifier[data] )
keyword[return] identifier[Command] ( identifier[response] , identifier[self] )
keyword[except] identifier[APIError] keyword[as] identifier[e] :
identifier[print] ( literal[string] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[e] . identifier[data] . identifier[iteritems] :
identifier[print] ( identifier[str] ( identifier[key] )+ literal[string] + identifier[str] ( identifier[value] )) | def send_command(self, command):
"""Send a command to a device"""
data = {'command': command, 'device_id': self.device_id}
try:
response = self.api_iface._api_post('/api/v2/commands', data)
return Command(response, self) # depends on [control=['try'], data=[]]
except APIError as e:
print('API error: ')
for (key, value) in e.data.iteritems:
print(str(key) + ': ' + str(value)) # depends on [control=['for'], data=[]] # depends on [control=['except'], data=['e']] |
def git_checkout(git_branch=None, locale_root=None):
    """
    Checkouts branch to last commit

    :param git_branch: branch to checkout (defaults to settings.GIT_BRANCH)
    :param locale_root: locale folder path (defaults to settings.LOCALE_ROOT)
    :return: tuple stdout, stderr (bytes) of completed command
    """
    if git_branch is None:
        git_branch = settings.GIT_BRANCH
    if locale_root is None:
        locale_root = settings.LOCALE_ROOT
    # Pass the arguments as a list with the default shell=False: the
    # original built a shell string by concatenation, which broke on
    # paths containing spaces and allowed shell metacharacters in the
    # branch/path to be interpreted by the shell.
    proc = Popen(['git', 'checkout', git_branch, '--', locale_root],
                 stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    return stdout, stderr
constant[
Checkouts branch to last commit
:param git_branch: branch to checkout
:param locale_root: locale folder path
:return: tuple stdout, stderr of completed command
]
if compare[name[git_branch] is constant[None]] begin[:]
variable[git_branch] assign[=] name[settings].GIT_BRANCH
if compare[name[locale_root] is constant[None]] begin[:]
variable[locale_root] assign[=] name[settings].LOCALE_ROOT
variable[proc] assign[=] call[name[Popen], parameter[binary_operation[binary_operation[binary_operation[constant[git checkout ] + name[git_branch]] + constant[ -- ]] + name[locale_root]]]]
<ast.Tuple object at 0x7da204347f70> assign[=] call[name[proc].communicate, parameter[]]
return[tuple[[<ast.Name object at 0x7da2043443d0>, <ast.Name object at 0x7da2043448e0>]]] | keyword[def] identifier[git_checkout] ( identifier[git_branch] = keyword[None] , identifier[locale_root] = keyword[None] ):
literal[string]
keyword[if] identifier[git_branch] keyword[is] keyword[None] :
identifier[git_branch] = identifier[settings] . identifier[GIT_BRANCH]
keyword[if] identifier[locale_root] keyword[is] keyword[None] :
identifier[locale_root] = identifier[settings] . identifier[LOCALE_ROOT]
identifier[proc] = identifier[Popen] ( literal[string] + identifier[git_branch] + literal[string] + identifier[locale_root] ,
identifier[shell] = keyword[True] , identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[PIPE] )
identifier[stdout] , identifier[stderr] = identifier[proc] . identifier[communicate] ()
keyword[return] identifier[stdout] , identifier[stderr] | def git_checkout(git_branch=None, locale_root=None):
"""
Checkouts branch to last commit
:param git_branch: branch to checkout
:param locale_root: locale folder path
:return: tuple stdout, stderr of completed command
"""
if git_branch is None:
git_branch = settings.GIT_BRANCH # depends on [control=['if'], data=['git_branch']]
if locale_root is None:
locale_root = settings.LOCALE_ROOT # depends on [control=['if'], data=['locale_root']]
proc = Popen('git checkout ' + git_branch + ' -- ' + locale_root, shell=True, stdout=PIPE, stderr=PIPE)
(stdout, stderr) = proc.communicate()
return (stdout, stderr) |
def add_user(self, **kwargs):
"""Add a User object, with properties specified in ``**kwargs``."""
user = self.UserClass(**kwargs)
if hasattr(user, 'active'):
user.active = True
self.db_adapter.add_object(user)
return user | def function[add_user, parameter[self]]:
constant[Add a User object, with properties specified in ``**kwargs``.]
variable[user] assign[=] call[name[self].UserClass, parameter[]]
if call[name[hasattr], parameter[name[user], constant[active]]] begin[:]
name[user].active assign[=] constant[True]
call[name[self].db_adapter.add_object, parameter[name[user]]]
return[name[user]] | keyword[def] identifier[add_user] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[user] = identifier[self] . identifier[UserClass] (** identifier[kwargs] )
keyword[if] identifier[hasattr] ( identifier[user] , literal[string] ):
identifier[user] . identifier[active] = keyword[True]
identifier[self] . identifier[db_adapter] . identifier[add_object] ( identifier[user] )
keyword[return] identifier[user] | def add_user(self, **kwargs):
"""Add a User object, with properties specified in ``**kwargs``."""
user = self.UserClass(**kwargs)
if hasattr(user, 'active'):
user.active = True # depends on [control=['if'], data=[]]
self.db_adapter.add_object(user)
return user |
def neighbour_and_arc_simplices(self):
"""
Get indices of neighbour simplices for each simplex and arc indices.
Identical to get_neighbour_simplices() but also returns an array
of indices that reside on boundary hull, -1 denotes no neighbour.
"""
nt, ltri, lct, ierr = _tripack.trlist(self.lst, self.lptr, self.lend, nrow=9)
if ierr != 0:
raise ValueError('ierr={} in trlist\n{}'.format(ierr, _ier_codes[ierr]))
ltri = ltri.T[:nt] - 1
return ltri[:,3:6], ltri[:,6:] | def function[neighbour_and_arc_simplices, parameter[self]]:
constant[
Get indices of neighbour simplices for each simplex and arc indices.
Identical to get_neighbour_simplices() but also returns an array
of indices that reside on boundary hull, -1 denotes no neighbour.
]
<ast.Tuple object at 0x7da18eb56c50> assign[=] call[name[_tripack].trlist, parameter[name[self].lst, name[self].lptr, name[self].lend]]
if compare[name[ierr] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da18eb543d0>
variable[ltri] assign[=] binary_operation[call[name[ltri].T][<ast.Slice object at 0x7da18eb57790>] - constant[1]]
return[tuple[[<ast.Subscript object at 0x7da18dc99540>, <ast.Subscript object at 0x7da18dc9aa40>]]] | keyword[def] identifier[neighbour_and_arc_simplices] ( identifier[self] ):
literal[string]
identifier[nt] , identifier[ltri] , identifier[lct] , identifier[ierr] = identifier[_tripack] . identifier[trlist] ( identifier[self] . identifier[lst] , identifier[self] . identifier[lptr] , identifier[self] . identifier[lend] , identifier[nrow] = literal[int] )
keyword[if] identifier[ierr] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[ierr] , identifier[_ier_codes] [ identifier[ierr] ]))
identifier[ltri] = identifier[ltri] . identifier[T] [: identifier[nt] ]- literal[int]
keyword[return] identifier[ltri] [:, literal[int] : literal[int] ], identifier[ltri] [:, literal[int] :] | def neighbour_and_arc_simplices(self):
"""
Get indices of neighbour simplices for each simplex and arc indices.
Identical to get_neighbour_simplices() but also returns an array
of indices that reside on boundary hull, -1 denotes no neighbour.
"""
(nt, ltri, lct, ierr) = _tripack.trlist(self.lst, self.lptr, self.lend, nrow=9)
if ierr != 0:
raise ValueError('ierr={} in trlist\n{}'.format(ierr, _ier_codes[ierr])) # depends on [control=['if'], data=['ierr']]
ltri = ltri.T[:nt] - 1
return (ltri[:, 3:6], ltri[:, 6:]) |
def datetimeobj(value, fmt=None):
"""Parse a datetime to a datetime object.
Uses fast custom parsing for common datetime formats or the slow dateutil
parser for other formats. This is a trade off between ease of use and speed
and is very useful for fast parsing of timestamp strings whose format may
standard but varied or unknown prior to parsing.
Common formats include:
1 Feb 2010 12:00:00 GMT
Mon, 1 Feb 2010 22:00:00 +1000
20100201120000
1383470155 (seconds since epoch)
See the other datetimeobj_*() functions for more details.
Args:
value: A string representing a datetime.
Returns:
A datetime object.
"""
if fmt:
return _datetimeobj_formats.get(fmt,
lambda v: datetimeobj_fmt(v, fmt)
)(value)
l = len(value)
if 19 <= l <= 24 and value[3] == " ":
# '%d %b %Y %H:%M:%Sxxxx'
try:
return datetimeobj_d_b_Y_H_M_S(value)
except (KeyError, ValueError):
pass
if 30 <= l <= 31:
# '%a, %d %b %Y %H:%M:%S %z'
try:
return datetimeobj_a__d_b_Y_H_M_S_z(value)
except (KeyError, ValueError):
pass
if l == 14:
# '%Y%m%d%H%M%S'
try:
return datetimeobj_YmdHMS(value)
except ValueError:
pass
# epoch timestamp
try:
return datetimeobj_epoch(value)
except ValueError:
pass
# slow version
return datetimeobj_any(value) | def function[datetimeobj, parameter[value, fmt]]:
constant[Parse a datetime to a datetime object.
Uses fast custom parsing for common datetime formats or the slow dateutil
parser for other formats. This is a trade off between ease of use and speed
and is very useful for fast parsing of timestamp strings whose format may
standard but varied or unknown prior to parsing.
Common formats include:
1 Feb 2010 12:00:00 GMT
Mon, 1 Feb 2010 22:00:00 +1000
20100201120000
1383470155 (seconds since epoch)
See the other datetimeobj_*() functions for more details.
Args:
value: A string representing a datetime.
Returns:
A datetime object.
]
if name[fmt] begin[:]
return[call[call[name[_datetimeobj_formats].get, parameter[name[fmt], <ast.Lambda object at 0x7da1b0146b00>]], parameter[name[value]]]]
variable[l] assign[=] call[name[len], parameter[name[value]]]
if <ast.BoolOp object at 0x7da1b0146ec0> begin[:]
<ast.Try object at 0x7da1b01462f0>
if compare[constant[30] less_or_equal[<=] name[l]] begin[:]
<ast.Try object at 0x7da1b01473d0>
if compare[name[l] equal[==] constant[14]] begin[:]
<ast.Try object at 0x7da1b0145cc0>
<ast.Try object at 0x7da1b0145f30>
return[call[name[datetimeobj_any], parameter[name[value]]]] | keyword[def] identifier[datetimeobj] ( identifier[value] , identifier[fmt] = keyword[None] ):
literal[string]
keyword[if] identifier[fmt] :
keyword[return] identifier[_datetimeobj_formats] . identifier[get] ( identifier[fmt] ,
keyword[lambda] identifier[v] : identifier[datetimeobj_fmt] ( identifier[v] , identifier[fmt] )
)( identifier[value] )
identifier[l] = identifier[len] ( identifier[value] )
keyword[if] literal[int] <= identifier[l] <= literal[int] keyword[and] identifier[value] [ literal[int] ]== literal[string] :
keyword[try] :
keyword[return] identifier[datetimeobj_d_b_Y_H_M_S] ( identifier[value] )
keyword[except] ( identifier[KeyError] , identifier[ValueError] ):
keyword[pass]
keyword[if] literal[int] <= identifier[l] <= literal[int] :
keyword[try] :
keyword[return] identifier[datetimeobj_a__d_b_Y_H_M_S_z] ( identifier[value] )
keyword[except] ( identifier[KeyError] , identifier[ValueError] ):
keyword[pass]
keyword[if] identifier[l] == literal[int] :
keyword[try] :
keyword[return] identifier[datetimeobj_YmdHMS] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[try] :
keyword[return] identifier[datetimeobj_epoch] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[return] identifier[datetimeobj_any] ( identifier[value] ) | def datetimeobj(value, fmt=None):
"""Parse a datetime to a datetime object.
Uses fast custom parsing for common datetime formats or the slow dateutil
parser for other formats. This is a trade off between ease of use and speed
and is very useful for fast parsing of timestamp strings whose format may
standard but varied or unknown prior to parsing.
Common formats include:
1 Feb 2010 12:00:00 GMT
Mon, 1 Feb 2010 22:00:00 +1000
20100201120000
1383470155 (seconds since epoch)
See the other datetimeobj_*() functions for more details.
Args:
value: A string representing a datetime.
Returns:
A datetime object.
"""
if fmt:
return _datetimeobj_formats.get(fmt, lambda v: datetimeobj_fmt(v, fmt))(value) # depends on [control=['if'], data=[]]
l = len(value)
if 19 <= l <= 24 and value[3] == ' ':
# '%d %b %Y %H:%M:%Sxxxx'
try:
return datetimeobj_d_b_Y_H_M_S(value) # depends on [control=['try'], data=[]]
except (KeyError, ValueError):
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if 30 <= l <= 31:
# '%a, %d %b %Y %H:%M:%S %z'
try:
return datetimeobj_a__d_b_Y_H_M_S_z(value) # depends on [control=['try'], data=[]]
except (KeyError, ValueError):
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if l == 14:
# '%Y%m%d%H%M%S'
try:
return datetimeobj_YmdHMS(value) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# epoch timestamp
try:
return datetimeobj_epoch(value) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]]
# slow version
return datetimeobj_any(value) |
def deactivate(self, plugins=[]):
"""
Deactivates given plugins.
A given plugin must be activated, otherwise it is ignored and no action takes place (no signals are fired,
no deactivate functions are called.)
A deactivated plugin is still loaded and initialised and can be reactivated by calling :func:`activate` again.
It is also still registered in the :class:`.PluginManager` and can be requested via :func:`get`.
:param plugins: List of plugin names
:type plugins: list of strings
"""
self._log.debug("Plugins Deactivation started")
if not isinstance(plugins, list):
raise AttributeError("plugins must be a list, not %s" % type(plugins))
self._log.debug("Plugins to deactivate: %s" % ", ".join(plugins))
plugins_deactivated = []
for plugin_name in plugins:
if not isinstance(plugin_name, str):
raise AttributeError("plugin name must be a str, not %s" % type(plugin_name))
if plugin_name not in self._plugins.keys():
self._log.info("Unknown activated plugin %s" % plugin_name)
continue
else:
self._log.debug("Deactivating plugin %s" % plugin_name)
if not self._plugins[plugin_name].active:
self._log.warning("Plugin %s seems to be already deactivated" % plugin_name)
else:
try:
self._plugins[plugin_name].deactivate()
except Exception as e:
raise_from(
PluginNotDeactivatableException("Plugin %s could not be deactivated" % plugin_name), e)
else:
self._log.debug("Plugin %s deactivated" % plugin_name)
plugins_deactivated.append(plugin_name)
self._log.info("Plugins deactivated: %s" % ", ".join(plugins_deactivated)) | def function[deactivate, parameter[self, plugins]]:
constant[
Deactivates given plugins.
A given plugin must be activated, otherwise it is ignored and no action takes place (no signals are fired,
no deactivate functions are called.)
A deactivated plugin is still loaded and initialised and can be reactivated by calling :func:`activate` again.
It is also still registered in the :class:`.PluginManager` and can be requested via :func:`get`.
:param plugins: List of plugin names
:type plugins: list of strings
]
call[name[self]._log.debug, parameter[constant[Plugins Deactivation started]]]
if <ast.UnaryOp object at 0x7da1b2430430> begin[:]
<ast.Raise object at 0x7da1b24309a0>
call[name[self]._log.debug, parameter[binary_operation[constant[Plugins to deactivate: %s] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[plugins]]]]]]
variable[plugins_deactivated] assign[=] list[[]]
for taget[name[plugin_name]] in starred[name[plugins]] begin[:]
if <ast.UnaryOp object at 0x7da1b2430d90> begin[:]
<ast.Raise object at 0x7da1b2432fb0>
if compare[name[plugin_name] <ast.NotIn object at 0x7da2590d7190> call[name[self]._plugins.keys, parameter[]]] begin[:]
call[name[self]._log.info, parameter[binary_operation[constant[Unknown activated plugin %s] <ast.Mod object at 0x7da2590d6920> name[plugin_name]]]]
continue
call[name[self]._log.info, parameter[binary_operation[constant[Plugins deactivated: %s] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[plugins_deactivated]]]]]] | keyword[def] identifier[deactivate] ( identifier[self] , identifier[plugins] =[]):
literal[string]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[plugins] , identifier[list] ):
keyword[raise] identifier[AttributeError] ( literal[string] % identifier[type] ( identifier[plugins] ))
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] % literal[string] . identifier[join] ( identifier[plugins] ))
identifier[plugins_deactivated] =[]
keyword[for] identifier[plugin_name] keyword[in] identifier[plugins] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[plugin_name] , identifier[str] ):
keyword[raise] identifier[AttributeError] ( literal[string] % identifier[type] ( identifier[plugin_name] ))
keyword[if] identifier[plugin_name] keyword[not] keyword[in] identifier[self] . identifier[_plugins] . identifier[keys] ():
identifier[self] . identifier[_log] . identifier[info] ( literal[string] % identifier[plugin_name] )
keyword[continue]
keyword[else] :
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] % identifier[plugin_name] )
keyword[if] keyword[not] identifier[self] . identifier[_plugins] [ identifier[plugin_name] ]. identifier[active] :
identifier[self] . identifier[_log] . identifier[warning] ( literal[string] % identifier[plugin_name] )
keyword[else] :
keyword[try] :
identifier[self] . identifier[_plugins] [ identifier[plugin_name] ]. identifier[deactivate] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[raise_from] (
identifier[PluginNotDeactivatableException] ( literal[string] % identifier[plugin_name] ), identifier[e] )
keyword[else] :
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] % identifier[plugin_name] )
identifier[plugins_deactivated] . identifier[append] ( identifier[plugin_name] )
identifier[self] . identifier[_log] . identifier[info] ( literal[string] % literal[string] . identifier[join] ( identifier[plugins_deactivated] )) | def deactivate(self, plugins=[]):
"""
Deactivates given plugins.
A given plugin must be activated, otherwise it is ignored and no action takes place (no signals are fired,
no deactivate functions are called.)
A deactivated plugin is still loaded and initialised and can be reactivated by calling :func:`activate` again.
It is also still registered in the :class:`.PluginManager` and can be requested via :func:`get`.
:param plugins: List of plugin names
:type plugins: list of strings
"""
self._log.debug('Plugins Deactivation started')
if not isinstance(plugins, list):
raise AttributeError('plugins must be a list, not %s' % type(plugins)) # depends on [control=['if'], data=[]]
self._log.debug('Plugins to deactivate: %s' % ', '.join(plugins))
plugins_deactivated = []
for plugin_name in plugins:
if not isinstance(plugin_name, str):
raise AttributeError('plugin name must be a str, not %s' % type(plugin_name)) # depends on [control=['if'], data=[]]
if plugin_name not in self._plugins.keys():
self._log.info('Unknown activated plugin %s' % plugin_name)
continue # depends on [control=['if'], data=['plugin_name']]
else:
self._log.debug('Deactivating plugin %s' % plugin_name)
if not self._plugins[plugin_name].active:
self._log.warning('Plugin %s seems to be already deactivated' % plugin_name) # depends on [control=['if'], data=[]]
else:
try:
self._plugins[plugin_name].deactivate() # depends on [control=['try'], data=[]]
except Exception as e:
raise_from(PluginNotDeactivatableException('Plugin %s could not be deactivated' % plugin_name), e) # depends on [control=['except'], data=['e']]
else:
self._log.debug('Plugin %s deactivated' % plugin_name)
plugins_deactivated.append(plugin_name) # depends on [control=['for'], data=['plugin_name']]
self._log.info('Plugins deactivated: %s' % ', '.join(plugins_deactivated)) |
def get_volume(self):
"""
returns the current volume
"""
log.debug("getting volumne...")
cmd, url = DEVICE_URLS["get_volume"]
return self._exec(cmd, url) | def function[get_volume, parameter[self]]:
constant[
returns the current volume
]
call[name[log].debug, parameter[constant[getting volumne...]]]
<ast.Tuple object at 0x7da2054a5f00> assign[=] call[name[DEVICE_URLS]][constant[get_volume]]
return[call[name[self]._exec, parameter[name[cmd], name[url]]]] | keyword[def] identifier[get_volume] ( identifier[self] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] )
identifier[cmd] , identifier[url] = identifier[DEVICE_URLS] [ literal[string] ]
keyword[return] identifier[self] . identifier[_exec] ( identifier[cmd] , identifier[url] ) | def get_volume(self):
"""
returns the current volume
"""
log.debug('getting volumne...')
(cmd, url) = DEVICE_URLS['get_volume']
return self._exec(cmd, url) |
def simhash(self, content):
"""
Select policies for simhash on the different types of content.
"""
if content is None:
self.hash = -1
return
if isinstance(content, str):
features = self.tokenizer_func(content, self.keyword_weight_pari)
self.hash = self.build_from_features(features)
elif isinstance(content, collections.Iterable):
self.hash = self.build_from_features(content)
elif isinstance(content, int):
self.hash = content
else:
raise Exception("Unsupported parameter type %s" % type(content)) | def function[simhash, parameter[self, content]]:
constant[
Select policies for simhash on the different types of content.
]
if compare[name[content] is constant[None]] begin[:]
name[self].hash assign[=] <ast.UnaryOp object at 0x7da18ede51e0>
return[None]
if call[name[isinstance], parameter[name[content], name[str]]] begin[:]
variable[features] assign[=] call[name[self].tokenizer_func, parameter[name[content], name[self].keyword_weight_pari]]
name[self].hash assign[=] call[name[self].build_from_features, parameter[name[features]]] | keyword[def] identifier[simhash] ( identifier[self] , identifier[content] ):
literal[string]
keyword[if] identifier[content] keyword[is] keyword[None] :
identifier[self] . identifier[hash] =- literal[int]
keyword[return]
keyword[if] identifier[isinstance] ( identifier[content] , identifier[str] ):
identifier[features] = identifier[self] . identifier[tokenizer_func] ( identifier[content] , identifier[self] . identifier[keyword_weight_pari] )
identifier[self] . identifier[hash] = identifier[self] . identifier[build_from_features] ( identifier[features] )
keyword[elif] identifier[isinstance] ( identifier[content] , identifier[collections] . identifier[Iterable] ):
identifier[self] . identifier[hash] = identifier[self] . identifier[build_from_features] ( identifier[content] )
keyword[elif] identifier[isinstance] ( identifier[content] , identifier[int] ):
identifier[self] . identifier[hash] = identifier[content]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[type] ( identifier[content] )) | def simhash(self, content):
"""
Select policies for simhash on the different types of content.
"""
if content is None:
self.hash = -1
return # depends on [control=['if'], data=[]]
if isinstance(content, str):
features = self.tokenizer_func(content, self.keyword_weight_pari)
self.hash = self.build_from_features(features) # depends on [control=['if'], data=[]]
elif isinstance(content, collections.Iterable):
self.hash = self.build_from_features(content) # depends on [control=['if'], data=[]]
elif isinstance(content, int):
self.hash = content # depends on [control=['if'], data=[]]
else:
raise Exception('Unsupported parameter type %s' % type(content)) |
def get_time_estimate(self, start_latitude, start_longitude, customer_uuid=None, product_id=None):
"""
Get the ETA for Uber products.
:param start_latitude: Starting latitude.
:param start_longitude: Starting longitude.
:param customer_uuid: (Optional) Customer unique ID.
:param product_id: (Optional) If ETA is needed only for a specific product type.
:return: JSON
"""
endpoint = 'estimates/time'
query_parameters = {
'start_latitude': start_latitude,
'start_longitude': start_longitude
}
if customer_uuid is not None:
query_parameters['customer_uuid'] = customer_uuid
elif product_id is not None:
query_parameters['product_id'] = product_id
elif customer_uuid is not None and product_id is not None:
query_parameters['customer_uuid'] = customer_uuid
query_parameters['product_id'] = product_id
return self.get_json(endpoint, 'GET', query_parameters, None, None) | def function[get_time_estimate, parameter[self, start_latitude, start_longitude, customer_uuid, product_id]]:
constant[
Get the ETA for Uber products.
:param start_latitude: Starting latitude.
:param start_longitude: Starting longitude.
:param customer_uuid: (Optional) Customer unique ID.
:param product_id: (Optional) If ETA is needed only for a specific product type.
:return: JSON
]
variable[endpoint] assign[=] constant[estimates/time]
variable[query_parameters] assign[=] dictionary[[<ast.Constant object at 0x7da18f09d4e0>, <ast.Constant object at 0x7da18f09c1f0>], [<ast.Name object at 0x7da18f09dcc0>, <ast.Name object at 0x7da18f09dd20>]]
if compare[name[customer_uuid] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[customer_uuid]] assign[=] name[customer_uuid]
return[call[name[self].get_json, parameter[name[endpoint], constant[GET], name[query_parameters], constant[None], constant[None]]]] | keyword[def] identifier[get_time_estimate] ( identifier[self] , identifier[start_latitude] , identifier[start_longitude] , identifier[customer_uuid] = keyword[None] , identifier[product_id] = keyword[None] ):
literal[string]
identifier[endpoint] = literal[string]
identifier[query_parameters] ={
literal[string] : identifier[start_latitude] ,
literal[string] : identifier[start_longitude]
}
keyword[if] identifier[customer_uuid] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[customer_uuid]
keyword[elif] identifier[product_id] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[product_id]
keyword[elif] identifier[customer_uuid] keyword[is] keyword[not] keyword[None] keyword[and] identifier[product_id] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[customer_uuid]
identifier[query_parameters] [ literal[string] ]= identifier[product_id]
keyword[return] identifier[self] . identifier[get_json] ( identifier[endpoint] , literal[string] , identifier[query_parameters] , keyword[None] , keyword[None] ) | def get_time_estimate(self, start_latitude, start_longitude, customer_uuid=None, product_id=None):
"""
Get the ETA for Uber products.
:param start_latitude: Starting latitude.
:param start_longitude: Starting longitude.
:param customer_uuid: (Optional) Customer unique ID.
:param product_id: (Optional) If ETA is needed only for a specific product type.
:return: JSON
"""
endpoint = 'estimates/time'
query_parameters = {'start_latitude': start_latitude, 'start_longitude': start_longitude}
if customer_uuid is not None:
query_parameters['customer_uuid'] = customer_uuid # depends on [control=['if'], data=['customer_uuid']]
elif product_id is not None:
query_parameters['product_id'] = product_id # depends on [control=['if'], data=['product_id']]
elif customer_uuid is not None and product_id is not None:
query_parameters['customer_uuid'] = customer_uuid
query_parameters['product_id'] = product_id # depends on [control=['if'], data=[]]
return self.get_json(endpoint, 'GET', query_parameters, None, None) |
def residue_distances(res_1_num, res_1_chain, res_2_num, res_2_chain, model):
"""Distance between the last atom of 2 residues"""
res1 = model[res_1_chain][res_1_num].child_list[-1]
res2 = model[res_2_chain][res_2_num].child_list[-1]
distance = res1 - res2
return distance | def function[residue_distances, parameter[res_1_num, res_1_chain, res_2_num, res_2_chain, model]]:
constant[Distance between the last atom of 2 residues]
variable[res1] assign[=] call[call[call[name[model]][name[res_1_chain]]][name[res_1_num]].child_list][<ast.UnaryOp object at 0x7da1b0e6e500>]
variable[res2] assign[=] call[call[call[name[model]][name[res_2_chain]]][name[res_2_num]].child_list][<ast.UnaryOp object at 0x7da1b0e6dcf0>]
variable[distance] assign[=] binary_operation[name[res1] - name[res2]]
return[name[distance]] | keyword[def] identifier[residue_distances] ( identifier[res_1_num] , identifier[res_1_chain] , identifier[res_2_num] , identifier[res_2_chain] , identifier[model] ):
literal[string]
identifier[res1] = identifier[model] [ identifier[res_1_chain] ][ identifier[res_1_num] ]. identifier[child_list] [- literal[int] ]
identifier[res2] = identifier[model] [ identifier[res_2_chain] ][ identifier[res_2_num] ]. identifier[child_list] [- literal[int] ]
identifier[distance] = identifier[res1] - identifier[res2]
keyword[return] identifier[distance] | def residue_distances(res_1_num, res_1_chain, res_2_num, res_2_chain, model):
"""Distance between the last atom of 2 residues"""
res1 = model[res_1_chain][res_1_num].child_list[-1]
res2 = model[res_2_chain][res_2_num].child_list[-1]
distance = res1 - res2
return distance |
def _load_response(self, action):
"""
returns API reponse from cache or raises ValueError
"""
_query = self.cache[action]['query'].replace('&format=json', '')
response = self.cache[action]['response']
if not response:
raise ValueError("Empty response: %s" % self.params)
try:
data = utils.json_loads(response)
except ValueError:
raise ValueError(_query)
if data.get('warnings'):
if 'WARNINGS' in self.data:
self.data['WARNINGS'].update(data['warnings'])
else:
self.data['WARNINGS'] = data['warnings']
if data.get('error'):
utils.stderr("API error: %s" % data.get('error'))
raise LookupError(_query)
if 'query' in action and data.get('query'):
if data['query'].get('pages'):
if data['query']['pages'][0].get('missing'):
raise LookupError(_query)
if action == 'parse' and not data.get('parse'):
raise LookupError(_query)
if action == 'wikidata':
handle_wikidata_errors(data, _query)
return data | def function[_load_response, parameter[self, action]]:
constant[
returns API reponse from cache or raises ValueError
]
variable[_query] assign[=] call[call[call[name[self].cache][name[action]]][constant[query]].replace, parameter[constant[&format=json], constant[]]]
variable[response] assign[=] call[call[name[self].cache][name[action]]][constant[response]]
if <ast.UnaryOp object at 0x7da1b1206950> begin[:]
<ast.Raise object at 0x7da1b12046a0>
<ast.Try object at 0x7da1b1204520>
if call[name[data].get, parameter[constant[warnings]]] begin[:]
if compare[constant[WARNINGS] in name[self].data] begin[:]
call[call[name[self].data][constant[WARNINGS]].update, parameter[call[name[data]][constant[warnings]]]]
if call[name[data].get, parameter[constant[error]]] begin[:]
call[name[utils].stderr, parameter[binary_operation[constant[API error: %s] <ast.Mod object at 0x7da2590d6920> call[name[data].get, parameter[constant[error]]]]]]
<ast.Raise object at 0x7da1b12b4fd0>
if <ast.BoolOp object at 0x7da1b12b7bb0> begin[:]
if call[call[name[data]][constant[query]].get, parameter[constant[pages]]] begin[:]
if call[call[call[call[name[data]][constant[query]]][constant[pages]]][constant[0]].get, parameter[constant[missing]]] begin[:]
<ast.Raise object at 0x7da1b12b42b0>
if <ast.BoolOp object at 0x7da1b12b7520> begin[:]
<ast.Raise object at 0x7da1b12b7c40>
if compare[name[action] equal[==] constant[wikidata]] begin[:]
call[name[handle_wikidata_errors], parameter[name[data], name[_query]]]
return[name[data]] | keyword[def] identifier[_load_response] ( identifier[self] , identifier[action] ):
literal[string]
identifier[_query] = identifier[self] . identifier[cache] [ identifier[action] ][ literal[string] ]. identifier[replace] ( literal[string] , literal[string] )
identifier[response] = identifier[self] . identifier[cache] [ identifier[action] ][ literal[string] ]
keyword[if] keyword[not] identifier[response] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[self] . identifier[params] )
keyword[try] :
identifier[data] = identifier[utils] . identifier[json_loads] ( identifier[response] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( identifier[_query] )
keyword[if] identifier[data] . identifier[get] ( literal[string] ):
keyword[if] literal[string] keyword[in] identifier[self] . identifier[data] :
identifier[self] . identifier[data] [ literal[string] ]. identifier[update] ( identifier[data] [ literal[string] ])
keyword[else] :
identifier[self] . identifier[data] [ literal[string] ]= identifier[data] [ literal[string] ]
keyword[if] identifier[data] . identifier[get] ( literal[string] ):
identifier[utils] . identifier[stderr] ( literal[string] % identifier[data] . identifier[get] ( literal[string] ))
keyword[raise] identifier[LookupError] ( identifier[_query] )
keyword[if] literal[string] keyword[in] identifier[action] keyword[and] identifier[data] . identifier[get] ( literal[string] ):
keyword[if] identifier[data] [ literal[string] ]. identifier[get] ( literal[string] ):
keyword[if] identifier[data] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get] ( literal[string] ):
keyword[raise] identifier[LookupError] ( identifier[_query] )
keyword[if] identifier[action] == literal[string] keyword[and] keyword[not] identifier[data] . identifier[get] ( literal[string] ):
keyword[raise] identifier[LookupError] ( identifier[_query] )
keyword[if] identifier[action] == literal[string] :
identifier[handle_wikidata_errors] ( identifier[data] , identifier[_query] )
keyword[return] identifier[data] | def _load_response(self, action):
"""
returns API reponse from cache or raises ValueError
"""
_query = self.cache[action]['query'].replace('&format=json', '')
response = self.cache[action]['response']
if not response:
raise ValueError('Empty response: %s' % self.params) # depends on [control=['if'], data=[]]
try:
data = utils.json_loads(response) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError(_query) # depends on [control=['except'], data=[]]
if data.get('warnings'):
if 'WARNINGS' in self.data:
self.data['WARNINGS'].update(data['warnings']) # depends on [control=['if'], data=[]]
else:
self.data['WARNINGS'] = data['warnings'] # depends on [control=['if'], data=[]]
if data.get('error'):
utils.stderr('API error: %s' % data.get('error'))
raise LookupError(_query) # depends on [control=['if'], data=[]]
if 'query' in action and data.get('query'):
if data['query'].get('pages'):
if data['query']['pages'][0].get('missing'):
raise LookupError(_query) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if action == 'parse' and (not data.get('parse')):
raise LookupError(_query) # depends on [control=['if'], data=[]]
if action == 'wikidata':
handle_wikidata_errors(data, _query) # depends on [control=['if'], data=[]]
return data |
def lmx_relative():
"""Language model using relative attention."""
hparams = lmx_base()
hparams.self_attention_type = "dot_product_relative_v2"
hparams.activation_dtype = "float32"
hparams.weight_dtype = "float32"
return hparams | def function[lmx_relative, parameter[]]:
constant[Language model using relative attention.]
variable[hparams] assign[=] call[name[lmx_base], parameter[]]
name[hparams].self_attention_type assign[=] constant[dot_product_relative_v2]
name[hparams].activation_dtype assign[=] constant[float32]
name[hparams].weight_dtype assign[=] constant[float32]
return[name[hparams]] | keyword[def] identifier[lmx_relative] ():
literal[string]
identifier[hparams] = identifier[lmx_base] ()
identifier[hparams] . identifier[self_attention_type] = literal[string]
identifier[hparams] . identifier[activation_dtype] = literal[string]
identifier[hparams] . identifier[weight_dtype] = literal[string]
keyword[return] identifier[hparams] | def lmx_relative():
"""Language model using relative attention."""
hparams = lmx_base()
hparams.self_attention_type = 'dot_product_relative_v2'
hparams.activation_dtype = 'float32'
hparams.weight_dtype = 'float32'
return hparams |
def weld_str_lower(array):
    """Lowercase each string value in the input.

    Parameters
    ----------
    array : numpy.ndarray or WeldObject
        Input data.

    Returns
    -------
    WeldObject
        Representation of this computation.
    """
    array_id, weld_obj = create_weld_object(array)
    # Bytes in the ASCII uppercase range (65..90, i.e. 'A'..'Z') are
    # shifted by +32 to their lowercase forms; every other byte is
    # passed through unchanged.
    lower_template = """map(
    {array},
    |e: vec[i8]|
        result(
            for(e,
                appender[i8],
                |c: appender[i8], j: i64, f: i8|
                    if(f > 64c && f < 91c,
                        merge(c, f + 32c),
                        merge(c, f))
            )
        )
)"""
    weld_obj.weld_code = lower_template.format(array=array_id)
    return weld_obj
constant[Convert values to lowercase.
Parameters
----------
array : numpy.ndarray or WeldObject
Input data.
Returns
-------
WeldObject
Representation of this computation.
]
<ast.Tuple object at 0x7da1b098a500> assign[=] call[name[create_weld_object], parameter[name[array]]]
variable[weld_template] assign[=] constant[map(
{array},
|e: vec[i8]|
result(
for(e,
appender[i8],
|c: appender[i8], j: i64, f: i8|
if(f > 64c && f < 91c,
merge(c, f + 32c),
merge(c, f))
)
)
)]
name[weld_obj].weld_code assign[=] call[name[weld_template].format, parameter[]]
return[name[weld_obj]] | keyword[def] identifier[weld_str_lower] ( identifier[array] ):
literal[string]
identifier[obj_id] , identifier[weld_obj] = identifier[create_weld_object] ( identifier[array] )
identifier[weld_template] = literal[string]
identifier[weld_obj] . identifier[weld_code] = identifier[weld_template] . identifier[format] ( identifier[array] = identifier[obj_id] )
keyword[return] identifier[weld_obj] | def weld_str_lower(array):
"""Convert values to lowercase.
Parameters
----------
array : numpy.ndarray or WeldObject
Input data.
Returns
-------
WeldObject
Representation of this computation.
"""
(obj_id, weld_obj) = create_weld_object(array)
weld_template = 'map(\n {array},\n |e: vec[i8]|\n result(\n for(e,\n appender[i8],\n |c: appender[i8], j: i64, f: i8|\n if(f > 64c && f < 91c,\n merge(c, f + 32c),\n merge(c, f))\n )\n )\n)'
weld_obj.weld_code = weld_template.format(array=obj_id)
return weld_obj |
def separated(p, sep, mint, maxt=None, end=None):
    '''Repeat parser `p`, separated by parser `sep`, between `mint` and
    `maxt` times.

    When `end` is None, a trailing separator is optional.
    When `end` is True, a trailing separator is required.
    When `end` is False, a trailing separator is not allowed.
    Matches as much of the input as possible (greedy).

    Args:
        p: parser for the repeated elements.
        sep: parser for the separator between elements.
        mint: minimum number of elements that must match.
        maxt: maximum number of elements to match; falls back to `mint`
            when not given (or falsy).
        end: trailing-separator policy (see above).

    Returns:
        A `Parser` producing the list of values returned by `p`.'''
    # A falsy maxt (None or 0) means "exactly mint repetitions".
    maxt = maxt if maxt else mint
    @Parser
    def sep_parser(text, index):
        # cnt: elements matched so far; values: accumulated successes.
        cnt, values, res = 0, Value.success(index, []), None
        while cnt < maxt:
            # Unless trailing separators are required (end is True, in
            # which case `sep` is consumed after each element below), a
            # separator must precede every element except the first.
            if end in [False, None] and cnt > 0:
                res = sep(text, index)
                if res.status:  # `sep` found, consume it (advance index)
                    index, values = res.index, Value.success(
                        res.index, values.value)
                elif cnt < mint:
                    return res  # error: need more elements, but no `sep` found.
                else:
                    break
            res = p(text, index)
            if res.status:
                values = values.aggregate(
                    Value.success(res.index, [res.value]))
                index, cnt = res.index, cnt + 1
            elif cnt >= mint:
                break
            else:
                return res  # error: need more elements, but no `p` found.
            if end is True:
                # A trailing separator is mandatory after every element.
                res = sep(text, index)
                if res.status:
                    index, values = res.index, Value.success(
                        res.index, values.value)
                else:
                    return res  # error: trailing `sep` not found
            if cnt >= maxt:
                break  # reached the maximum repetition count
        return values
    return sep_parser
constant[Repeat a parser `p` separated by `s` between `mint` and `maxt` times.
When `end` is None, a trailing separator is optional.
When `end` is True, a trailing separator is required.
When `end` is False, a trailing separator is not allowed.
MATCHES AS MUCH AS POSSIBLE.
Return list of values returned by `p`.]
variable[maxt] assign[=] <ast.IfExp object at 0x7da18f722f50>
def function[sep_parser, parameter[text, index]]:
<ast.Tuple object at 0x7da18f7215d0> assign[=] tuple[[<ast.Constant object at 0x7da18f721cc0>, <ast.Call object at 0x7da18f720df0>, <ast.Constant object at 0x7da18f722380>]]
while compare[name[cnt] less[<] name[maxt]] begin[:]
if <ast.BoolOp object at 0x7da18f721540> begin[:]
variable[res] assign[=] call[name[sep], parameter[name[text], name[index]]]
if name[res].status begin[:]
<ast.Tuple object at 0x7da2044c0b20> assign[=] tuple[[<ast.Attribute object at 0x7da212db4ee0>, <ast.Call object at 0x7da212db4c70>]]
variable[res] assign[=] call[name[p], parameter[name[text], name[index]]]
if name[res].status begin[:]
variable[values] assign[=] call[name[values].aggregate, parameter[call[name[Value].success, parameter[name[res].index, list[[<ast.Attribute object at 0x7da20c6e74c0>]]]]]]
<ast.Tuple object at 0x7da20c6e5e10> assign[=] tuple[[<ast.Attribute object at 0x7da20c6e7c40>, <ast.BinOp object at 0x7da18c4cf7f0>]]
if compare[name[end] is constant[True]] begin[:]
variable[res] assign[=] call[name[sep], parameter[name[text], name[index]]]
if name[res].status begin[:]
<ast.Tuple object at 0x7da20c6e6dd0> assign[=] tuple[[<ast.Attribute object at 0x7da20c6e4340>, <ast.Call object at 0x7da20c6e4d30>]]
if compare[name[cnt] greater_or_equal[>=] name[maxt]] begin[:]
break
return[name[values]]
return[name[sep_parser]] | keyword[def] identifier[separated] ( identifier[p] , identifier[sep] , identifier[mint] , identifier[maxt] = keyword[None] , identifier[end] = keyword[None] ):
literal[string]
identifier[maxt] = identifier[maxt] keyword[if] identifier[maxt] keyword[else] identifier[mint]
@ identifier[Parser]
keyword[def] identifier[sep_parser] ( identifier[text] , identifier[index] ):
identifier[cnt] , identifier[values] , identifier[res] = literal[int] , identifier[Value] . identifier[success] ( identifier[index] ,[]), keyword[None]
keyword[while] identifier[cnt] < identifier[maxt] :
keyword[if] identifier[end] keyword[in] [ keyword[False] , keyword[None] ] keyword[and] identifier[cnt] > literal[int] :
identifier[res] = identifier[sep] ( identifier[text] , identifier[index] )
keyword[if] identifier[res] . identifier[status] :
identifier[index] , identifier[values] = identifier[res] . identifier[index] , identifier[Value] . identifier[success] (
identifier[res] . identifier[index] , identifier[values] . identifier[value] )
keyword[elif] identifier[cnt] < identifier[mint] :
keyword[return] identifier[res]
keyword[else] :
keyword[break]
identifier[res] = identifier[p] ( identifier[text] , identifier[index] )
keyword[if] identifier[res] . identifier[status] :
identifier[values] = identifier[values] . identifier[aggregate] (
identifier[Value] . identifier[success] ( identifier[res] . identifier[index] ,[ identifier[res] . identifier[value] ]))
identifier[index] , identifier[cnt] = identifier[res] . identifier[index] , identifier[cnt] + literal[int]
keyword[elif] identifier[cnt] >= identifier[mint] :
keyword[break]
keyword[else] :
keyword[return] identifier[res]
keyword[if] identifier[end] keyword[is] keyword[True] :
identifier[res] = identifier[sep] ( identifier[text] , identifier[index] )
keyword[if] identifier[res] . identifier[status] :
identifier[index] , identifier[values] = identifier[res] . identifier[index] , identifier[Value] . identifier[success] (
identifier[res] . identifier[index] , identifier[values] . identifier[value] )
keyword[else] :
keyword[return] identifier[res]
keyword[if] identifier[cnt] >= identifier[maxt] :
keyword[break]
keyword[return] identifier[values]
keyword[return] identifier[sep_parser] | def separated(p, sep, mint, maxt=None, end=None):
"""Repeat a parser `p` separated by `s` between `mint` and `maxt` times.
When `end` is None, a trailing separator is optional.
When `end` is True, a trailing separator is required.
When `end` is False, a trailing separator is not allowed.
MATCHES AS MUCH AS POSSIBLE.
Return list of values returned by `p`."""
maxt = maxt if maxt else mint
@Parser
def sep_parser(text, index):
(cnt, values, res) = (0, Value.success(index, []), None)
while cnt < maxt:
if end in [False, None] and cnt > 0:
res = sep(text, index)
if res.status: # `sep` found, consume it (advance index)
(index, values) = (res.index, Value.success(res.index, values.value)) # depends on [control=['if'], data=[]]
elif cnt < mint:
return res # error: need more elemnts, but no `sep` found. # depends on [control=['if'], data=[]]
else:
break # depends on [control=['if'], data=[]]
res = p(text, index)
if res.status:
values = values.aggregate(Value.success(res.index, [res.value]))
(index, cnt) = (res.index, cnt + 1) # depends on [control=['if'], data=[]]
elif cnt >= mint:
break # depends on [control=['if'], data=[]]
else:
return res # error: need more elements, but no `p` found.
if end is True:
res = sep(text, index)
if res.status:
(index, values) = (res.index, Value.success(res.index, values.value)) # depends on [control=['if'], data=[]]
else:
return res # error: trailing `sep` not found # depends on [control=['if'], data=[]]
if cnt >= maxt:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['cnt', 'maxt']]
return values
return sep_parser |
def delete_folder(self, folder_id, folder_etag=None, recursive=None):
    '''Delete the folder identified by `folder_id`.

    Pass `folder_etag` to guard against race conditions (the API
    responds with error 412 when the etag no longer matches).
    The `recursive` keyword does just what it says on the tin.'''
    # Only send a conditional-delete header when an etag was supplied.
    headers = {'If-Match': folder_etag} if folder_etag else dict()
    return self(
        join('folders', folder_id),
        dict(recursive=recursive),
        method='delete',
        headers=headers)
constant[Delete specified folder.
Pass folder_etag to avoid race conditions (raises error 412).
recursive keyword does just what it says on the tin.]
return[call[name[self], parameter[call[name[join], parameter[constant[folders], name[folder_id]]], call[name[dict], parameter[]]]]] | keyword[def] identifier[delete_folder] ( identifier[self] , identifier[folder_id] , identifier[folder_etag] = keyword[None] , identifier[recursive] = keyword[None] ):
literal[string]
keyword[return] identifier[self] ( identifier[join] ( literal[string] , identifier[folder_id] ),
identifier[dict] ( identifier[recursive] = identifier[recursive] ), identifier[method] = literal[string] ,
identifier[headers] ={ literal[string] : identifier[folder_etag] } keyword[if] identifier[folder_etag] keyword[else] identifier[dict] ()) | def delete_folder(self, folder_id, folder_etag=None, recursive=None):
"""Delete specified folder.
Pass folder_etag to avoid race conditions (raises error 412).
recursive keyword does just what it says on the tin."""
return self(join('folders', folder_id), dict(recursive=recursive), method='delete', headers={'If-Match': folder_etag} if folder_etag else dict()) |
def prune(t):
    """Return the currently defining instance of `t`.

    Skips over instantiated type variables and, as a side effect,
    collapses chains of instantiated variables (path compression) so
    later inspections are cheap.  Used whenever a type expression has
    to be inspected.

    Args:
        t: The type to be pruned

    Returns:
        An uninstantiated TypeVariable or a TypeOperator
    """
    if not isinstance(t, TypeVariable):
        return t
    if t.instance is None:
        return t
    # Collapse the chain so t points directly at its defining instance.
    t.instance = prune(t.instance)
    return t.instance
constant[Returns the currently defining instance of t.
As a side effect, collapses the list of type instances. The function Prune
is used whenever a type expression has to be inspected: it will always
return a type expression which is either an uninstantiated type variable or
a type operator; i.e. it will skip instantiated variables, and will
actually prune them from expressions to remove long chains of instantiated
variables.
Args:
t: The type to be pruned
Returns:
An uninstantiated TypeVariable or a TypeOperator
]
if call[name[isinstance], parameter[name[t], name[TypeVariable]]] begin[:]
if compare[name[t].instance is_not constant[None]] begin[:]
name[t].instance assign[=] call[name[prune], parameter[name[t].instance]]
return[name[t].instance]
return[name[t]] | keyword[def] identifier[prune] ( identifier[t] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[t] , identifier[TypeVariable] ):
keyword[if] identifier[t] . identifier[instance] keyword[is] keyword[not] keyword[None] :
identifier[t] . identifier[instance] = identifier[prune] ( identifier[t] . identifier[instance] )
keyword[return] identifier[t] . identifier[instance]
keyword[return] identifier[t] | def prune(t):
"""Returns the currently defining instance of t.
As a side effect, collapses the list of type instances. The function Prune
is used whenever a type expression has to be inspected: it will always
return a type expression which is either an uninstantiated type variable or
a type operator; i.e. it will skip instantiated variables, and will
actually prune them from expressions to remove long chains of instantiated
variables.
Args:
t: The type to be pruned
Returns:
An uninstantiated TypeVariable or a TypeOperator
"""
if isinstance(t, TypeVariable):
if t.instance is not None:
t.instance = prune(t.instance)
return t.instance # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return t |
def enable_tk(self, app=None):
    """Enable event loop integration with Tk.

    Parameters
    ----------
    app : toplevel :class:`Tkinter.Tk` widget, optional.
        Running toplevel widget to use. If not given, we probe Tk for an
        existing one, and create a new one if none is found.

    Notes
    -----
    If you have already created a :class:`Tkinter.Tk` object, the only
    thing done by this method is to register with the
    :class:`InputHookManager`, since creating that object automatically
    sets ``PyOS_InputHook``.
    """
    self._current_gui = GUI_TK
    if app is None:
        try:
            import Tkinter as _TK  # Python 2
        except ImportError:
            # Python 3 renamed the module; the bare `except:` used
            # previously would also have swallowed KeyboardInterrupt
            # and SystemExit.
            import tkinter as _TK  # @UnresolvedImport
        app = _TK.Tk()
        app.withdraw()
        self._apps[GUI_TK] = app
    from pydev_ipython.inputhooktk import create_inputhook_tk
    self.set_inputhook(create_inputhook_tk(app))
    return app
constant[Enable event loop integration with Tk.
Parameters
----------
app : toplevel :class:`Tkinter.Tk` widget, optional.
Running toplevel widget to use. If not given, we probe Tk for an
existing one, and create a new one if none is found.
Notes
-----
If you have already created a :class:`Tkinter.Tk` object, the only
thing done by this method is to register with the
:class:`InputHookManager`, since creating that object automatically
sets ``PyOS_InputHook``.
]
name[self]._current_gui assign[=] name[GUI_TK]
if compare[name[app] is constant[None]] begin[:]
<ast.Try object at 0x7da1b08d8370>
variable[app] assign[=] call[name[_TK].Tk, parameter[]]
call[name[app].withdraw, parameter[]]
call[name[self]._apps][name[GUI_TK]] assign[=] name[app]
from relative_module[pydev_ipython.inputhooktk] import module[create_inputhook_tk]
call[name[self].set_inputhook, parameter[call[name[create_inputhook_tk], parameter[name[app]]]]]
return[name[app]] | keyword[def] identifier[enable_tk] ( identifier[self] , identifier[app] = keyword[None] ):
literal[string]
identifier[self] . identifier[_current_gui] = identifier[GUI_TK]
keyword[if] identifier[app] keyword[is] keyword[None] :
keyword[try] :
keyword[import] identifier[Tkinter] keyword[as] identifier[_TK]
keyword[except] :
keyword[import] identifier[tkinter] keyword[as] identifier[_TK]
identifier[app] = identifier[_TK] . identifier[Tk] ()
identifier[app] . identifier[withdraw] ()
identifier[self] . identifier[_apps] [ identifier[GUI_TK] ]= identifier[app]
keyword[from] identifier[pydev_ipython] . identifier[inputhooktk] keyword[import] identifier[create_inputhook_tk]
identifier[self] . identifier[set_inputhook] ( identifier[create_inputhook_tk] ( identifier[app] ))
keyword[return] identifier[app] | def enable_tk(self, app=None):
"""Enable event loop integration with Tk.
Parameters
----------
app : toplevel :class:`Tkinter.Tk` widget, optional.
Running toplevel widget to use. If not given, we probe Tk for an
existing one, and create a new one if none is found.
Notes
-----
If you have already created a :class:`Tkinter.Tk` object, the only
thing done by this method is to register with the
:class:`InputHookManager`, since creating that object automatically
sets ``PyOS_InputHook``.
"""
self._current_gui = GUI_TK
if app is None:
try:
import Tkinter as _TK # depends on [control=['try'], data=[]]
except:
# Python 3
import tkinter as _TK # @UnresolvedImport # depends on [control=['except'], data=[]]
app = _TK.Tk()
app.withdraw()
self._apps[GUI_TK] = app # depends on [control=['if'], data=['app']]
from pydev_ipython.inputhooktk import create_inputhook_tk
self.set_inputhook(create_inputhook_tk(app))
return app |
def fetch(self, callback):
    """
    Run one full synchronization flow.

    Example::

        client = basecrm.Client(access_token='<YOUR_PERSONAL_ACCESS_TOKEN>')
        sync = basecrm.Sync(client=client, device_uuid='<YOUR_DEVICES_UUID>')
        sync.fetch(lambda meta, data: basecrm.Sync.ACK)

    :param callback: Called once for every item in the queue with two
        arguments, the synchronization meta data and the associated
        data; must return either ack (truthy) or nack (falsy).
    """
    # Open a synchronization session for this device's UUID.
    session = self.client.sync.start(self.device_uuid)
    # Nothing to synchronize when no session was established.
    if session is None or 'id' not in session:
        return
    while True:
        # Pull the next batch from the main queue.
        queue_items = self.client.sync.fetch(self.device_uuid, session['id'])
        # An empty batch means the queue is drained.
        if not queue_items:
            return
        # Hand each item to the caller and collect the ack keys of the
        # items it acknowledged.
        ack_keys = [item['meta']['sync']['ack_key']
                    for item in queue_items
                    if callback(item['meta'], item['data'])]
        # Acknowledgement keys must be sent back before fetching more.
        if ack_keys:
            self.client.sync.ack(self.device_uuid, ack_keys)
constant[
Perform a full synchronization flow.
.. code-block:: python
:linenos:
>>> client = basecrm.Client(access_token='<YOUR_PERSONAL_ACCESS_TOKEN>')
>>> sync = basecrm.Sync(client=client, device_uuid='<YOUR_DEVICES_UUID>')
>>> sync.fetch(lambda meta, data: basecrm.Sync.ACK)
:param callback: Callback that will be called for every item in a queue.
Takes two input arguments: synchronization meta data and assodicated data.
It must return either ack or nack.
]
variable[session] assign[=] call[name[self].client.sync.start, parameter[name[self].device_uuid]]
if <ast.BoolOp object at 0x7da18dc075e0> begin[:]
return[None]
while constant[True] begin[:]
variable[queue_items] assign[=] call[name[self].client.sync.fetch, parameter[name[self].device_uuid, call[name[session]][constant[id]]]]
if <ast.UnaryOp object at 0x7da18dc04c40> begin[:]
break
variable[ack_keys] assign[=] list[[]]
for taget[name[item]] in starred[name[queue_items]] begin[:]
if call[name[callback], parameter[call[name[item]][constant[meta]], call[name[item]][constant[data]]]] begin[:]
call[name[ack_keys].append, parameter[call[call[call[name[item]][constant[meta]]][constant[sync]]][constant[ack_key]]]]
if name[ack_keys] begin[:]
call[name[self].client.sync.ack, parameter[name[self].device_uuid, name[ack_keys]]] | keyword[def] identifier[fetch] ( identifier[self] , identifier[callback] ):
literal[string]
identifier[session] = identifier[self] . identifier[client] . identifier[sync] . identifier[start] ( identifier[self] . identifier[device_uuid] )
keyword[if] identifier[session] keyword[is] keyword[None] keyword[or] literal[string] keyword[not] keyword[in] identifier[session] :
keyword[return]
keyword[while] keyword[True] :
identifier[queue_items] = identifier[self] . identifier[client] . identifier[sync] . identifier[fetch] ( identifier[self] . identifier[device_uuid] , identifier[session] [ literal[string] ])
keyword[if] keyword[not] identifier[queue_items] :
keyword[break]
identifier[ack_keys] =[]
keyword[for] identifier[item] keyword[in] identifier[queue_items] :
keyword[if] identifier[callback] ( identifier[item] [ literal[string] ], identifier[item] [ literal[string] ]):
identifier[ack_keys] . identifier[append] ( identifier[item] [ literal[string] ][ literal[string] ][ literal[string] ])
keyword[if] identifier[ack_keys] :
identifier[self] . identifier[client] . identifier[sync] . identifier[ack] ( identifier[self] . identifier[device_uuid] , identifier[ack_keys] ) | def fetch(self, callback):
"""
Perform a full synchronization flow.
.. code-block:: python
:linenos:
>>> client = basecrm.Client(access_token='<YOUR_PERSONAL_ACCESS_TOKEN>')
>>> sync = basecrm.Sync(client=client, device_uuid='<YOUR_DEVICES_UUID>')
>>> sync.fetch(lambda meta, data: basecrm.Sync.ACK)
:param callback: Callback that will be called for every item in a queue.
Takes two input arguments: synchronization meta data and assodicated data.
It must return either ack or nack.
"""
# Set up a new synchronization session for a given device's UUID
session = self.client.sync.start(self.device_uuid)
# Check if there is anything to synchronize
if session is None or 'id' not in session:
return # depends on [control=['if'], data=[]]
# Drain the main queue until there is no more data (empty array)
while True:
# Fetch the main queue
queue_items = self.client.sync.fetch(self.device_uuid, session['id'])
# nothing more to synchronize ?
if not queue_items:
break # depends on [control=['if'], data=[]]
# let client know about both data and meta
ack_keys = []
for item in queue_items:
if callback(item['meta'], item['data']):
ack_keys.append(item['meta']['sync']['ack_key']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
# As we fetch new data, we need to send acknowledgement keys
# if any ..
if ack_keys:
self.client.sync.ack(self.device_uuid, ack_keys) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
def str_to_datetime(s, context='datetime'):
    """Parse a W3C Datetime string, return seconds since the Unix epoch.

    The sitemaps.org specification says that <lastmod> values must
    comply with the W3C Datetime format
    (http://www.w3.org/TR/NOTE-datetime), a restricted subset of
    ISO8601 in which every form that includes a time must also include
    a timezone indication. The allowed forms are:

      YYYY                       (eg 1997)
      YYYY-MM                    (eg 1997-07)
      YYYY-MM-DD                 (eg 1997-07-16)
      YYYY-MM-DDThh:mmTZD        (eg 1997-07-16T19:20+01:00)
      YYYY-MM-DDThh:mm:ssTZD     (eg 1997-07-16T19:20:30+01:00)
      YYYY-MM-DDThh:mm:ss.sTZD   (eg 1997-07-16T19:20:30.45+01:00)

    where TZD is a time zone designator (Z or +hh:mm or -hh:mm).

    YYYY and YYYY-MM are interpreted as YYYY-01-01 and YYYY-MM-01
    respectively; date-only forms are taken as 00:00:00.0 UTC, and
    datetimes not specified to the level of seconds as 00.0 seconds.

    Args:
        s: the string to parse; None is passed through unchanged.
        context: label used in error messages.

    Returns:
        Seconds since the epoch (int, or float when fractional seconds
        are present), or None when `s` is None.

    Raises:
        ValueError: when `s` is empty or not in an allowed form.
    """
    if s is None:
        return None
    if s == '':
        raise ValueError('Attempt to set empty %s' % (context))
    # Expand a bare date (YYYY, YYYY-MM or YYYY-MM-DD) into a full
    # datetime at midnight UTC.
    m = re.match(r"\d\d\d\d(\-\d\d(\-\d\d)?)?$", s)
    if m is not None:
        if m.group(1) is None:
            s += '-01-01'
        elif m.group(2) is None:
            s += '-01'
        s += 'T00:00:00Z'
    # Chop out fractional seconds, if present, keeping them to add
    # back at the end.
    m = re.match(r"(.*\d{2}:\d{2}:\d{2})(\.\d+)([^\d].*)?$", s)
    fractional_seconds = 0
    if m is not None:
        s = m.group(1)
        if m.group(3) is not None:
            s += m.group(3)
        fractional_seconds = float(m.group(2))
    # Check that only the allowed forms were supplied and separate out
    # the timezone designator to handle its offset explicitly.
    m = re.match(r"(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d(:\d\d)?)(Z|([+-])"
                 r"(\d\d):(\d\d))$", s)
    if m is None:
        raise ValueError("Bad datetime format (%s)" % s)
    # The form is fully validated above, so the strict stdlib parser
    # suffices; group(2) tells us whether seconds were supplied.
    from datetime import datetime as _datetime
    fmt = '%Y-%m-%dT%H:%M:%S' if m.group(2) else '%Y-%m-%dT%H:%M'
    dt = _datetime.strptime(m.group(1), fmt)
    offset_seconds = 0
    if m.group(3) != 'Z':
        hh = int(m.group(5))
        mm = int(m.group(6))
        if hh > 23 or mm > 59:
            raise ValueError("Bad timezone offset (%s)" % s)
        offset_seconds = hh * 3600 + mm * 60
        if m.group(4) == '-':
            offset_seconds = -offset_seconds
    # timetuple() carries no timezone information. The designator means
    # local = UTC + offset, so convert to UTC by SUBTRACTING the offset
    # (the previous code added it, shifting results the wrong way),
    # then add back any fractional seconds.
    return timegm(dt.timetuple()) - offset_seconds + fractional_seconds
constant[Set timestamp from an W3C Datetime Last-Modified value.
The sitemaps.org specification says that <lastmod> values
must comply with the W3C Datetime format
(http://www.w3.org/TR/NOTE-datetime). This is a restricted
subset of ISO8601. In particular, all forms that include a
time must include a timezone indication so there is no
notion of local time (which would be tricky on the web). The
forms allowed are:
Year:
YYYY (eg 1997)
Year and month:
YYYY-MM (eg 1997-07)
Complete date:
YYYY-MM-DD (eg 1997-07-16)
Complete date plus hours and minutes:
YYYY-MM-DDThh:mmTZD (eg 1997-07-16T19:20+01:00)
Complete date plus hours, minutes and seconds:
YYYY-MM-DDThh:mm:ssTZD (eg 1997-07-16T19:20:30+01:00)
Complete date plus hours, minutes, seconds and a decimal fraction
of a second
YYYY-MM-DDThh:mm:ss.sTZD (eg 1997-07-16T19:20:30.45+01:00)
where:
TZD = time zone designator (Z or +hh:mm or -hh:mm)
We do not anticipate the YYYY and YYYY-MM forms being used but
interpret them as YYYY-01-01 and YYYY-MM-01 respectively. All
dates are interpreted as having time 00:00:00.0 UTC.
Datetimes not specified to the level of seconds are intepreted
as 00.0 seconds.
]
variable[t] assign[=] constant[None]
if compare[name[s] is constant[None]] begin[:]
return[name[t]]
if compare[name[s] equal[==] constant[]] begin[:]
<ast.Raise object at 0x7da1b253ff70>
variable[m] assign[=] call[name[re].match, parameter[constant[\d\d\d\d(\-\d\d(\-\d\d)?)?$], name[s]]]
if compare[name[m] is_not constant[None]] begin[:]
if compare[call[name[m].group, parameter[constant[1]]] is constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b253f280>
<ast.AugAssign object at 0x7da1b253ed70>
variable[m] assign[=] call[name[re].match, parameter[constant[(.*\d{2}:\d{2}:\d{2})(\.\d+)([^\d].*)?$], name[s]]]
variable[fractional_seconds] assign[=] constant[0]
if compare[name[m] is_not constant[None]] begin[:]
variable[s] assign[=] call[name[m].group, parameter[constant[1]]]
if compare[call[name[m].group, parameter[constant[3]]] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b253fb80>
variable[fractional_seconds] assign[=] call[name[float], parameter[call[name[m].group, parameter[constant[2]]]]]
variable[m] assign[=] call[name[re].match, parameter[constant[(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d(:\d\d)?)(Z|([+-])(\d\d):(\d\d))$], name[s]]]
if compare[name[m] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b253d960>
variable[str] assign[=] binary_operation[call[name[m].group, parameter[constant[1]]] + constant[Z]]
variable[dt] assign[=] call[name[dateutil_parser].parse, parameter[name[str]]]
variable[offset_seconds] assign[=] constant[0]
if compare[call[name[m].group, parameter[constant[3]]] not_equal[!=] constant[Z]] begin[:]
variable[hh] assign[=] call[name[int], parameter[call[name[m].group, parameter[constant[5]]]]]
variable[mm] assign[=] call[name[int], parameter[call[name[m].group, parameter[constant[6]]]]]
if <ast.BoolOp object at 0x7da1b253d480> begin[:]
<ast.Raise object at 0x7da1b253f730>
variable[offset_seconds] assign[=] binary_operation[binary_operation[name[hh] * constant[3600]] + binary_operation[name[mm] * constant[60]]]
if compare[call[name[m].group, parameter[constant[4]]] equal[==] constant[-]] begin[:]
variable[offset_seconds] assign[=] <ast.UnaryOp object at 0x7da1b253ec50>
return[binary_operation[binary_operation[call[name[timegm], parameter[call[name[dt].timetuple, parameter[]]]] + name[offset_seconds]] + name[fractional_seconds]]] | keyword[def] identifier[str_to_datetime] ( identifier[s] , identifier[context] = literal[string] ):
literal[string]
identifier[t] = keyword[None]
keyword[if] ( identifier[s] keyword[is] keyword[None] ):
keyword[return] ( identifier[t] )
keyword[if] ( identifier[s] == literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[context] ))
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[s] )
keyword[if] ( identifier[m] keyword[is] keyword[not] keyword[None] ):
keyword[if] ( identifier[m] . identifier[group] ( literal[int] ) keyword[is] keyword[None] ):
identifier[s] += literal[string]
keyword[elif] ( identifier[m] . identifier[group] ( literal[int] ) keyword[is] keyword[None] ):
identifier[s] += literal[string]
identifier[s] += literal[string]
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[s] )
identifier[fractional_seconds] = literal[int]
keyword[if] ( identifier[m] keyword[is] keyword[not] keyword[None] ):
identifier[s] = identifier[m] . identifier[group] ( literal[int] )
keyword[if] ( identifier[m] . identifier[group] ( literal[int] ) keyword[is] keyword[not] keyword[None] ):
identifier[s] += identifier[m] . identifier[group] ( literal[int] )
identifier[fractional_seconds] = identifier[float] ( identifier[m] . identifier[group] ( literal[int] ))
identifier[m] = identifier[re] . identifier[match] ( literal[string]
literal[string] , identifier[s] )
keyword[if] ( identifier[m] keyword[is] keyword[None] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[s] )
identifier[str] = identifier[m] . identifier[group] ( literal[int] )+ literal[string]
identifier[dt] = identifier[dateutil_parser] . identifier[parse] ( identifier[str] )
identifier[offset_seconds] = literal[int]
keyword[if] ( identifier[m] . identifier[group] ( literal[int] )!= literal[string] ):
identifier[hh] = identifier[int] ( identifier[m] . identifier[group] ( literal[int] ))
identifier[mm] = identifier[int] ( identifier[m] . identifier[group] ( literal[int] ))
keyword[if] ( identifier[hh] > literal[int] keyword[or] identifier[mm] > literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[s] )
identifier[offset_seconds] = identifier[hh] * literal[int] + identifier[mm] * literal[int]
keyword[if] ( identifier[m] . identifier[group] ( literal[int] )== literal[string] ):
identifier[offset_seconds] =- identifier[offset_seconds]
keyword[return] ( identifier[timegm] ( identifier[dt] . identifier[timetuple] ())+ identifier[offset_seconds] + identifier[fractional_seconds] ) | def str_to_datetime(s, context='datetime'):
"""Set timestamp from an W3C Datetime Last-Modified value.
The sitemaps.org specification says that <lastmod> values
must comply with the W3C Datetime format
(http://www.w3.org/TR/NOTE-datetime). This is a restricted
subset of ISO8601. In particular, all forms that include a
time must include a timezone indication so there is no
notion of local time (which would be tricky on the web). The
forms allowed are:
Year:
YYYY (eg 1997)
Year and month:
YYYY-MM (eg 1997-07)
Complete date:
YYYY-MM-DD (eg 1997-07-16)
Complete date plus hours and minutes:
YYYY-MM-DDThh:mmTZD (eg 1997-07-16T19:20+01:00)
Complete date plus hours, minutes and seconds:
YYYY-MM-DDThh:mm:ssTZD (eg 1997-07-16T19:20:30+01:00)
Complete date plus hours, minutes, seconds and a decimal fraction
of a second
YYYY-MM-DDThh:mm:ss.sTZD (eg 1997-07-16T19:20:30.45+01:00)
where:
TZD = time zone designator (Z or +hh:mm or -hh:mm)
We do not anticipate the YYYY and YYYY-MM forms being used but
interpret them as YYYY-01-01 and YYYY-MM-01 respectively. All
dates are interpreted as having time 00:00:00.0 UTC.
Datetimes not specified to the level of seconds are intepreted
as 00.0 seconds.
"""
t = None
if s is None:
return t # depends on [control=['if'], data=[]]
if s == '':
raise ValueError('Attempt to set empty %s' % context) # depends on [control=['if'], data=[]]
# Make a date into a full datetime
m = re.match('\\d\\d\\d\\d(\\-\\d\\d(\\-\\d\\d)?)?$', s)
if m is not None:
if m.group(1) is None:
s += '-01-01' # depends on [control=['if'], data=[]]
elif m.group(2) is None:
s += '-01' # depends on [control=['if'], data=[]]
s += 'T00:00:00Z' # depends on [control=['if'], data=['m']]
# Now have datetime with timezone info
m = re.match('(.*\\d{2}:\\d{2}:\\d{2})(\\.\\d+)([^\\d].*)?$', s)
# Chop out fractional seconds if present
fractional_seconds = 0
if m is not None:
s = m.group(1)
if m.group(3) is not None:
s += m.group(3) # depends on [control=['if'], data=[]]
fractional_seconds = float(m.group(2)) # depends on [control=['if'], data=['m']]
# Now check that only allowed formats supplied (the parse
# function in dateutil is rather lax) and separate out
# timezone information to be handled separately
#
# Seems that one should be able to handle timezone offset
# with dt.tzinfo module but this has variation in behavior
# between python 2.6 and 2.7... so do here for now
m = re.match('(\\d\\d\\d\\d\\-\\d\\d\\-\\d\\dT\\d\\d:\\d\\d(:\\d\\d)?)(Z|([+-])(\\d\\d):(\\d\\d))$', s)
if m is None:
raise ValueError('Bad datetime format (%s)' % s) # depends on [control=['if'], data=[]]
str = m.group(1) + 'Z'
dt = dateutil_parser.parse(str)
offset_seconds = 0
if m.group(3) != 'Z':
hh = int(m.group(5))
mm = int(m.group(6))
if hh > 23 or mm > 59:
raise ValueError('Bad timezone offset (%s)' % s) # depends on [control=['if'], data=[]]
offset_seconds = hh * 3600 + mm * 60
if m.group(4) == '-':
offset_seconds = -offset_seconds # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# timetuple() ignores timezone information so we have to add in
# the offset here, and any fractional component of the seconds
return timegm(dt.timetuple()) + offset_seconds + fractional_seconds |
def create_build_config(self, build_config_json):
    """
    Create a build config via the "buildconfigs/" endpoint.

    :param build_config_json: JSON serialization of the build config to create
    :return: response of the POST request
    """
    url = self._build_url("buildconfigs/")
    headers = {"Content-Type": "application/json"}
    return self._post(url, data=build_config_json, headers=headers)
constant[
:return:
]
variable[url] assign[=] call[name[self]._build_url, parameter[constant[buildconfigs/]]]
return[call[name[self]._post, parameter[name[url]]]] | keyword[def] identifier[create_build_config] ( identifier[self] , identifier[build_config_json] ):
literal[string]
identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] )
keyword[return] identifier[self] . identifier[_post] ( identifier[url] , identifier[data] = identifier[build_config_json] ,
identifier[headers] ={ literal[string] : literal[string] }) | def create_build_config(self, build_config_json):
"""
:return:
"""
url = self._build_url('buildconfigs/')
return self._post(url, data=build_config_json, headers={'Content-Type': 'application/json'}) |
def doc_string(cls):
    """Get the doc string of this class.
    If this class does not have a doc string or the doc string is empty, try
    its base classes until the root base class, _ShellBase, is reached.
    CAVEAT:
        This method assumes that this class and all its super classes are
        derived from _ShellBase or object.
    """
    candidate = cls
    while True:
        text = candidate.__doc__
        if text:
            return text
        # Walk up the first-base chain; object.__doc__ is non-empty,
        # so the walk always terminates.
        candidate = candidate.__bases__[0]
constant[Get the doc string of this class.
If this class does not have a doc string or the doc string is empty, try
its base classes until the root base class, _ShellBase, is reached.
CAVEAT:
This method assumes that this class and all its super classes are
derived from _ShellBase or object.
]
variable[clz] assign[=] name[cls]
while <ast.UnaryOp object at 0x7da1b13ce770> begin[:]
variable[clz] assign[=] call[name[clz].__bases__][constant[0]]
return[name[clz].__doc__] | keyword[def] identifier[doc_string] ( identifier[cls] ):
literal[string]
identifier[clz] = identifier[cls]
keyword[while] keyword[not] identifier[clz] . identifier[__doc__] :
identifier[clz] = identifier[clz] . identifier[__bases__] [ literal[int] ]
keyword[return] identifier[clz] . identifier[__doc__] | def doc_string(cls):
"""Get the doc string of this class.
If this class does not have a doc string or the doc string is empty, try
its base classes until the root base class, _ShellBase, is reached.
CAVEAT:
This method assumes that this class and all its super classes are
derived from _ShellBase or object.
"""
clz = cls
while not clz.__doc__:
clz = clz.__bases__[0] # depends on [control=['while'], data=[]]
return clz.__doc__ |
def from_string(cls, string, *, default_func=None):
    '''Construct a Service from a string.
    If default_func is provided and any ServicePart is missing, it is called with
    default_func(protocol, part) to obtain the missing part.
    '''
    if not isinstance(string, str):
        raise TypeError(f'service must be a string: {string}')
    parts = string.split('://', 1)
    if len(parts) == 2:
        # Explicit "protocol://address" form.
        protocol, address = parts
    else:
        # No "://" separator: the whole string is either a bare protocol
        # (when default_func can supply both host and port for it) or a
        # bare address (when default_func supplies the protocol).
        item, = parts
        protocol = None
        if default_func:
            if default_func(item, ServicePart.HOST) and default_func(item, ServicePart.PORT):
                protocol, address = item, ''
            else:
                protocol, address = default_func(None, ServicePart.PROTOCOL), item
        if not protocol:
            raise ValueError(f'invalid service string: {string}')
    if default_func:
        # Bind the lower-cased protocol so NetAddress.from_string can request
        # missing host/port parts with default_func(part) only.
        default_func = partial(default_func, protocol.lower())
    address = NetAddress.from_string(address, default_func=default_func)
    return cls(protocol, address)
constant[Construct a Service from a string.
If default_func is provided and any ServicePart is missing, it is called with
default_func(protocol, part) to obtain the missing part.
]
if <ast.UnaryOp object at 0x7da18eb55750> begin[:]
<ast.Raise object at 0x7da18eb57e20>
variable[parts] assign[=] call[name[string].split, parameter[constant[://], constant[1]]]
if compare[call[name[len], parameter[name[parts]]] equal[==] constant[2]] begin[:]
<ast.Tuple object at 0x7da18eb54f70> assign[=] name[parts]
if name[default_func] begin[:]
variable[default_func] assign[=] call[name[partial], parameter[name[default_func], call[name[protocol].lower, parameter[]]]]
variable[address] assign[=] call[name[NetAddress].from_string, parameter[name[address]]]
return[call[name[cls], parameter[name[protocol], name[address]]]] | keyword[def] identifier[from_string] ( identifier[cls] , identifier[string] ,*, identifier[default_func] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[string] , identifier[str] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[parts] = identifier[string] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[len] ( identifier[parts] )== literal[int] :
identifier[protocol] , identifier[address] = identifier[parts]
keyword[else] :
identifier[item] ,= identifier[parts]
identifier[protocol] = keyword[None]
keyword[if] identifier[default_func] :
keyword[if] identifier[default_func] ( identifier[item] , identifier[ServicePart] . identifier[HOST] ) keyword[and] identifier[default_func] ( identifier[item] , identifier[ServicePart] . identifier[PORT] ):
identifier[protocol] , identifier[address] = identifier[item] , literal[string]
keyword[else] :
identifier[protocol] , identifier[address] = identifier[default_func] ( keyword[None] , identifier[ServicePart] . identifier[PROTOCOL] ), identifier[item]
keyword[if] keyword[not] identifier[protocol] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[default_func] :
identifier[default_func] = identifier[partial] ( identifier[default_func] , identifier[protocol] . identifier[lower] ())
identifier[address] = identifier[NetAddress] . identifier[from_string] ( identifier[address] , identifier[default_func] = identifier[default_func] )
keyword[return] identifier[cls] ( identifier[protocol] , identifier[address] ) | def from_string(cls, string, *, default_func=None):
"""Construct a Service from a string.
If default_func is provided and any ServicePart is missing, it is called with
default_func(protocol, part) to obtain the missing part.
"""
if not isinstance(string, str):
raise TypeError(f'service must be a string: {string}') # depends on [control=['if'], data=[]]
parts = string.split('://', 1)
if len(parts) == 2:
(protocol, address) = parts # depends on [control=['if'], data=[]]
else:
(item,) = parts
protocol = None
if default_func:
if default_func(item, ServicePart.HOST) and default_func(item, ServicePart.PORT):
(protocol, address) = (item, '') # depends on [control=['if'], data=[]]
else:
(protocol, address) = (default_func(None, ServicePart.PROTOCOL), item) # depends on [control=['if'], data=[]]
if not protocol:
raise ValueError(f'invalid service string: {string}') # depends on [control=['if'], data=[]]
if default_func:
default_func = partial(default_func, protocol.lower()) # depends on [control=['if'], data=[]]
address = NetAddress.from_string(address, default_func=default_func)
return cls(protocol, address) |
def getReadGroupSet(self, id_):
    """
    Returns the readgroup set with the specified ID.
    """
    # The compound ID embeds the ID of the parent dataset.
    compoundId = datamodel.ReadGroupSetCompoundId.parse(id_)
    return self.getDataset(compoundId.dataset_id).getReadGroupSet(id_)
constant[
Returns the readgroup set with the specified ID.
]
variable[compoundId] assign[=] call[name[datamodel].ReadGroupSetCompoundId.parse, parameter[name[id_]]]
variable[dataset] assign[=] call[name[self].getDataset, parameter[name[compoundId].dataset_id]]
return[call[name[dataset].getReadGroupSet, parameter[name[id_]]]] | keyword[def] identifier[getReadGroupSet] ( identifier[self] , identifier[id_] ):
literal[string]
identifier[compoundId] = identifier[datamodel] . identifier[ReadGroupSetCompoundId] . identifier[parse] ( identifier[id_] )
identifier[dataset] = identifier[self] . identifier[getDataset] ( identifier[compoundId] . identifier[dataset_id] )
keyword[return] identifier[dataset] . identifier[getReadGroupSet] ( identifier[id_] ) | def getReadGroupSet(self, id_):
"""
Returns the readgroup set with the specified ID.
"""
compoundId = datamodel.ReadGroupSetCompoundId.parse(id_)
dataset = self.getDataset(compoundId.dataset_id)
return dataset.getReadGroupSet(id_) |
def attribute_as_str(path, name):
    """Returns the two numbers found behind --[A-Z] in path. If several matches
    are found, the last one is returned.
    Parameters
    ----------
    path : string
        String with path of file/folder to get attribute from.
    name : string
        Name of attribute to get. Should be A-Z or a-z (implicit converted to
        uppercase).
    Returns
    -------
    string or None
        Two digit number found in path behind --name, or None if absent.
    """
    pattern = '--{}([0-9]{{2}})'.format(name.upper())
    found = re.findall(pattern, path)
    return found[-1] if found else None
constant[Returns the two numbers found behind --[A-Z] in path. If several matches
are found, the last one is returned.
Parameters
----------
path : string
String with path of file/folder to get attribute from.
name : string
Name of attribute to get. Should be A-Z or a-z (implicit converted to
uppercase).
Returns
-------
string
Returns two digit number found in path behind --name.
]
variable[matches] assign[=] call[name[re].findall, parameter[binary_operation[binary_operation[constant[--] + call[name[name].upper, parameter[]]] + constant[([0-9]{2})]], name[path]]]
if name[matches] begin[:]
return[call[name[matches]][<ast.UnaryOp object at 0x7da1afef8f70>]] | keyword[def] identifier[attribute_as_str] ( identifier[path] , identifier[name] ):
literal[string]
identifier[matches] = identifier[re] . identifier[findall] ( literal[string] + identifier[name] . identifier[upper] ()+ literal[string] , identifier[path] )
keyword[if] identifier[matches] :
keyword[return] identifier[matches] [- literal[int] ]
keyword[else] :
keyword[return] keyword[None] | def attribute_as_str(path, name):
"""Returns the two numbers found behind --[A-Z] in path. If several matches
are found, the last one is returned.
Parameters
----------
path : string
String with path of file/folder to get attribute from.
name : string
Name of attribute to get. Should be A-Z or a-z (implicit converted to
uppercase).
Returns
-------
string
Returns two digit number found in path behind --name.
"""
matches = re.findall('--' + name.upper() + '([0-9]{2})', path)
if matches:
return matches[-1] # depends on [control=['if'], data=[]]
else:
return None |
def generate(env):
    "Add RPCGEN Builders and construction variables for an Environment."
    # One builder per rpcgen output flavor (client stub, header,
    # service skeleton, XDR routines), all consuming .x protocol files.
    builders = {
        'RPCGenClient': Builder(action=rpcgen_client, suffix='_clnt.c', src_suffix='.x'),
        'RPCGenHeader': Builder(action=rpcgen_header, suffix='.h', src_suffix='.x'),
        'RPCGenService': Builder(action=rpcgen_service, suffix='_svc.c', src_suffix='.x'),
        'RPCGenXDR': Builder(action=rpcgen_xdr, suffix='_xdr.c', src_suffix='.x'),
    }
    env.Append(BUILDERS=builders)
    env['RPCGEN'] = 'rpcgen'
    # Empty flag lists by default; users append per-output-kind flags.
    for flags_var in ('RPCGENFLAGS', 'RPCGENCLIENTFLAGS', 'RPCGENHEADERFLAGS',
                      'RPCGENSERVICEFLAGS', 'RPCGENXDRFLAGS'):
        env[flags_var] = SCons.Util.CLVar('')
constant[Add RPCGEN Builders and construction variables for an Environment.]
variable[client] assign[=] call[name[Builder], parameter[]]
variable[header] assign[=] call[name[Builder], parameter[]]
variable[service] assign[=] call[name[Builder], parameter[]]
variable[xdr] assign[=] call[name[Builder], parameter[]]
call[name[env].Append, parameter[]]
call[name[env]][constant[RPCGEN]] assign[=] constant[rpcgen]
call[name[env]][constant[RPCGENFLAGS]] assign[=] call[name[SCons].Util.CLVar, parameter[constant[]]]
call[name[env]][constant[RPCGENCLIENTFLAGS]] assign[=] call[name[SCons].Util.CLVar, parameter[constant[]]]
call[name[env]][constant[RPCGENHEADERFLAGS]] assign[=] call[name[SCons].Util.CLVar, parameter[constant[]]]
call[name[env]][constant[RPCGENSERVICEFLAGS]] assign[=] call[name[SCons].Util.CLVar, parameter[constant[]]]
call[name[env]][constant[RPCGENXDRFLAGS]] assign[=] call[name[SCons].Util.CLVar, parameter[constant[]]] | keyword[def] identifier[generate] ( identifier[env] ):
literal[string]
identifier[client] = identifier[Builder] ( identifier[action] = identifier[rpcgen_client] , identifier[suffix] = literal[string] , identifier[src_suffix] = literal[string] )
identifier[header] = identifier[Builder] ( identifier[action] = identifier[rpcgen_header] , identifier[suffix] = literal[string] , identifier[src_suffix] = literal[string] )
identifier[service] = identifier[Builder] ( identifier[action] = identifier[rpcgen_service] , identifier[suffix] = literal[string] , identifier[src_suffix] = literal[string] )
identifier[xdr] = identifier[Builder] ( identifier[action] = identifier[rpcgen_xdr] , identifier[suffix] = literal[string] , identifier[src_suffix] = literal[string] )
identifier[env] . identifier[Append] ( identifier[BUILDERS] ={ literal[string] : identifier[client] ,
literal[string] : identifier[header] ,
literal[string] : identifier[service] ,
literal[string] : identifier[xdr] })
identifier[env] [ literal[string] ]= literal[string]
identifier[env] [ literal[string] ]= identifier[SCons] . identifier[Util] . identifier[CLVar] ( literal[string] )
identifier[env] [ literal[string] ]= identifier[SCons] . identifier[Util] . identifier[CLVar] ( literal[string] )
identifier[env] [ literal[string] ]= identifier[SCons] . identifier[Util] . identifier[CLVar] ( literal[string] )
identifier[env] [ literal[string] ]= identifier[SCons] . identifier[Util] . identifier[CLVar] ( literal[string] )
identifier[env] [ literal[string] ]= identifier[SCons] . identifier[Util] . identifier[CLVar] ( literal[string] ) | def generate(env):
"""Add RPCGEN Builders and construction variables for an Environment."""
client = Builder(action=rpcgen_client, suffix='_clnt.c', src_suffix='.x')
header = Builder(action=rpcgen_header, suffix='.h', src_suffix='.x')
service = Builder(action=rpcgen_service, suffix='_svc.c', src_suffix='.x')
xdr = Builder(action=rpcgen_xdr, suffix='_xdr.c', src_suffix='.x')
env.Append(BUILDERS={'RPCGenClient': client, 'RPCGenHeader': header, 'RPCGenService': service, 'RPCGenXDR': xdr})
env['RPCGEN'] = 'rpcgen'
env['RPCGENFLAGS'] = SCons.Util.CLVar('')
env['RPCGENCLIENTFLAGS'] = SCons.Util.CLVar('')
env['RPCGENHEADERFLAGS'] = SCons.Util.CLVar('')
env['RPCGENSERVICEFLAGS'] = SCons.Util.CLVar('')
env['RPCGENXDRFLAGS'] = SCons.Util.CLVar('') |
def bin_spikes(spike_times, binsz):
    """Sort spike times into bins
    :param spike_times: times of spike instances
    :type spike_times: list
    :param binsz: length of time bin to use
    :type binsz: float
    :returns: int array of bin indices, one for each element in spike_times
    """
    times = np.asarray(spike_times, dtype=float)
    # around to fix rounding errors (e.g. 0.3/0.1 == 2.999...) before
    # truncating with floor; vectorized instead of a Python-level loop.
    return np.floor(np.around(times / binsz, 5)).astype(int)
constant[Sort spike times into bins
:param spike_times: times of spike instances
:type spike_times: list
:param binsz: length of time bin to use
:type binsz: float
:returns: list of bin indicies, one for each element in spike_times
]
variable[bins] assign[=] call[name[np].empty, parameter[tuple[[<ast.Call object at 0x7da1b1eef1f0>]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1eee650>, <ast.Name object at 0x7da1b1eed180>]]] in starred[call[name[enumerate], parameter[name[spike_times]]]] begin[:]
call[name[bins]][name[i]] assign[=] call[name[np].floor, parameter[call[name[np].around, parameter[binary_operation[name[stime] / name[binsz]], constant[5]]]]]
return[name[bins]] | keyword[def] identifier[bin_spikes] ( identifier[spike_times] , identifier[binsz] ):
literal[string]
identifier[bins] = identifier[np] . identifier[empty] (( identifier[len] ( identifier[spike_times] ),), identifier[dtype] = identifier[int] )
keyword[for] identifier[i] , identifier[stime] keyword[in] identifier[enumerate] ( identifier[spike_times] ):
identifier[bins] [ identifier[i] ]= identifier[np] . identifier[floor] ( identifier[np] . identifier[around] ( identifier[stime] / identifier[binsz] , literal[int] ))
keyword[return] identifier[bins] | def bin_spikes(spike_times, binsz):
"""Sort spike times into bins
:param spike_times: times of spike instances
:type spike_times: list
:param binsz: length of time bin to use
:type binsz: float
:returns: list of bin indicies, one for each element in spike_times
"""
bins = np.empty((len(spike_times),), dtype=int)
for (i, stime) in enumerate(spike_times):
# around to fix rounding errors
bins[i] = np.floor(np.around(stime / binsz, 5)) # depends on [control=['for'], data=[]]
return bins |
def _yarn_node_metrics(self, rm_address, instance, addl_tags):
    """
    Get metrics related to YARN nodes
    """
    metrics_json = self._rest_request_to_json(rm_address, instance, YARN_NODES_PATH, addl_tags)
    # Guard clauses: bail out unless the response carries a node list.
    if not metrics_json:
        return
    nodes_wrapper = metrics_json['nodes']
    if nodes_wrapper is None or nodes_wrapper['node'] is None:
        return
    for node_json in nodes_wrapper['node']:
        tags = ['node_id:{}'.format(str(node_json['id']))]
        tags.extend(addl_tags)
        self._set_yarn_metrics_from_json(tags, node_json, YARN_NODE_METRICS)
constant[
Get metrics related to YARN nodes
]
variable[metrics_json] assign[=] call[name[self]._rest_request_to_json, parameter[name[rm_address], name[instance], name[YARN_NODES_PATH], name[addl_tags]]]
if <ast.BoolOp object at 0x7da207f03940> begin[:]
for taget[name[node_json]] in starred[call[call[name[metrics_json]][constant[nodes]]][constant[node]]] begin[:]
variable[node_id] assign[=] call[name[node_json]][constant[id]]
variable[tags] assign[=] list[[<ast.Call object at 0x7da18dc98850>]]
call[name[tags].extend, parameter[name[addl_tags]]]
call[name[self]._set_yarn_metrics_from_json, parameter[name[tags], name[node_json], name[YARN_NODE_METRICS]]] | keyword[def] identifier[_yarn_node_metrics] ( identifier[self] , identifier[rm_address] , identifier[instance] , identifier[addl_tags] ):
literal[string]
identifier[metrics_json] = identifier[self] . identifier[_rest_request_to_json] ( identifier[rm_address] , identifier[instance] , identifier[YARN_NODES_PATH] , identifier[addl_tags] )
keyword[if] identifier[metrics_json] keyword[and] identifier[metrics_json] [ literal[string] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[metrics_json] [ literal[string] ][ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[node_json] keyword[in] identifier[metrics_json] [ literal[string] ][ literal[string] ]:
identifier[node_id] = identifier[node_json] [ literal[string] ]
identifier[tags] =[ literal[string] . identifier[format] ( identifier[str] ( identifier[node_id] ))]
identifier[tags] . identifier[extend] ( identifier[addl_tags] )
identifier[self] . identifier[_set_yarn_metrics_from_json] ( identifier[tags] , identifier[node_json] , identifier[YARN_NODE_METRICS] ) | def _yarn_node_metrics(self, rm_address, instance, addl_tags):
"""
Get metrics related to YARN nodes
"""
metrics_json = self._rest_request_to_json(rm_address, instance, YARN_NODES_PATH, addl_tags)
if metrics_json and metrics_json['nodes'] is not None and (metrics_json['nodes']['node'] is not None):
for node_json in metrics_json['nodes']['node']:
node_id = node_json['id']
tags = ['node_id:{}'.format(str(node_id))]
tags.extend(addl_tags)
self._set_yarn_metrics_from_json(tags, node_json, YARN_NODE_METRICS) # depends on [control=['for'], data=['node_json']] # depends on [control=['if'], data=[]] |
def parse_div(soup, metadata, target):
    """Parse a <div> tag from the file.
    The sections in XML files are given in <div1>, <div2> and <div3>
    tags. Each such tag has a type and name (plus possibly more extra attributes).
    If the div type is found in target variable, the div is parsed
    into structured paragraphs, sentences and words.
    Otherwise, the type and name are added as metadata that nested
    subdivs inherit when they are parsed recursively.
    Parameters
    ----------
    soup: bs4.BeautifulSoup
        The parsed XML data.
    metadata: dict
        The metadata collected from parent divs.  NOTE: this dict is
        mutated (the current div's type/title may be added) before it is
        deep-copied into recursive calls.
    target: list of str
        List of <div> types, that are considered documents in the XML files.
    Returns
    -------
    list of dict
        Documents collected from this div and all nested subdivs.
    """
    documents = []
    div_type = soup.get('type', None)
    # First child of a div holds its title/head text.
    div_title = list(soup.children)[0].string.strip()
    if div_type in target:
        # This div is itself a document: extract its content.
        div_authors = soup.find_all('author')
        document = {
            'type': div_type,
            'title': div_title,
            'paragraphs': parse_paragraphs(soup)
        }
        # add author, if it exists
        if len(div_authors) > 0:
            div_author = div_authors[0].text.strip()
            document['author'] = div_author
        # add collected metadata
        for k, v in metadata.items():
            document[k] = v
        documents.append(document)
    else:
        # Not a document: record type/title so nested documents inherit them.
        metadata[div_type] = div_title
    # recurse subdivs
    subdiv_name = get_subdiv(soup.name)
    subdivs = []
    if subdiv_name is not None:
        subdivs = soup.find_all(subdiv_name)
    if len(subdivs) > 0:
        for subdiv in subdivs:
            # deepcopy so sibling branches do not see each other's metadata
            documents.extend(parse_div(subdiv, deepcopy(metadata), target))
    return documents
return documents | def function[parse_div, parameter[soup, metadata, target]]:
constant[Parse a <div> tag from the file.
The sections in XML files are given in <div1>, <div2> and <div3>
tags. Each such tag has a type and name (plus possibly more extra attributes).
If the div type is found in target variable, the div is parsed
into structured paragraphs, sentences and words.
Otherwise, the type and name are added as metadata to subdivs
and stored in.
Parameters
----------
soup: bs4.BeautifulSoup
The parsed XML data.
metdata: dict
The metadata for parent divs.
target: list of str
List of <div> types, that are considered documents in the XML files.
]
variable[documents] assign[=] list[[]]
variable[div_type] assign[=] call[name[soup].get, parameter[constant[type], constant[None]]]
variable[div_title] assign[=] call[call[call[name[list], parameter[name[soup].children]]][constant[0]].string.strip, parameter[]]
if compare[name[div_type] in name[target]] begin[:]
variable[div_authors] assign[=] call[name[soup].find_all, parameter[constant[author]]]
variable[document] assign[=] dictionary[[<ast.Constant object at 0x7da18f58c580>, <ast.Constant object at 0x7da18f58d120>, <ast.Constant object at 0x7da18f58ce20>], [<ast.Name object at 0x7da204963e80>, <ast.Name object at 0x7da204961bd0>, <ast.Call object at 0x7da2049605b0>]]
if compare[call[name[len], parameter[name[div_authors]]] greater[>] constant[0]] begin[:]
variable[div_author] assign[=] call[call[name[div_authors]][constant[0]].text.strip, parameter[]]
call[name[document]][constant[author]] assign[=] name[div_author]
for taget[tuple[[<ast.Name object at 0x7da204961a50>, <ast.Name object at 0x7da204961750>]]] in starred[call[name[metadata].items, parameter[]]] begin[:]
call[name[document]][name[k]] assign[=] name[v]
call[name[documents].append, parameter[name[document]]]
return[name[documents]] | keyword[def] identifier[parse_div] ( identifier[soup] , identifier[metadata] , identifier[target] ):
literal[string]
identifier[documents] =[]
identifier[div_type] = identifier[soup] . identifier[get] ( literal[string] , keyword[None] )
identifier[div_title] = identifier[list] ( identifier[soup] . identifier[children] )[ literal[int] ]. identifier[string] . identifier[strip] ()
keyword[if] identifier[div_type] keyword[in] identifier[target] :
identifier[div_authors] = identifier[soup] . identifier[find_all] ( literal[string] )
identifier[document] ={
literal[string] : identifier[div_type] ,
literal[string] : identifier[div_title] ,
literal[string] : identifier[parse_paragraphs] ( identifier[soup] )
}
keyword[if] identifier[len] ( identifier[div_authors] )> literal[int] :
identifier[div_author] = identifier[div_authors] [ literal[int] ]. identifier[text] . identifier[strip] ()
identifier[document] [ literal[string] ]= identifier[div_author]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[metadata] . identifier[items] ():
identifier[document] [ identifier[k] ]= identifier[v]
identifier[documents] . identifier[append] ( identifier[document] )
keyword[else] :
identifier[metadata] [ identifier[div_type] ]= identifier[div_title]
identifier[subdiv_name] = identifier[get_subdiv] ( identifier[soup] . identifier[name] )
identifier[subdivs] =[]
keyword[if] identifier[subdiv_name] keyword[is] keyword[not] keyword[None] :
identifier[subdivs] = identifier[soup] . identifier[find_all] ( identifier[subdiv_name] )
keyword[if] identifier[len] ( identifier[subdivs] )> literal[int] :
keyword[for] identifier[subdiv] keyword[in] identifier[subdivs] :
identifier[documents] . identifier[extend] ( identifier[parse_div] ( identifier[subdiv] , identifier[deepcopy] ( identifier[metadata] ), identifier[target] ))
keyword[return] identifier[documents] | def parse_div(soup, metadata, target):
"""Parse a <div> tag from the file.
The sections in XML files are given in <div1>, <div2> and <div3>
tags. Each such tag has a type and name (plus possibly more extra attributes).
If the div type is found in target variable, the div is parsed
into structured paragraphs, sentences and words.
Otherwise, the type and name are added as metadata to subdivs
and stored in.
Parameters
----------
soup: bs4.BeautifulSoup
The parsed XML data.
metdata: dict
The metadata for parent divs.
target: list of str
List of <div> types, that are considered documents in the XML files.
"""
documents = []
div_type = soup.get('type', None)
div_title = list(soup.children)[0].string.strip()
if div_type in target:
div_authors = soup.find_all('author')
document = {'type': div_type, 'title': div_title, 'paragraphs': parse_paragraphs(soup)}
# add author, if it exists
if len(div_authors) > 0:
div_author = div_authors[0].text.strip()
document['author'] = div_author # depends on [control=['if'], data=[]]
# add collected metadata
for (k, v) in metadata.items():
document[k] = v # depends on [control=['for'], data=[]]
documents.append(document) # depends on [control=['if'], data=['div_type']]
else:
metadata[div_type] = div_title
# recurse subdivs
subdiv_name = get_subdiv(soup.name)
subdivs = []
if subdiv_name is not None:
subdivs = soup.find_all(subdiv_name) # depends on [control=['if'], data=['subdiv_name']]
if len(subdivs) > 0:
for subdiv in subdivs:
documents.extend(parse_div(subdiv, deepcopy(metadata), target)) # depends on [control=['for'], data=['subdiv']] # depends on [control=['if'], data=[]]
return documents |
def _preSynapticTRNCells(self, i, j):
"""
Given a relay cell at the given coordinate, return a list of the (x,y)
coordinates of all TRN cells that project to it. This assumes a 3X3 fan-in.
:param i, j: relay cell Coordinates
:return:
"""
xmin = max(i - 1, 0)
xmax = min(i + 2, self.trnWidth)
ymin = max(j - 1, 0)
ymax = min(j + 2, self.trnHeight)
trnCells = [
(x, y) for x in range(xmin, xmax) for y in range(ymin, ymax)
]
return trnCells | def function[_preSynapticTRNCells, parameter[self, i, j]]:
constant[
Given a relay cell at the given coordinate, return a list of the (x,y)
coordinates of all TRN cells that project to it. This assumes a 3X3 fan-in.
:param i, j: relay cell Coordinates
:return:
]
variable[xmin] assign[=] call[name[max], parameter[binary_operation[name[i] - constant[1]], constant[0]]]
variable[xmax] assign[=] call[name[min], parameter[binary_operation[name[i] + constant[2]], name[self].trnWidth]]
variable[ymin] assign[=] call[name[max], parameter[binary_operation[name[j] - constant[1]], constant[0]]]
variable[ymax] assign[=] call[name[min], parameter[binary_operation[name[j] + constant[2]], name[self].trnHeight]]
variable[trnCells] assign[=] <ast.ListComp object at 0x7da2043467a0>
return[name[trnCells]] | keyword[def] identifier[_preSynapticTRNCells] ( identifier[self] , identifier[i] , identifier[j] ):
literal[string]
identifier[xmin] = identifier[max] ( identifier[i] - literal[int] , literal[int] )
identifier[xmax] = identifier[min] ( identifier[i] + literal[int] , identifier[self] . identifier[trnWidth] )
identifier[ymin] = identifier[max] ( identifier[j] - literal[int] , literal[int] )
identifier[ymax] = identifier[min] ( identifier[j] + literal[int] , identifier[self] . identifier[trnHeight] )
identifier[trnCells] =[
( identifier[x] , identifier[y] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[xmin] , identifier[xmax] ) keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[ymin] , identifier[ymax] )
]
keyword[return] identifier[trnCells] | def _preSynapticTRNCells(self, i, j):
"""
Given a relay cell at the given coordinate, return a list of the (x,y)
coordinates of all TRN cells that project to it. This assumes a 3X3 fan-in.
:param i, j: relay cell Coordinates
:return:
"""
xmin = max(i - 1, 0)
xmax = min(i + 2, self.trnWidth)
ymin = max(j - 1, 0)
ymax = min(j + 2, self.trnHeight)
trnCells = [(x, y) for x in range(xmin, xmax) for y in range(ymin, ymax)]
return trnCells |
def get_loginclass(name):
    '''
    Get the login class of the user
    .. versionadded:: 2016.3.0
    CLI Example:
    .. code-block:: bash
        salt '*' user.get_loginclass foo
    '''
    fields = __salt__['cmd.run_stdout'](['pw', 'usershow', '-n', name]).split(':')
    if len(fields) != 10:
        # Unexpected pw(8) output shape: no login class available.
        return ''
    # Field 5 of the passwd-style record is the login class.
    return fields[4]
constant[
Get the login class of the user
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' user.get_loginclass foo
]
variable[userinfo] assign[=] call[call[name[__salt__]][constant[cmd.run_stdout]], parameter[list[[<ast.Constant object at 0x7da204567070>, <ast.Constant object at 0x7da204564d30>, <ast.Constant object at 0x7da204566710>, <ast.Name object at 0x7da2045679a0>]]]]
variable[userinfo] assign[=] call[name[userinfo].split, parameter[constant[:]]]
return[<ast.IfExp object at 0x7da1b2022230>] | keyword[def] identifier[get_loginclass] ( identifier[name] ):
literal[string]
identifier[userinfo] = identifier[__salt__] [ literal[string] ]([ literal[string] , literal[string] , literal[string] , identifier[name] ])
identifier[userinfo] = identifier[userinfo] . identifier[split] ( literal[string] )
keyword[return] identifier[userinfo] [ literal[int] ] keyword[if] identifier[len] ( identifier[userinfo] )== literal[int] keyword[else] literal[string] | def get_loginclass(name):
"""
Get the login class of the user
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' user.get_loginclass foo
"""
userinfo = __salt__['cmd.run_stdout'](['pw', 'usershow', '-n', name])
userinfo = userinfo.split(':')
return userinfo[4] if len(userinfo) == 10 else '' |
def remove(self, child):
    """Remove every child element equal to *child*.

    Iterates indices in reverse so deletions do not shift the positions
    still to be visited.  (Iterating forward over ``range(len(self))``
    while deleting raises IndexError once an element has been removed,
    and can skip elements.)
    """
    for i in reversed(range(len(self))):
        if self[i] == child:
            del self[i]
constant[Remove a child element.]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self]]]]]] begin[:]
if compare[call[name[self]][name[i]] equal[==] name[child]] begin[:]
<ast.Delete object at 0x7da1b0b72320> | keyword[def] identifier[remove] ( identifier[self] , identifier[child] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] )):
keyword[if] identifier[self] [ identifier[i] ]== identifier[child] :
keyword[del] identifier[self] [ identifier[i] ] | def remove(self, child):
"""Remove a child element."""
for i in range(len(self)):
if self[i] == child:
del self[i] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] |
def get_console_size():
    """Return console size as tuple = (width, height).
    Returns (None,None) in non-interactive session.
    """
    from pandas import get_option

    display_width = get_option('display.width')
    # deprecated.
    display_height = get_option('display.max_rows')

    # width/height may be configured as None to request auto-detection,
    # but detection is only possible in an interactive shell terminal:
    # IPython frontends (notebook/qtconsole) and scripts cannot measure
    # the terminal, so they fall back to the config defaults / None.
    terminal_width = terminal_height = None
    if in_interactive_session():
        if in_ipython_frontend():
            # Frontend cannot detect terminal size: use the config
            # defaults for width and max_rows instead.
            from pandas._config.config import get_default_val

            terminal_width = get_default_val('display.width')
            terminal_height = get_default_val('display.max_rows')
        else:
            # Real shell terminal: measure it.
            terminal_width, terminal_height = get_terminal_size()

    # If the user set width/height to None (auto-detect) and we are in a
    # non-interactive script, this yields (None, None); callers must cope.
    return (display_width or terminal_width, display_height or terminal_height)
constant[Return console size as tuple = (width, height).
Returns (None,None) in non-interactive session.
]
from relative_module[pandas] import module[get_option]
variable[display_width] assign[=] call[name[get_option], parameter[constant[display.width]]]
variable[display_height] assign[=] call[name[get_option], parameter[constant[display.max_rows]]]
if call[name[in_interactive_session], parameter[]] begin[:]
if call[name[in_ipython_frontend], parameter[]] begin[:]
from relative_module[pandas._config.config] import module[get_default_val]
variable[terminal_width] assign[=] call[name[get_default_val], parameter[constant[display.width]]]
variable[terminal_height] assign[=] call[name[get_default_val], parameter[constant[display.max_rows]]]
return[tuple[[<ast.BoolOp object at 0x7da18dc9bb50>, <ast.BoolOp object at 0x7da18dc989a0>]]] | keyword[def] identifier[get_console_size] ():
literal[string]
keyword[from] identifier[pandas] keyword[import] identifier[get_option]
identifier[display_width] = identifier[get_option] ( literal[string] )
identifier[display_height] = identifier[get_option] ( literal[string] )
keyword[if] identifier[in_interactive_session] ():
keyword[if] identifier[in_ipython_frontend] ():
keyword[from] identifier[pandas] . identifier[_config] . identifier[config] keyword[import] identifier[get_default_val]
identifier[terminal_width] = identifier[get_default_val] ( literal[string] )
identifier[terminal_height] = identifier[get_default_val] ( literal[string] )
keyword[else] :
identifier[terminal_width] , identifier[terminal_height] = identifier[get_terminal_size] ()
keyword[else] :
identifier[terminal_width] , identifier[terminal_height] = keyword[None] , keyword[None]
keyword[return] ( identifier[display_width] keyword[or] identifier[terminal_width] , identifier[display_height] keyword[or] identifier[terminal_height] ) | def get_console_size():
"""Return console size as tuple = (width, height).
Returns (None,None) in non-interactive session.
"""
from pandas import get_option
display_width = get_option('display.width')
# deprecated.
display_height = get_option('display.max_rows')
# Consider
# interactive shell terminal, can detect term size
# interactive non-shell terminal (ipnb/ipqtconsole), cannot detect term
# size non-interactive script, should disregard term size
# in addition
# width,height have default values, but setting to 'None' signals
# should use Auto-Detection, But only in interactive shell-terminal.
# Simple. yeah.
if in_interactive_session():
if in_ipython_frontend():
# sane defaults for interactive non-shell terminal
# match default for width,height in config_init
from pandas._config.config import get_default_val
terminal_width = get_default_val('display.width')
terminal_height = get_default_val('display.max_rows') # depends on [control=['if'], data=[]]
else:
# pure terminal
(terminal_width, terminal_height) = get_terminal_size() # depends on [control=['if'], data=[]]
else:
(terminal_width, terminal_height) = (None, None)
# Note if the User sets width/Height to None (auto-detection)
# and we're in a script (non-inter), this will return (None,None)
# caller needs to deal.
return (display_width or terminal_width, display_height or terminal_height) |
def is_short(self, word):
    """
    Determine if the word is short. Short words
    are ones that end in a short syllable and
    have an empty R1 region.
    """
    n = len(word)
    # A non-empty R1 region (r1 inside the word) rules out shortness.
    if self.r1 < n:
        return False
    if n > 2:
        # Check the final three characters for the C-V-C short-syllable
        # shape (last consonant must not be w, x, or Y).
        tail = word[n - 3:]
        return bool(re.match("[^aeiouy][aeiouy][^aeiouwxY]", tail))
    # Two characters or fewer: short iff the word itself is vowel+consonant.
    return bool(re.match("[aeiouy][^aeiouy]", word))
constant[
Determine if the word is short. Short words
are ones that end in a short syllable and
have an empty R1 region.
]
variable[short] assign[=] constant[False]
variable[length] assign[=] call[name[len], parameter[name[word]]]
if compare[name[self].r1 greater_or_equal[>=] name[length]] begin[:]
if compare[name[length] greater[>] constant[2]] begin[:]
variable[ending] assign[=] call[name[word]][<ast.Slice object at 0x7da1b0a4ed40>]
if call[name[re].match, parameter[constant[[^aeiouy][aeiouy][^aeiouwxY]], name[ending]]] begin[:]
variable[short] assign[=] constant[True]
return[name[short]] | keyword[def] identifier[is_short] ( identifier[self] , identifier[word] ):
literal[string]
identifier[short] = keyword[False]
identifier[length] = identifier[len] ( identifier[word] )
keyword[if] identifier[self] . identifier[r1] >= identifier[length] :
keyword[if] identifier[length] > literal[int] :
identifier[ending] = identifier[word] [ identifier[length] - literal[int] :]
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[ending] ):
identifier[short] = keyword[True]
keyword[else] :
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[word] ):
identifier[short] = keyword[True]
keyword[return] identifier[short] | def is_short(self, word):
"""
Determine if the word is short. Short words
are ones that end in a short syllable and
have an empty R1 region.
"""
short = False
length = len(word)
if self.r1 >= length:
if length > 2:
ending = word[length - 3:]
if re.match('[^aeiouy][aeiouy][^aeiouwxY]', ending):
short = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['length']]
elif re.match('[aeiouy][^aeiouy]', word):
short = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['length']]
return short |
def scan(s):
    """Tokenize the XPath expression *s*.

    Returns a list of XPathTok tokens, or raises XPathError on failure
    (the original summary said SyntaxError, but the code raises
    XPathError for both unmatched input and unknown axes).

    Applies the lexical disambiguation rules of XPath 1.0 spec section
    3.7: whether '*' is the multiply operator or a wildcard, and whether
    a name is an operator, function name, node type, axis, or plain
    name.  ``line``/``linepos`` track the 1-based source position of
    each token for error reporting.
    """
    line = 1
    linepos = 1
    pos = 0
    toks = []
    while pos < len(s):
        matched = False
        # Try each (token-name, compiled-regex) pair in declared order;
        # the first pattern that matches at ``pos`` wins.
        for (tokname, r) in patterns:
            m = r.match(s, pos)
            if m is not None:
                # found a matching token
                v = m.group(0)
                # Token preceding this one, consulted by the
                # context-sensitive rules below (presumably skips
                # whitespace tokens -- see _preceding_token).
                prec = _preceding_token(toks)
                if tokname == 'STAR' and prec is not None and _is_special(prec):
                    # XPath 1.0 spec, 3.7 special rule 1a
                    # interpret '*' as a wildcard
                    tok = XPathTok('wildcard', v, line, linepos)
                elif (tokname == 'name' and
                      prec is not None and not _is_special(prec) and
                      v in operators):
                    # XPath 1.0 spec, 3.7 special rule 1b
                    # interpret the name as an operator
                    tok = XPathTok(operators[v], v, line, linepos)
                elif tokname == 'name':
                    # check if next token is '('
                    if re_open_para.match(s, pos + len(v)):
                        # XPath 1.0 spec, 3.7 special rule 2
                        if v in node_types:
                            # XPath 1.0 spec, 3.7 special rule 2a
                            tok = XPathTok('node_type', v, line, linepos)
                        else:
                            # XPath 1.0 spec, 3.7 special rule 2b
                            tok = XPathTok('function_name', v, line, linepos)
                    # check if next token is '::'
                    elif re_axis.match(s, pos + len(v)):
                        # XPath 1.0 spec, 3.7 special rule 3
                        if v in axes:
                            tok = XPathTok('axis', v, line, linepos)
                        else:
                            e = "unknown axis %s" % v
                            raise XPathError(e, line, linepos)
                    else:
                        tok = XPathTok('name', v, line, linepos)
                else:
                    tok = XPathTok(tokname, v, line, linepos)
                if tokname == '_whitespace':
                    # Whitespace may span newlines: bump the line counter
                    # and reset the column to the offset past the last
                    # '\n'; otherwise just advance the column.
                    n = v.count('\n')
                    if n > 0:
                        line = line + n
                        linepos = len(v) - v.rfind('\n')
                    else:
                        linepos += len(v)
                else:
                    linepos += len(v)
                pos += len(v)
                # Note: whitespace tokens are appended to the output too.
                toks.append(tok)
                matched = True
                break
        if matched == False:
            # no patterns matched
            raise XPathError('syntax error', line, linepos)
    return toks
constant[Return a list of tokens, or throw SyntaxError on failure.
]
variable[line] assign[=] constant[1]
variable[linepos] assign[=] constant[1]
variable[pos] assign[=] constant[0]
variable[toks] assign[=] list[[]]
while compare[name[pos] less[<] call[name[len], parameter[name[s]]]] begin[:]
variable[matched] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b17d7b20>, <ast.Name object at 0x7da1b17d74f0>]]] in starred[name[patterns]] begin[:]
variable[m] assign[=] call[name[r].match, parameter[name[s], name[pos]]]
if compare[name[m] is_not constant[None]] begin[:]
variable[v] assign[=] call[name[m].group, parameter[constant[0]]]
variable[prec] assign[=] call[name[_preceding_token], parameter[name[toks]]]
if <ast.BoolOp object at 0x7da204566aa0> begin[:]
variable[tok] assign[=] call[name[XPathTok], parameter[constant[wildcard], name[v], name[line], name[linepos]]]
if compare[name[tokname] equal[==] constant[_whitespace]] begin[:]
variable[n] assign[=] call[name[v].count, parameter[constant[
]]]
if compare[name[n] greater[>] constant[0]] begin[:]
variable[line] assign[=] binary_operation[name[line] + name[n]]
variable[linepos] assign[=] binary_operation[call[name[len], parameter[name[v]]] - call[name[v].rfind, parameter[constant[
]]]]
<ast.AugAssign object at 0x7da2045672b0>
call[name[toks].append, parameter[name[tok]]]
variable[matched] assign[=] constant[True]
break
if compare[name[matched] equal[==] constant[False]] begin[:]
<ast.Raise object at 0x7da204565180>
return[name[toks]] | keyword[def] identifier[scan] ( identifier[s] ):
literal[string]
identifier[line] = literal[int]
identifier[linepos] = literal[int]
identifier[pos] = literal[int]
identifier[toks] =[]
keyword[while] identifier[pos] < identifier[len] ( identifier[s] ):
identifier[matched] = keyword[False]
keyword[for] ( identifier[tokname] , identifier[r] ) keyword[in] identifier[patterns] :
identifier[m] = identifier[r] . identifier[match] ( identifier[s] , identifier[pos] )
keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] :
identifier[v] = identifier[m] . identifier[group] ( literal[int] )
identifier[prec] = identifier[_preceding_token] ( identifier[toks] )
keyword[if] identifier[tokname] == literal[string] keyword[and] identifier[prec] keyword[is] keyword[not] keyword[None] keyword[and] identifier[_is_special] ( identifier[prec] ):
identifier[tok] = identifier[XPathTok] ( literal[string] , identifier[v] , identifier[line] , identifier[linepos] )
keyword[elif] ( identifier[tokname] == literal[string] keyword[and]
identifier[prec] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[_is_special] ( identifier[prec] ) keyword[and]
identifier[v] keyword[in] identifier[operators] ):
identifier[tok] = identifier[XPathTok] ( identifier[operators] [ identifier[v] ], identifier[v] , identifier[line] , identifier[linepos] )
keyword[elif] identifier[tokname] == literal[string] :
keyword[if] identifier[re_open_para] . identifier[match] ( identifier[s] , identifier[pos] + identifier[len] ( identifier[v] )):
keyword[if] identifier[v] keyword[in] identifier[node_types] :
identifier[tok] = identifier[XPathTok] ( literal[string] , identifier[v] , identifier[line] , identifier[linepos] )
keyword[else] :
identifier[tok] = identifier[XPathTok] ( literal[string] , identifier[v] , identifier[line] , identifier[linepos] )
keyword[elif] identifier[re_axis] . identifier[match] ( identifier[s] , identifier[pos] + identifier[len] ( identifier[v] )):
keyword[if] identifier[v] keyword[in] identifier[axes] :
identifier[tok] = identifier[XPathTok] ( literal[string] , identifier[v] , identifier[line] , identifier[linepos] )
keyword[else] :
identifier[e] = literal[string] % identifier[v]
keyword[raise] identifier[XPathError] ( identifier[e] , identifier[line] , identifier[linepos] )
keyword[else] :
identifier[tok] = identifier[XPathTok] ( literal[string] , identifier[v] , identifier[line] , identifier[linepos] )
keyword[else] :
identifier[tok] = identifier[XPathTok] ( identifier[tokname] , identifier[v] , identifier[line] , identifier[linepos] )
keyword[if] identifier[tokname] == literal[string] :
identifier[n] = identifier[v] . identifier[count] ( literal[string] )
keyword[if] identifier[n] > literal[int] :
identifier[line] = identifier[line] + identifier[n]
identifier[linepos] = identifier[len] ( identifier[v] )- identifier[v] . identifier[rfind] ( literal[string] )
keyword[else] :
identifier[linepos] += identifier[len] ( identifier[v] )
keyword[else] :
identifier[linepos] += identifier[len] ( identifier[v] )
identifier[pos] += identifier[len] ( identifier[v] )
identifier[toks] . identifier[append] ( identifier[tok] )
identifier[matched] = keyword[True]
keyword[break]
keyword[if] identifier[matched] == keyword[False] :
keyword[raise] identifier[XPathError] ( literal[string] , identifier[line] , identifier[linepos] )
keyword[return] identifier[toks] | def scan(s):
"""Return a list of tokens, or throw SyntaxError on failure.
"""
line = 1
linepos = 1
pos = 0
toks = []
while pos < len(s):
matched = False
for (tokname, r) in patterns:
m = r.match(s, pos)
if m is not None:
# found a matching token
v = m.group(0)
prec = _preceding_token(toks)
if tokname == 'STAR' and prec is not None and _is_special(prec):
# XPath 1.0 spec, 3.7 special rule 1a
# interpret '*' as a wildcard
tok = XPathTok('wildcard', v, line, linepos) # depends on [control=['if'], data=[]]
elif tokname == 'name' and prec is not None and (not _is_special(prec)) and (v in operators):
# XPath 1.0 spec, 3.7 special rule 1b
# interpret the name as an operator
tok = XPathTok(operators[v], v, line, linepos) # depends on [control=['if'], data=[]]
elif tokname == 'name':
# check if next token is '('
if re_open_para.match(s, pos + len(v)):
# XPath 1.0 spec, 3.7 special rule 2
if v in node_types:
# XPath 1.0 spec, 3.7 special rule 2a
tok = XPathTok('node_type', v, line, linepos) # depends on [control=['if'], data=['v']]
else:
# XPath 1.0 spec, 3.7 special rule 2b
tok = XPathTok('function_name', v, line, linepos) # depends on [control=['if'], data=[]]
# check if next token is '::'
elif re_axis.match(s, pos + len(v)):
# XPath 1.0 spec, 3.7 special rule 3
if v in axes:
tok = XPathTok('axis', v, line, linepos) # depends on [control=['if'], data=['v']]
else:
e = 'unknown axis %s' % v
raise XPathError(e, line, linepos) # depends on [control=['if'], data=[]]
else:
tok = XPathTok('name', v, line, linepos) # depends on [control=['if'], data=[]]
else:
tok = XPathTok(tokname, v, line, linepos)
if tokname == '_whitespace':
n = v.count('\n')
if n > 0:
line = line + n
linepos = len(v) - v.rfind('\n') # depends on [control=['if'], data=['n']]
else:
linepos += len(v) # depends on [control=['if'], data=[]]
else:
linepos += len(v)
pos += len(v)
toks.append(tok)
matched = True
break # depends on [control=['if'], data=['m']] # depends on [control=['for'], data=[]]
if matched == False:
# no patterns matched
raise XPathError('syntax error', line, linepos) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['pos']]
return toks |
def add_progress(kwargs, git, progress):
    """Add the --progress flag to the given kwargs dict if supported by the
    git command. If the actual progress in the given progress instance is not
    given, we do not request any progress
    :return: possibly altered kwargs"""
    if progress is None:
        # No progress handler supplied: request nothing.
        return kwargs
    # Only git >= 1.7 understands --progress.
    major_minor = git.version_info[:2]
    if major_minor >= (1, 7):
        kwargs['progress'] = True
    return kwargs
constant[Add the --progress flag to the given kwargs dict if supported by the
git command. If the actual progress in the given progress instance is not
given, we do not request any progress
:return: possibly altered kwargs]
if compare[name[progress] is_not constant[None]] begin[:]
variable[v] assign[=] call[name[git].version_info][<ast.Slice object at 0x7da18c4cdd50>]
if compare[name[v] greater_or_equal[>=] tuple[[<ast.Constant object at 0x7da18c4cf4f0>, <ast.Constant object at 0x7da18c4ceef0>]]] begin[:]
call[name[kwargs]][constant[progress]] assign[=] constant[True]
return[name[kwargs]] | keyword[def] identifier[add_progress] ( identifier[kwargs] , identifier[git] , identifier[progress] ):
literal[string]
keyword[if] identifier[progress] keyword[is] keyword[not] keyword[None] :
identifier[v] = identifier[git] . identifier[version_info] [: literal[int] ]
keyword[if] identifier[v] >=( literal[int] , literal[int] ):
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[return] identifier[kwargs] | def add_progress(kwargs, git, progress):
"""Add the --progress flag to the given kwargs dict if supported by the
git command. If the actual progress in the given progress instance is not
given, we do not request any progress
:return: possibly altered kwargs"""
if progress is not None:
v = git.version_info[:2]
if v >= (1, 7):
kwargs['progress'] = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# END handle --progress
# END handle progress
return kwargs |
def cell_exception(self, cell, cell_index=None, **kwargs):
    """
    Set metadata when an exception is raised.

    Called by engines when an exception is raised within a notebook to
    set the metadata on the notebook indicating the location of the
    failure.
    """
    # Flag the failing cell itself...
    cell_meta = cell.metadata.papermill
    cell_meta['exception'] = True
    cell_meta['status'] = self.FAILED
    # ...and mark the notebook as a whole as having raised.
    self.nb.metadata.papermill['exception'] = True
constant[
Set metadata when an exception is raised.
Called by engines when an exception is raised within a notebook to
set the metadata on the notebook indicating the location of the
failure.
]
call[name[cell].metadata.papermill][constant[exception]] assign[=] constant[True]
call[name[cell].metadata.papermill][constant[status]] assign[=] name[self].FAILED
call[name[self].nb.metadata.papermill][constant[exception]] assign[=] constant[True] | keyword[def] identifier[cell_exception] ( identifier[self] , identifier[cell] , identifier[cell_index] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[cell] . identifier[metadata] . identifier[papermill] [ literal[string] ]= keyword[True]
identifier[cell] . identifier[metadata] . identifier[papermill] [ literal[string] ]= identifier[self] . identifier[FAILED]
identifier[self] . identifier[nb] . identifier[metadata] . identifier[papermill] [ literal[string] ]= keyword[True] | def cell_exception(self, cell, cell_index=None, **kwargs):
"""
Set metadata when an exception is raised.
Called by engines when an exception is raised within a notebook to
set the metadata on the notebook indicating the location of the
failure.
"""
cell.metadata.papermill['exception'] = True
cell.metadata.papermill['status'] = self.FAILED
self.nb.metadata.papermill['exception'] = True |
def _set_motion_detection(self, enable):
    """Set desired motion detection state on camera.

    Mutates the cached ``self._motion_detection_xml`` document's
    ``enabled`` element and PUTs the serialized XML to the camera's
    ISAPI motion-detection endpoint.  Returns early (leaving
    ``self.motion_detection`` untouched) if the cached XML lacks the
    element, the request raises, or the camera answers 401; otherwise
    ``self.motion_detection`` is updated -- even on a non-200 response
    (see NOTE below).
    """
    url = ('%s/ISAPI/System/Video/inputs/'
           'channels/1/motionDetection') % self.root_url
    enabled = self._motion_detection_xml.find(self.element_query('enabled'))
    if enabled is None:
        # Cached XML is not in the expected shape; dump it to aid debugging.
        _LOGGING.error("Couldn't find 'enabled' in the xml")
        _LOGGING.error('XML: %s', ET.tostring(self._motion_detection_xml))
        return
    # ISAPI expects the literal strings 'true'/'false'.
    enabled.text = 'true' if enable else 'false'
    xml = ET.tostring(self._motion_detection_xml)
    try:
        response = self.hik_request.put(url, data=xml, timeout=CONNECT_TIMEOUT)
    except (requests.exceptions.RequestException,
            requests.exceptions.ConnectionError) as err:
        _LOGGING.error('Unable to set MotionDetection, error: %s', err)
        return
    if response.status_code == requests.codes.unauthorized:
        _LOGGING.error('Authentication failed')
        return
    if response.status_code != requests.codes.ok:
        # If we didn't receive 200, abort
        # NOTE(review): unlike the branches above, this one does not
        # actually return, so self.motion_detection is still updated
        # below even though the camera rejected the request -- confirm
        # whether that is intentional.
        _LOGGING.error('Unable to set motion detection: %s', response.text)
    self.motion_detection = enable
constant[Set desired motion detection state on camera]
variable[url] assign[=] binary_operation[constant[%s/ISAPI/System/Video/inputs/channels/1/motionDetection] <ast.Mod object at 0x7da2590d6920> name[self].root_url]
variable[enabled] assign[=] call[name[self]._motion_detection_xml.find, parameter[call[name[self].element_query, parameter[constant[enabled]]]]]
if compare[name[enabled] is constant[None]] begin[:]
call[name[_LOGGING].error, parameter[constant[Couldn't find 'enabled' in the xml]]]
call[name[_LOGGING].error, parameter[constant[XML: %s], call[name[ET].tostring, parameter[name[self]._motion_detection_xml]]]]
return[None]
name[enabled].text assign[=] <ast.IfExp object at 0x7da1b0d0ce50>
variable[xml] assign[=] call[name[ET].tostring, parameter[name[self]._motion_detection_xml]]
<ast.Try object at 0x7da1b0d0fe80>
if compare[name[response].status_code equal[==] name[requests].codes.unauthorized] begin[:]
call[name[_LOGGING].error, parameter[constant[Authentication failed]]]
return[None]
if compare[name[response].status_code not_equal[!=] name[requests].codes.ok] begin[:]
call[name[_LOGGING].error, parameter[constant[Unable to set motion detection: %s], name[response].text]]
name[self].motion_detection assign[=] name[enable] | keyword[def] identifier[_set_motion_detection] ( identifier[self] , identifier[enable] ):
literal[string]
identifier[url] =( literal[string]
literal[string] )% identifier[self] . identifier[root_url]
identifier[enabled] = identifier[self] . identifier[_motion_detection_xml] . identifier[find] ( identifier[self] . identifier[element_query] ( literal[string] ))
keyword[if] identifier[enabled] keyword[is] keyword[None] :
identifier[_LOGGING] . identifier[error] ( literal[string] )
identifier[_LOGGING] . identifier[error] ( literal[string] , identifier[ET] . identifier[tostring] ( identifier[self] . identifier[_motion_detection_xml] ))
keyword[return]
identifier[enabled] . identifier[text] = literal[string] keyword[if] identifier[enable] keyword[else] literal[string]
identifier[xml] = identifier[ET] . identifier[tostring] ( identifier[self] . identifier[_motion_detection_xml] )
keyword[try] :
identifier[response] = identifier[self] . identifier[hik_request] . identifier[put] ( identifier[url] , identifier[data] = identifier[xml] , identifier[timeout] = identifier[CONNECT_TIMEOUT] )
keyword[except] ( identifier[requests] . identifier[exceptions] . identifier[RequestException] ,
identifier[requests] . identifier[exceptions] . identifier[ConnectionError] ) keyword[as] identifier[err] :
identifier[_LOGGING] . identifier[error] ( literal[string] , identifier[err] )
keyword[return]
keyword[if] identifier[response] . identifier[status_code] == identifier[requests] . identifier[codes] . identifier[unauthorized] :
identifier[_LOGGING] . identifier[error] ( literal[string] )
keyword[return]
keyword[if] identifier[response] . identifier[status_code] != identifier[requests] . identifier[codes] . identifier[ok] :
identifier[_LOGGING] . identifier[error] ( literal[string] , identifier[response] . identifier[text] )
identifier[self] . identifier[motion_detection] = identifier[enable] | def _set_motion_detection(self, enable):
"""Set desired motion detection state on camera"""
url = '%s/ISAPI/System/Video/inputs/channels/1/motionDetection' % self.root_url
enabled = self._motion_detection_xml.find(self.element_query('enabled'))
if enabled is None:
_LOGGING.error("Couldn't find 'enabled' in the xml")
_LOGGING.error('XML: %s', ET.tostring(self._motion_detection_xml))
return # depends on [control=['if'], data=[]]
enabled.text = 'true' if enable else 'false'
xml = ET.tostring(self._motion_detection_xml)
try:
response = self.hik_request.put(url, data=xml, timeout=CONNECT_TIMEOUT) # depends on [control=['try'], data=[]]
except (requests.exceptions.RequestException, requests.exceptions.ConnectionError) as err:
_LOGGING.error('Unable to set MotionDetection, error: %s', err)
return # depends on [control=['except'], data=['err']]
if response.status_code == requests.codes.unauthorized:
_LOGGING.error('Authentication failed')
return # depends on [control=['if'], data=[]]
if response.status_code != requests.codes.ok:
# If we didn't receive 200, abort
_LOGGING.error('Unable to set motion detection: %s', response.text) # depends on [control=['if'], data=[]]
self.motion_detection = enable |
def entropy(data):
    """
    Compute the Shannon entropy, a measure of uncertainty.

    Args:
        data: sequence of non-negative counts/weights describing a
            distribution (normalized by its sum internally).

    Returns:
        The entropy in nats (natural log), ``0.0`` when every count is
        zero, or ``None`` for an empty sequence.

    Unlike the previous implementation, zero counts are handled via the
    standard 0*log(0) == 0 convention instead of raising ``ValueError``
    from ``math.log(0)``, and an all-zero input no longer raises
    ``ZeroDivisionError``.  Results for inputs without zeros are
    unchanged.
    """
    if len(data) == 0:
        return None
    n = sum(data)
    if n == 0:
        # Degenerate all-zero input: no mass, no information.
        return 0.0
    # Skip zero fractions: lim f->0 of f*log(f) is 0.
    return -sum(f * math.log(f) for f in (float(i) / n for i in data) if f)
constant[
Compute the Shannon entropy, a measure of uncertainty.
]
if compare[call[name[len], parameter[name[data]]] equal[==] constant[0]] begin[:]
return[constant[None]]
variable[n] assign[=] call[name[sum], parameter[name[data]]]
variable[_op] assign[=] <ast.Lambda object at 0x7da1b0df5090>
return[<ast.UnaryOp object at 0x7da1b0df66b0>] | keyword[def] identifier[entropy] ( identifier[data] ):
literal[string]
keyword[if] identifier[len] ( identifier[data] )== literal[int] :
keyword[return] keyword[None]
identifier[n] = identifier[sum] ( identifier[data] )
identifier[_op] = keyword[lambda] identifier[f] : identifier[f] * identifier[math] . identifier[log] ( identifier[f] )
keyword[return] - identifier[sum] ( identifier[_op] ( identifier[float] ( identifier[i] )/ identifier[n] ) keyword[for] identifier[i] keyword[in] identifier[data] ) | def entropy(data):
"""
Compute the Shannon entropy, a measure of uncertainty.
"""
if len(data) == 0:
return None # depends on [control=['if'], data=[]]
n = sum(data)
_op = lambda f: f * math.log(f)
return -sum((_op(float(i) / n) for i in data)) |
def _set_audit_file_data(option, value):
    '''
    Helper function that sets the Advanced Audit settings in the two .csv files
    on Windows. Those files are located at:
    C:\\Windows\\Security\\Audit\\audit.csv
    C:\\Windows\\System32\\GroupPolicy\\Machine\\Microsoft\\Windows NT\\Audit\\audit.csv

    The existing audit.csv is streamed row by row into a temporary .csv:
    the row for ``option`` is rewritten (or dropped when ``value`` is
    'None'); if no row matched, a new row built from the defaults is
    appended. Only when something actually changed is the temp file
    copied over both target locations.

    Args:
        option (str): The name of the option to set
        value (str): The value to set. ['None', '0', '1', '2', '3']

    Returns:
        bool: ``True`` if successful, otherwise ``False``

    NOTE(review): ``value_written`` is first assigned inside the ``try``
    block; if opening/reading audit.csv raises before that point, the
    final ``return value_written`` would hit a NameError -- confirm
    whether callers rely on an exception propagating here anyway.
    '''
    # Set up some paths here
    system_root = os.environ.get('SystemRoot', 'C:\\Windows')
    f_audit = os.path.join(system_root, 'security', 'audit', 'audit.csv')
    f_audit_gpo = os.path.join(system_root, 'System32', 'GroupPolicy',
                               'Machine', 'Microsoft', 'Windows NT',
                               'Audit', 'audit.csv')
    # delete=False so the file survives being reopened by name below;
    # it is removed explicitly in the ``finally`` clause.
    f_temp = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv',
                                         prefix='audit')
    # Lookup dict for "Inclusion Setting" field
    auditpol_values = {'None': 'No Auditing',
                       '0': 'No Auditing',
                       '1': 'Success',
                       '2': 'Failure',
                       '3': 'Success and Failure'}
    try:
        # Open the existing audit.csv and load the csv `reader`
        with salt.utils.files.fopen(f_audit, mode='r') as csv_file:
            reader = csv.DictReader(csv_file)
            # Open the temporary .csv and load the csv `writer`
            with salt.utils.files.fopen(f_temp.name, mode='w') as tmp_file:
                writer = csv.DictWriter(tmp_file, fieldnames=reader.fieldnames)
                # Write the header values (labels)
                writer.writeheader()
                value_written = False
                # Loop through the current audit.csv and write the changes to
                # the temp csv file for existing settings
                for row in reader:
                    # If the row matches the value we're setting, update it with
                    # the new value
                    if row['Subcategory'] == option:
                        if not value == 'None':
                            # The value is not None, make the change
                            row['Inclusion Setting'] = auditpol_values[value]
                            row['Setting Value'] = value
                            log.debug('LGPO: Setting {0} to {1}'
                                      ''.format(option, value))
                            writer.writerow(row)
                        else:
                            # value is None, remove it by not writing it to the
                            # temp file
                            log.debug('LGPO: Removing {0}'.format(option))
                        # A removal also counts as a change worth copying out.
                        value_written = True
                    # If it's not the value we're setting, just write it
                    else:
                        writer.writerow(row)
                # If a value was not written, it is a new setting not found in
                # the existing audit.cvs file. Add the new setting with values
                # from the defaults
                if not value_written:
                    if not value == 'None':
                        # value is not None, write the new value
                        log.debug('LGPO: Setting {0} to {1}'
                                  ''.format(option, value))
                        defaults = _get_audit_defaults(option)
                        writer.writerow({
                            'Machine Name': defaults['Machine Name'],
                            'Policy Target': defaults['Policy Target'],
                            'Subcategory': defaults['Subcategory'],
                            'Subcategory GUID': defaults['Subcategory GUID'],
                            'Inclusion Setting': auditpol_values[value],
                            'Exclusion Setting': defaults['Exclusion Setting'],
                            'Setting Value': value})
                    value_written = True
        if value_written:
            # Copy the temporary csv file over the existing audit.csv in both
            # locations if a value was written
            __salt__['file.copy'](f_temp.name, f_audit, remove_existing=True)
            __salt__['file.makedirs'](f_audit_gpo)
            __salt__['file.copy'](f_temp.name, f_audit_gpo, remove_existing=True)
    finally:
        # Always clean up the temp file, copied or not.
        f_temp.close()
        __salt__['file.remove'](f_temp.name)
    return value_written
constant[
Helper function that sets the Advanced Audit settings in the two .csv files
on Windows. Those files are located at:
C:\Windows\Security\Audit\audit.csv
C:\Windows\System32\GroupPolicy\Machine\Microsoft\Windows NT\Audit\audit.csv
Args:
option (str): The name of the option to set
value (str): The value to set. ['None', '0', '1', '2', '3']
Returns:
bool: ``True`` if successful, otherwise ``False``
]
variable[system_root] assign[=] call[name[os].environ.get, parameter[constant[SystemRoot], constant[C:\Windows]]]
variable[f_audit] assign[=] call[name[os].path.join, parameter[name[system_root], constant[security], constant[audit], constant[audit.csv]]]
variable[f_audit_gpo] assign[=] call[name[os].path.join, parameter[name[system_root], constant[System32], constant[GroupPolicy], constant[Machine], constant[Microsoft], constant[Windows NT], constant[Audit], constant[audit.csv]]]
variable[f_temp] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]]
variable[auditpol_values] assign[=] dictionary[[<ast.Constant object at 0x7da1b1cb3010>, <ast.Constant object at 0x7da1b1cb1090>, <ast.Constant object at 0x7da1b1cb38e0>, <ast.Constant object at 0x7da1b1cb3fd0>, <ast.Constant object at 0x7da1b1cb1e40>], [<ast.Constant object at 0x7da1b1cb1b10>, <ast.Constant object at 0x7da1b1cb1de0>, <ast.Constant object at 0x7da1b1cb3e80>, <ast.Constant object at 0x7da1b1cb2350>, <ast.Constant object at 0x7da1b1cb3190>]]
<ast.Try object at 0x7da1b1cb1810>
return[name[value_written]] | keyword[def] identifier[_set_audit_file_data] ( identifier[option] , identifier[value] ):
literal[string]
identifier[system_root] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] )
identifier[f_audit] = identifier[os] . identifier[path] . identifier[join] ( identifier[system_root] , literal[string] , literal[string] , literal[string] )
identifier[f_audit_gpo] = identifier[os] . identifier[path] . identifier[join] ( identifier[system_root] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] )
identifier[f_temp] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[mode] = literal[string] , identifier[delete] = keyword[False] , identifier[suffix] = literal[string] ,
identifier[prefix] = literal[string] )
identifier[auditpol_values] ={ literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] }
keyword[try] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[f_audit] , identifier[mode] = literal[string] ) keyword[as] identifier[csv_file] :
identifier[reader] = identifier[csv] . identifier[DictReader] ( identifier[csv_file] )
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[f_temp] . identifier[name] , identifier[mode] = literal[string] ) keyword[as] identifier[tmp_file] :
identifier[writer] = identifier[csv] . identifier[DictWriter] ( identifier[tmp_file] , identifier[fieldnames] = identifier[reader] . identifier[fieldnames] )
identifier[writer] . identifier[writeheader] ()
identifier[value_written] = keyword[False]
keyword[for] identifier[row] keyword[in] identifier[reader] :
keyword[if] identifier[row] [ literal[string] ]== identifier[option] :
keyword[if] keyword[not] identifier[value] == literal[string] :
identifier[row] [ literal[string] ]= identifier[auditpol_values] [ identifier[value] ]
identifier[row] [ literal[string] ]= identifier[value]
identifier[log] . identifier[debug] ( literal[string]
literal[string] . identifier[format] ( identifier[option] , identifier[value] ))
identifier[writer] . identifier[writerow] ( identifier[row] )
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[option] ))
identifier[value_written] = keyword[True]
keyword[else] :
identifier[writer] . identifier[writerow] ( identifier[row] )
keyword[if] keyword[not] identifier[value_written] :
keyword[if] keyword[not] identifier[value] == literal[string] :
identifier[log] . identifier[debug] ( literal[string]
literal[string] . identifier[format] ( identifier[option] , identifier[value] ))
identifier[defaults] = identifier[_get_audit_defaults] ( identifier[option] )
identifier[writer] . identifier[writerow] ({
literal[string] : identifier[defaults] [ literal[string] ],
literal[string] : identifier[defaults] [ literal[string] ],
literal[string] : identifier[defaults] [ literal[string] ],
literal[string] : identifier[defaults] [ literal[string] ],
literal[string] : identifier[auditpol_values] [ identifier[value] ],
literal[string] : identifier[defaults] [ literal[string] ],
literal[string] : identifier[value] })
identifier[value_written] = keyword[True]
keyword[if] identifier[value_written] :
identifier[__salt__] [ literal[string] ]( identifier[f_temp] . identifier[name] , identifier[f_audit] , identifier[remove_existing] = keyword[True] )
identifier[__salt__] [ literal[string] ]( identifier[f_audit_gpo] )
identifier[__salt__] [ literal[string] ]( identifier[f_temp] . identifier[name] , identifier[f_audit_gpo] , identifier[remove_existing] = keyword[True] )
keyword[finally] :
identifier[f_temp] . identifier[close] ()
identifier[__salt__] [ literal[string] ]( identifier[f_temp] . identifier[name] )
keyword[return] identifier[value_written] | def _set_audit_file_data(option, value):
"""
Helper function that sets the Advanced Audit settings in the two .csv files
on Windows. Those files are located at:
C:\\Windows\\Security\\Audit\\audit.csv
C:\\Windows\\System32\\GroupPolicy\\Machine\\Microsoft\\Windows NT\\Audit\\audit.csv
Args:
option (str): The name of the option to set
value (str): The value to set. ['None', '0', '1', '2', '3']
Returns:
bool: ``True`` if successful, otherwise ``False``
"""
# Set up some paths here
system_root = os.environ.get('SystemRoot', 'C:\\Windows')
f_audit = os.path.join(system_root, 'security', 'audit', 'audit.csv')
f_audit_gpo = os.path.join(system_root, 'System32', 'GroupPolicy', 'Machine', 'Microsoft', 'Windows NT', 'Audit', 'audit.csv')
f_temp = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv', prefix='audit')
# Lookup dict for "Inclusion Setting" field
auditpol_values = {'None': 'No Auditing', '0': 'No Auditing', '1': 'Success', '2': 'Failure', '3': 'Success and Failure'}
try:
# Open the existing audit.csv and load the csv `reader`
with salt.utils.files.fopen(f_audit, mode='r') as csv_file:
reader = csv.DictReader(csv_file)
# Open the temporary .csv and load the csv `writer`
with salt.utils.files.fopen(f_temp.name, mode='w') as tmp_file:
writer = csv.DictWriter(tmp_file, fieldnames=reader.fieldnames)
# Write the header values (labels)
writer.writeheader()
value_written = False
# Loop through the current audit.csv and write the changes to
# the temp csv file for existing settings
for row in reader:
# If the row matches the value we're setting, update it with
# the new value
if row['Subcategory'] == option:
if not value == 'None':
# The value is not None, make the change
row['Inclusion Setting'] = auditpol_values[value]
row['Setting Value'] = value
log.debug('LGPO: Setting {0} to {1}'.format(option, value))
writer.writerow(row) # depends on [control=['if'], data=[]]
else:
# value is None, remove it by not writing it to the
# temp file
log.debug('LGPO: Removing {0}'.format(option))
value_written = True # depends on [control=['if'], data=['option']]
else:
# If it's not the value we're setting, just write it
writer.writerow(row) # depends on [control=['for'], data=['row']]
# If a value was not written, it is a new setting not found in
# the existing audit.cvs file. Add the new setting with values
# from the defaults
if not value_written:
if not value == 'None':
# value is not None, write the new value
log.debug('LGPO: Setting {0} to {1}'.format(option, value))
defaults = _get_audit_defaults(option)
writer.writerow({'Machine Name': defaults['Machine Name'], 'Policy Target': defaults['Policy Target'], 'Subcategory': defaults['Subcategory'], 'Subcategory GUID': defaults['Subcategory GUID'], 'Inclusion Setting': auditpol_values[value], 'Exclusion Setting': defaults['Exclusion Setting'], 'Setting Value': value}) # depends on [control=['if'], data=[]]
value_written = True # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['tmp_file']] # depends on [control=['with'], data=['csv_file']]
if value_written:
# Copy the temporary csv file over the existing audit.csv in both
# locations if a value was written
__salt__['file.copy'](f_temp.name, f_audit, remove_existing=True)
__salt__['file.makedirs'](f_audit_gpo)
__salt__['file.copy'](f_temp.name, f_audit_gpo, remove_existing=True) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
finally:
f_temp.close()
__salt__['file.remove'](f_temp.name)
return value_written |
def get_package_data():
"""get data files which will be included in the main tcod/ directory"""
BITSIZE, LINKAGE = platform.architecture()
files = [
"py.typed",
"lib/LIBTCOD-CREDITS.txt",
"lib/LIBTCOD-LICENSE.txt",
"lib/README-SDL.txt",
]
if "win32" in sys.platform:
if BITSIZE == "32bit":
files += ["x86/SDL2.dll"]
else:
files += ["x64/SDL2.dll"]
if sys.platform == "darwin":
files += ["SDL2.framework/Versions/A/SDL2"]
return files | def function[get_package_data, parameter[]]:
constant[get data files which will be included in the main tcod/ directory]
<ast.Tuple object at 0x7da18bccbfd0> assign[=] call[name[platform].architecture, parameter[]]
variable[files] assign[=] list[[<ast.Constant object at 0x7da18bcca470>, <ast.Constant object at 0x7da18bcc9630>, <ast.Constant object at 0x7da18bccbca0>, <ast.Constant object at 0x7da18bcc8880>]]
if compare[constant[win32] in name[sys].platform] begin[:]
if compare[name[BITSIZE] equal[==] constant[32bit]] begin[:]
<ast.AugAssign object at 0x7da18bcc9ff0>
if compare[name[sys].platform equal[==] constant[darwin]] begin[:]
<ast.AugAssign object at 0x7da1b1179d50>
return[name[files]] | keyword[def] identifier[get_package_data] ():
literal[string]
identifier[BITSIZE] , identifier[LINKAGE] = identifier[platform] . identifier[architecture] ()
identifier[files] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[platform] :
keyword[if] identifier[BITSIZE] == literal[string] :
identifier[files] +=[ literal[string] ]
keyword[else] :
identifier[files] +=[ literal[string] ]
keyword[if] identifier[sys] . identifier[platform] == literal[string] :
identifier[files] +=[ literal[string] ]
keyword[return] identifier[files] | def get_package_data():
"""get data files which will be included in the main tcod/ directory"""
(BITSIZE, LINKAGE) = platform.architecture()
files = ['py.typed', 'lib/LIBTCOD-CREDITS.txt', 'lib/LIBTCOD-LICENSE.txt', 'lib/README-SDL.txt']
if 'win32' in sys.platform:
if BITSIZE == '32bit':
files += ['x86/SDL2.dll'] # depends on [control=['if'], data=[]]
else:
files += ['x64/SDL2.dll'] # depends on [control=['if'], data=[]]
if sys.platform == 'darwin':
files += ['SDL2.framework/Versions/A/SDL2'] # depends on [control=['if'], data=[]]
return files |
def prior_H0(self, H0, H0_min=0, H0_max=200):
"""
checks whether the parameter vector has left its bound, if so, adds a big number
"""
if H0 < H0_min or H0 > H0_max:
penalty = -10**15
return penalty, False
else:
return 0, True | def function[prior_H0, parameter[self, H0, H0_min, H0_max]]:
constant[
checks whether the parameter vector has left its bound, if so, adds a big number
]
if <ast.BoolOp object at 0x7da18bccbdf0> begin[:]
variable[penalty] assign[=] <ast.UnaryOp object at 0x7da20e955540>
return[tuple[[<ast.Name object at 0x7da20e957010>, <ast.Constant object at 0x7da20e955330>]]] | keyword[def] identifier[prior_H0] ( identifier[self] , identifier[H0] , identifier[H0_min] = literal[int] , identifier[H0_max] = literal[int] ):
literal[string]
keyword[if] identifier[H0] < identifier[H0_min] keyword[or] identifier[H0] > identifier[H0_max] :
identifier[penalty] =- literal[int] ** literal[int]
keyword[return] identifier[penalty] , keyword[False]
keyword[else] :
keyword[return] literal[int] , keyword[True] | def prior_H0(self, H0, H0_min=0, H0_max=200):
"""
checks whether the parameter vector has left its bound, if so, adds a big number
"""
if H0 < H0_min or H0 > H0_max:
penalty = -10 ** 15
return (penalty, False) # depends on [control=['if'], data=[]]
else:
return (0, True) |
def group_perms_for_user(cls, instance, user, db_session=None):
"""
returns permissions that given user has for this resource
that are inherited from groups
:param instance:
:param user:
:param db_session:
:return:
"""
db_session = get_db_session(db_session, instance)
perms = resource_permissions_for_users(
cls.models_proxy,
ANY_PERMISSION,
resource_ids=[instance.resource_id],
user_ids=[user.id],
db_session=db_session,
)
perms = [p for p in perms if p.type == "group"]
# include all perms if user is the owner of this resource
groups_dict = dict([(g.id, g) for g in user.groups])
if instance.owner_group_id in groups_dict:
perms.append(
PermissionTuple(
user,
ALL_PERMISSIONS,
"group",
groups_dict.get(instance.owner_group_id),
instance,
True,
True,
)
)
return perms | def function[group_perms_for_user, parameter[cls, instance, user, db_session]]:
constant[
returns permissions that given user has for this resource
that are inherited from groups
:param instance:
:param user:
:param db_session:
:return:
]
variable[db_session] assign[=] call[name[get_db_session], parameter[name[db_session], name[instance]]]
variable[perms] assign[=] call[name[resource_permissions_for_users], parameter[name[cls].models_proxy, name[ANY_PERMISSION]]]
variable[perms] assign[=] <ast.ListComp object at 0x7da1b0fc5d80>
variable[groups_dict] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da1b0fc49a0>]]
if compare[name[instance].owner_group_id in name[groups_dict]] begin[:]
call[name[perms].append, parameter[call[name[PermissionTuple], parameter[name[user], name[ALL_PERMISSIONS], constant[group], call[name[groups_dict].get, parameter[name[instance].owner_group_id]], name[instance], constant[True], constant[True]]]]]
return[name[perms]] | keyword[def] identifier[group_perms_for_user] ( identifier[cls] , identifier[instance] , identifier[user] , identifier[db_session] = keyword[None] ):
literal[string]
identifier[db_session] = identifier[get_db_session] ( identifier[db_session] , identifier[instance] )
identifier[perms] = identifier[resource_permissions_for_users] (
identifier[cls] . identifier[models_proxy] ,
identifier[ANY_PERMISSION] ,
identifier[resource_ids] =[ identifier[instance] . identifier[resource_id] ],
identifier[user_ids] =[ identifier[user] . identifier[id] ],
identifier[db_session] = identifier[db_session] ,
)
identifier[perms] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[perms] keyword[if] identifier[p] . identifier[type] == literal[string] ]
identifier[groups_dict] = identifier[dict] ([( identifier[g] . identifier[id] , identifier[g] ) keyword[for] identifier[g] keyword[in] identifier[user] . identifier[groups] ])
keyword[if] identifier[instance] . identifier[owner_group_id] keyword[in] identifier[groups_dict] :
identifier[perms] . identifier[append] (
identifier[PermissionTuple] (
identifier[user] ,
identifier[ALL_PERMISSIONS] ,
literal[string] ,
identifier[groups_dict] . identifier[get] ( identifier[instance] . identifier[owner_group_id] ),
identifier[instance] ,
keyword[True] ,
keyword[True] ,
)
)
keyword[return] identifier[perms] | def group_perms_for_user(cls, instance, user, db_session=None):
"""
returns permissions that given user has for this resource
that are inherited from groups
:param instance:
:param user:
:param db_session:
:return:
"""
db_session = get_db_session(db_session, instance)
perms = resource_permissions_for_users(cls.models_proxy, ANY_PERMISSION, resource_ids=[instance.resource_id], user_ids=[user.id], db_session=db_session)
perms = [p for p in perms if p.type == 'group']
# include all perms if user is the owner of this resource
groups_dict = dict([(g.id, g) for g in user.groups])
if instance.owner_group_id in groups_dict:
perms.append(PermissionTuple(user, ALL_PERMISSIONS, 'group', groups_dict.get(instance.owner_group_id), instance, True, True)) # depends on [control=['if'], data=['groups_dict']]
return perms |
def add_substitution(self, short, medium, long, module):
"""Add the given substitutions both as a `short2long` and a
`medium2long` mapping.
Assume `variable1` is defined in the hydpy module `module1` and the
short and medium descriptions are `var1` and `mod1.var1`:
>>> import types
>>> module1 = types.ModuleType('hydpy.module1')
>>> from hydpy.core.autodoctools import Substituter
>>> substituter = Substituter()
>>> substituter.add_substitution(
... 'var1', 'mod1.var1', 'module1.variable1', module1)
>>> print(substituter.get_commands())
.. var1 replace:: module1.variable1
.. mod1.var1 replace:: module1.variable1
Adding `variable2` of `module2` has no effect on the predefined
substitutions:
>>> module2 = types.ModuleType('hydpy.module2')
>>> substituter.add_substitution(
... 'var2', 'mod2.var2', 'module2.variable2', module2)
>>> print(substituter.get_commands())
.. var1 replace:: module1.variable1
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var2 replace:: module2.variable2
But when adding `variable1` of `module2`, the `short2long` mapping
of `variable1` would become inconclusive, which is why the new
one (related to `module2`) is not stored and the old one (related
to `module1`) is removed:
>>> substituter.add_substitution(
... 'var1', 'mod2.var1', 'module2.variable1', module2)
>>> print(substituter.get_commands())
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
Adding `variable2` of `module2` accidentally again, does not
result in any undesired side-effects:
>>> substituter.add_substitution(
... 'var2', 'mod2.var2', 'module2.variable2', module2)
>>> print(substituter.get_commands())
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
In order to reduce the risk of name conflicts, only the
`medium2long` mapping is supported for modules not part of the
*HydPy* package:
>>> module3 = types.ModuleType('module3')
>>> substituter.add_substitution(
... 'var3', 'mod3.var3', 'module3.variable3', module3)
>>> print(substituter.get_commands())
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
.. mod3.var3 replace:: module3.variable3
The only exception to this rule is |builtins|, for which only
the `short2long` mapping is supported (note also, that the
module name `builtins` is removed from string `long`):
>>> import builtins
>>> substituter.add_substitution(
... 'str', 'blt.str', ':func:`~builtins.str`', builtins)
>>> print(substituter.get_commands())
.. str replace:: :func:`str`
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
.. mod3.var3 replace:: module3.variable3
"""
name = module.__name__
if 'builtin' in name:
self._short2long[short] = long.split('~')[0] + long.split('.')[-1]
else:
if ('hydpy' in name) and (short not in self._blacklist):
if short in self._short2long:
if self._short2long[short] != long:
self._blacklist.add(short)
del self._short2long[short]
else:
self._short2long[short] = long
self._medium2long[medium] = long | def function[add_substitution, parameter[self, short, medium, long, module]]:
constant[Add the given substitutions both as a `short2long` and a
`medium2long` mapping.
Assume `variable1` is defined in the hydpy module `module1` and the
short and medium descriptions are `var1` and `mod1.var1`:
>>> import types
>>> module1 = types.ModuleType('hydpy.module1')
>>> from hydpy.core.autodoctools import Substituter
>>> substituter = Substituter()
>>> substituter.add_substitution(
... 'var1', 'mod1.var1', 'module1.variable1', module1)
>>> print(substituter.get_commands())
.. var1 replace:: module1.variable1
.. mod1.var1 replace:: module1.variable1
Adding `variable2` of `module2` has no effect on the predefined
substitutions:
>>> module2 = types.ModuleType('hydpy.module2')
>>> substituter.add_substitution(
... 'var2', 'mod2.var2', 'module2.variable2', module2)
>>> print(substituter.get_commands())
.. var1 replace:: module1.variable1
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var2 replace:: module2.variable2
But when adding `variable1` of `module2`, the `short2long` mapping
of `variable1` would become inconclusive, which is why the new
one (related to `module2`) is not stored and the old one (related
to `module1`) is removed:
>>> substituter.add_substitution(
... 'var1', 'mod2.var1', 'module2.variable1', module2)
>>> print(substituter.get_commands())
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
Adding `variable2` of `module2` accidentally again, does not
result in any undesired side-effects:
>>> substituter.add_substitution(
... 'var2', 'mod2.var2', 'module2.variable2', module2)
>>> print(substituter.get_commands())
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
In order to reduce the risk of name conflicts, only the
`medium2long` mapping is supported for modules not part of the
*HydPy* package:
>>> module3 = types.ModuleType('module3')
>>> substituter.add_substitution(
... 'var3', 'mod3.var3', 'module3.variable3', module3)
>>> print(substituter.get_commands())
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
.. mod3.var3 replace:: module3.variable3
The only exception to this rule is |builtins|, for which only
the `short2long` mapping is supported (note also, that the
module name `builtins` is removed from string `long`):
>>> import builtins
>>> substituter.add_substitution(
... 'str', 'blt.str', ':func:`~builtins.str`', builtins)
>>> print(substituter.get_commands())
.. str replace:: :func:`str`
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
.. mod3.var3 replace:: module3.variable3
]
variable[name] assign[=] name[module].__name__
if compare[constant[builtin] in name[name]] begin[:]
call[name[self]._short2long][name[short]] assign[=] binary_operation[call[call[name[long].split, parameter[constant[~]]]][constant[0]] + call[call[name[long].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da2054a6d70>]] | keyword[def] identifier[add_substitution] ( identifier[self] , identifier[short] , identifier[medium] , identifier[long] , identifier[module] ):
literal[string]
identifier[name] = identifier[module] . identifier[__name__]
keyword[if] literal[string] keyword[in] identifier[name] :
identifier[self] . identifier[_short2long] [ identifier[short] ]= identifier[long] . identifier[split] ( literal[string] )[ literal[int] ]+ identifier[long] . identifier[split] ( literal[string] )[- literal[int] ]
keyword[else] :
keyword[if] ( literal[string] keyword[in] identifier[name] ) keyword[and] ( identifier[short] keyword[not] keyword[in] identifier[self] . identifier[_blacklist] ):
keyword[if] identifier[short] keyword[in] identifier[self] . identifier[_short2long] :
keyword[if] identifier[self] . identifier[_short2long] [ identifier[short] ]!= identifier[long] :
identifier[self] . identifier[_blacklist] . identifier[add] ( identifier[short] )
keyword[del] identifier[self] . identifier[_short2long] [ identifier[short] ]
keyword[else] :
identifier[self] . identifier[_short2long] [ identifier[short] ]= identifier[long]
identifier[self] . identifier[_medium2long] [ identifier[medium] ]= identifier[long] | def add_substitution(self, short, medium, long, module):
"""Add the given substitutions both as a `short2long` and a
`medium2long` mapping.
Assume `variable1` is defined in the hydpy module `module1` and the
short and medium descriptions are `var1` and `mod1.var1`:
>>> import types
>>> module1 = types.ModuleType('hydpy.module1')
>>> from hydpy.core.autodoctools import Substituter
>>> substituter = Substituter()
>>> substituter.add_substitution(
... 'var1', 'mod1.var1', 'module1.variable1', module1)
>>> print(substituter.get_commands())
.. var1 replace:: module1.variable1
.. mod1.var1 replace:: module1.variable1
Adding `variable2` of `module2` has no effect on the predefined
substitutions:
>>> module2 = types.ModuleType('hydpy.module2')
>>> substituter.add_substitution(
... 'var2', 'mod2.var2', 'module2.variable2', module2)
>>> print(substituter.get_commands())
.. var1 replace:: module1.variable1
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var2 replace:: module2.variable2
But when adding `variable1` of `module2`, the `short2long` mapping
of `variable1` would become inconclusive, which is why the new
one (related to `module2`) is not stored and the old one (related
to `module1`) is removed:
>>> substituter.add_substitution(
... 'var1', 'mod2.var1', 'module2.variable1', module2)
>>> print(substituter.get_commands())
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
Adding `variable2` of `module2` accidentally again, does not
result in any undesired side-effects:
>>> substituter.add_substitution(
... 'var2', 'mod2.var2', 'module2.variable2', module2)
>>> print(substituter.get_commands())
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
In order to reduce the risk of name conflicts, only the
`medium2long` mapping is supported for modules not part of the
*HydPy* package:
>>> module3 = types.ModuleType('module3')
>>> substituter.add_substitution(
... 'var3', 'mod3.var3', 'module3.variable3', module3)
>>> print(substituter.get_commands())
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
.. mod3.var3 replace:: module3.variable3
The only exception to this rule is |builtins|, for which only
the `short2long` mapping is supported (note also, that the
module name `builtins` is removed from string `long`):
>>> import builtins
>>> substituter.add_substitution(
... 'str', 'blt.str', ':func:`~builtins.str`', builtins)
>>> print(substituter.get_commands())
.. str replace:: :func:`str`
.. var2 replace:: module2.variable2
.. mod1.var1 replace:: module1.variable1
.. mod2.var1 replace:: module2.variable1
.. mod2.var2 replace:: module2.variable2
.. mod3.var3 replace:: module3.variable3
"""
name = module.__name__
if 'builtin' in name:
self._short2long[short] = long.split('~')[0] + long.split('.')[-1] # depends on [control=['if'], data=[]]
else:
if 'hydpy' in name and short not in self._blacklist:
if short in self._short2long:
if self._short2long[short] != long:
self._blacklist.add(short)
del self._short2long[short] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['short']]
else:
self._short2long[short] = long # depends on [control=['if'], data=[]]
self._medium2long[medium] = long |
def format_blocks_section(self):
"""format blocks section.
assign_vertexid() should be called before this method, because
vertices reffered by blocks should have valid index.
"""
buf = io.StringIO()
buf.write('blocks\n')
buf.write('(\n')
for b in self.blocks.values():
buf.write(' ' + b.format(self.vertices) + '\n')
buf.write(');')
return buf.getvalue() | def function[format_blocks_section, parameter[self]]:
constant[format blocks section.
assign_vertexid() should be called before this method, because
vertices reffered by blocks should have valid index.
]
variable[buf] assign[=] call[name[io].StringIO, parameter[]]
call[name[buf].write, parameter[constant[blocks
]]]
call[name[buf].write, parameter[constant[(
]]]
for taget[name[b]] in starred[call[name[self].blocks.values, parameter[]]] begin[:]
call[name[buf].write, parameter[binary_operation[binary_operation[constant[ ] + call[name[b].format, parameter[name[self].vertices]]] + constant[
]]]]
call[name[buf].write, parameter[constant[);]]]
return[call[name[buf].getvalue, parameter[]]] | keyword[def] identifier[format_blocks_section] ( identifier[self] ):
literal[string]
identifier[buf] = identifier[io] . identifier[StringIO] ()
identifier[buf] . identifier[write] ( literal[string] )
identifier[buf] . identifier[write] ( literal[string] )
keyword[for] identifier[b] keyword[in] identifier[self] . identifier[blocks] . identifier[values] ():
identifier[buf] . identifier[write] ( literal[string] + identifier[b] . identifier[format] ( identifier[self] . identifier[vertices] )+ literal[string] )
identifier[buf] . identifier[write] ( literal[string] )
keyword[return] identifier[buf] . identifier[getvalue] () | def format_blocks_section(self):
"""format blocks section.
assign_vertexid() should be called before this method, because
vertices reffered by blocks should have valid index.
"""
buf = io.StringIO()
buf.write('blocks\n')
buf.write('(\n')
for b in self.blocks.values():
buf.write(' ' + b.format(self.vertices) + '\n') # depends on [control=['for'], data=['b']]
buf.write(');')
return buf.getvalue() |
def get_config():
"""Get the configuration from file"""
if CONFIG_FILE is not None:
configfile = CONFIG_FILE
else:
configfile = BUILTIN_CONFIG_FILE
config = {}
with open(configfile, 'r') as fp_:
config = recursive_dict_update(config, yaml.load(fp_, Loader=UnsafeLoader))
app_dirs = AppDirs('pyspectral', 'pytroll')
user_datadir = app_dirs.user_data_dir
config['rsr_dir'] = expanduser(config.get('rsr_dir', user_datadir))
config['rayleigh_dir'] = expanduser(config.get('rayleigh_dir', user_datadir))
return config | def function[get_config, parameter[]]:
constant[Get the configuration from file]
if compare[name[CONFIG_FILE] is_not constant[None]] begin[:]
variable[configfile] assign[=] name[CONFIG_FILE]
variable[config] assign[=] dictionary[[], []]
with call[name[open], parameter[name[configfile], constant[r]]] begin[:]
variable[config] assign[=] call[name[recursive_dict_update], parameter[name[config], call[name[yaml].load, parameter[name[fp_]]]]]
variable[app_dirs] assign[=] call[name[AppDirs], parameter[constant[pyspectral], constant[pytroll]]]
variable[user_datadir] assign[=] name[app_dirs].user_data_dir
call[name[config]][constant[rsr_dir]] assign[=] call[name[expanduser], parameter[call[name[config].get, parameter[constant[rsr_dir], name[user_datadir]]]]]
call[name[config]][constant[rayleigh_dir]] assign[=] call[name[expanduser], parameter[call[name[config].get, parameter[constant[rayleigh_dir], name[user_datadir]]]]]
return[name[config]] | keyword[def] identifier[get_config] ():
literal[string]
keyword[if] identifier[CONFIG_FILE] keyword[is] keyword[not] keyword[None] :
identifier[configfile] = identifier[CONFIG_FILE]
keyword[else] :
identifier[configfile] = identifier[BUILTIN_CONFIG_FILE]
identifier[config] ={}
keyword[with] identifier[open] ( identifier[configfile] , literal[string] ) keyword[as] identifier[fp_] :
identifier[config] = identifier[recursive_dict_update] ( identifier[config] , identifier[yaml] . identifier[load] ( identifier[fp_] , identifier[Loader] = identifier[UnsafeLoader] ))
identifier[app_dirs] = identifier[AppDirs] ( literal[string] , literal[string] )
identifier[user_datadir] = identifier[app_dirs] . identifier[user_data_dir]
identifier[config] [ literal[string] ]= identifier[expanduser] ( identifier[config] . identifier[get] ( literal[string] , identifier[user_datadir] ))
identifier[config] [ literal[string] ]= identifier[expanduser] ( identifier[config] . identifier[get] ( literal[string] , identifier[user_datadir] ))
keyword[return] identifier[config] | def get_config():
"""Get the configuration from file"""
if CONFIG_FILE is not None:
configfile = CONFIG_FILE # depends on [control=['if'], data=['CONFIG_FILE']]
else:
configfile = BUILTIN_CONFIG_FILE
config = {}
with open(configfile, 'r') as fp_:
config = recursive_dict_update(config, yaml.load(fp_, Loader=UnsafeLoader)) # depends on [control=['with'], data=['fp_']]
app_dirs = AppDirs('pyspectral', 'pytroll')
user_datadir = app_dirs.user_data_dir
config['rsr_dir'] = expanduser(config.get('rsr_dir', user_datadir))
config['rayleigh_dir'] = expanduser(config.get('rayleigh_dir', user_datadir))
return config |
def to_sysbase(self):
"""
Convert model parameters to system base. This function calls the
``data_to_sys_base`` function of the loaded models.
Returns
-------
None
"""
if self.config.base:
for item in self.devman.devices:
self.__dict__[item].data_to_sys_base() | def function[to_sysbase, parameter[self]]:
constant[
Convert model parameters to system base. This function calls the
``data_to_sys_base`` function of the loaded models.
Returns
-------
None
]
if name[self].config.base begin[:]
for taget[name[item]] in starred[name[self].devman.devices] begin[:]
call[call[name[self].__dict__][name[item]].data_to_sys_base, parameter[]] | keyword[def] identifier[to_sysbase] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[config] . identifier[base] :
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[devman] . identifier[devices] :
identifier[self] . identifier[__dict__] [ identifier[item] ]. identifier[data_to_sys_base] () | def to_sysbase(self):
"""
Convert model parameters to system base. This function calls the
``data_to_sys_base`` function of the loaded models.
Returns
-------
None
"""
if self.config.base:
for item in self.devman.devices:
self.__dict__[item].data_to_sys_base() # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]] |
def vrf_l3vni(self, **kwargs):
    """Configure Layer3 vni under vrf.
    Args:
        rbridge_id (str): rbridge-id for device.
        vrf_name (str): Name of the vrf (vrf101, vrf-1 etc). Optional
            when ``get`` is True (acts as a wildcard), required otherwise.
        l3vni (str): <NUMBER:1-16777215> Layer 3 VNI.
        get (bool): Get config instead of editing config. (True, False)
        delete (bool): True if the L3 vni mapping is to be deleted,
            False (the default) if it is to be configured.
        callback (function): A function executed upon completion of the
            method. The only parameter passed to `callback` will be the
            ``ElementTree`` `config`.
    Returns:
        When ``get`` is True, a list of dicts with keys ``rbridge_id``,
        ``vrf_name`` and ``l3vni`` (one entry per VRF found on the
        device); otherwise the return value of `callback`.
    Raises:
        KeyError: if `rbridge_id`, `vrf_name` or `l3vni` is not passed.
        ValueError: if `rbridge_id`, `vrf_name` or `l3vni` is invalid.
    Examples:
        >>> import pynos.device
        >>> switches = ['10.24.39.211', '10.24.39.203']
        >>> auth = ('admin', 'password')
        >>> for switch in switches:
        ...     conn = (switch, '22')
        ...     with pynos.device.Device(conn=conn, auth=auth) as dev:
        ...         output = dev.interface.vrf_l3vni(
        ...         vrf_name='vrf1', rbridge_id='2', l3vni='7201')
        ...         output = dev.interface.vrf_l3vni(rbridge_id='2',
        ...         get=True)
        ...         output = dev.interface.vrf_l3vni(rbridge_id='2',
        ...         vrf_name='vrf2', get=True)
        ...         output = dev.interface.vrf_l3vni(vrf_name='vrf1',
        ...         rbridge_id='2', l3vni='7201', delete=True)
    """
    rbridge_id = kwargs['rbridge_id']
    get_config = kwargs.pop('get', False)
    delete = kwargs.pop('delete', False)
    callback = kwargs.pop('callback', self._callback)
    result = []
    # Resolve the auto-generated binding for "rbridge-id ... vrf ... vni".
    method_class = self._rbridge
    method_name = 'rbridge_id_vrf_vni'
    vrf_vni = getattr(method_class, method_name)
    if not get_config:
        # Edit path: build the <vni> node for the named VRF and either
        # create it or (when delete=True) mark it for removal.
        vrf_name = kwargs['vrf_name']
        l3vni = kwargs['l3vni']
        vni_args = dict(rbridge_id=rbridge_id, vrf_name=vrf_name,
                        vni=l3vni)
        config = vrf_vni(**vni_args)
        if delete:
            # NETCONF delete: flag the vni element with operation="delete".
            config.find('.//*vni').set('operation', 'delete')
        result = callback(config)
    elif get_config:
        # Get path: empty vrf_name/vni act as wildcard filters so the
        # device returns every VRF.
        vrf_name = kwargs.pop('vrf_name', '')
        vni_args = dict(rbridge_id=rbridge_id, vrf_name=vrf_name,
                        vni='')
        config = vrf_vni(**vni_args)
        output = callback(config, handler='get_config')
        # '{*}' matches any XML namespace in the returned config.
        for item in output.data.findall('.//{*}vrf'):
            vrfname = item.find('.//{*}vrf-name').text
            if item.find('.//{*}vni') is not None:
                vrfvni = item.find('.//{*}vni').text
            else:
                # VRF exists but has no L3 VNI mapped yet.
                vrfvni = ''
            tmp = {'rbridge_id': rbridge_id, 'vrf_name': vrfname,
                   'l3vni': vrfvni}
            result.append(tmp)
    return result
constant[Configure Layer3 vni under vrf.
Args:
rbridge_id (str): rbridge-id for device.
vrf_name (str): Name of the vrf (vrf101, vrf-1 etc).
l3vni (str): <NUMBER:1-16777215> Layer 3 VNI.
get (bool): Get config instead of editing config. (True, False)
delete (bool): False the L3 vni is configured and True if its to
be deleted (True, False). Default value will be False if not
specified.
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `rbridge_id`,`vrf_name`, 'l3vni' is not passed.
ValueError: if `rbridge_id`, `vrf_name`, 'l3vni' is invalid.
Examples:
>>> import pynos.device
>>> switches = ['10.24.39.211', '10.24.39.203']
>>> auth = ('admin', 'password')
>>> for switch in switches:
... conn = (switch, '22')
... with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.interface.vrf_vni(
... vrf_name=vrf1, rbridge_id='2', l3vni ='7201')
... output = dev.interface.vrf_vni(rbridge_id='2',
... get=True)
... output = dev.interface.vrf_vni(rbridge_id='2',
... , vrf_name='vrf2' get=True)
... output = dev.interface.vrf_vni(vrf_name=vrf1,
... rbridge_id='2', l3vni ='7201', delete=True)
]
variable[rbridge_id] assign[=] call[name[kwargs]][constant[rbridge_id]]
variable[get_config] assign[=] call[name[kwargs].pop, parameter[constant[get], constant[False]]]
variable[delete] assign[=] call[name[kwargs].pop, parameter[constant[delete], constant[False]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
variable[result] assign[=] list[[]]
variable[method_class] assign[=] name[self]._rbridge
variable[method_name] assign[=] constant[rbridge_id_vrf_vni]
variable[vrf_vni] assign[=] call[name[getattr], parameter[name[method_class], name[method_name]]]
if <ast.UnaryOp object at 0x7da20c6c4970> begin[:]
variable[vrf_name] assign[=] call[name[kwargs]][constant[vrf_name]]
variable[l3vni] assign[=] call[name[kwargs]][constant[l3vni]]
variable[vni_args] assign[=] call[name[dict], parameter[]]
variable[config] assign[=] call[name[vrf_vni], parameter[]]
if name[delete] begin[:]
call[call[name[config].find, parameter[constant[.//*vni]]].set, parameter[constant[operation], constant[delete]]]
variable[result] assign[=] call[name[callback], parameter[name[config]]]
return[name[result]] | keyword[def] identifier[vrf_l3vni] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[rbridge_id] = identifier[kwargs] [ literal[string] ]
identifier[get_config] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
identifier[delete] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
identifier[result] =[]
identifier[method_class] = identifier[self] . identifier[_rbridge]
identifier[method_name] = literal[string]
identifier[vrf_vni] = identifier[getattr] ( identifier[method_class] , identifier[method_name] )
keyword[if] keyword[not] identifier[get_config] :
identifier[vrf_name] = identifier[kwargs] [ literal[string] ]
identifier[l3vni] = identifier[kwargs] [ literal[string] ]
identifier[vni_args] = identifier[dict] ( identifier[rbridge_id] = identifier[rbridge_id] , identifier[vrf_name] = identifier[vrf_name] ,
identifier[vni] = identifier[l3vni] )
identifier[config] = identifier[vrf_vni] (** identifier[vni_args] )
keyword[if] identifier[delete] :
identifier[config] . identifier[find] ( literal[string] ). identifier[set] ( literal[string] , literal[string] )
identifier[result] = identifier[callback] ( identifier[config] )
keyword[elif] identifier[get_config] :
identifier[vrf_name] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
identifier[vni_args] = identifier[dict] ( identifier[rbridge_id] = identifier[rbridge_id] , identifier[vrf_name] = identifier[vrf_name] ,
identifier[vni] = literal[string] )
identifier[config] = identifier[vrf_vni] (** identifier[vni_args] )
identifier[output] = identifier[callback] ( identifier[config] , identifier[handler] = literal[string] )
keyword[for] identifier[item] keyword[in] identifier[output] . identifier[data] . identifier[findall] ( literal[string] ):
identifier[vrfname] = identifier[item] . identifier[find] ( literal[string] ). identifier[text]
keyword[if] identifier[item] . identifier[find] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[vrfvni] = identifier[item] . identifier[find] ( literal[string] ). identifier[text]
keyword[else] :
identifier[vrfvni] = literal[string]
identifier[tmp] ={ literal[string] : identifier[rbridge_id] , literal[string] : identifier[vrfname] ,
literal[string] : identifier[vrfvni] }
identifier[result] . identifier[append] ( identifier[tmp] )
keyword[return] identifier[result] | def vrf_l3vni(self, **kwargs):
"""Configure Layer3 vni under vrf.
Args:
rbridge_id (str): rbridge-id for device.
vrf_name (str): Name of the vrf (vrf101, vrf-1 etc).
l3vni (str): <NUMBER:1-16777215> Layer 3 VNI.
get (bool): Get config instead of editing config. (True, False)
delete (bool): False the L3 vni is configured and True if its to
be deleted (True, False). Default value will be False if not
specified.
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `rbridge_id`,`vrf_name`, 'l3vni' is not passed.
ValueError: if `rbridge_id`, `vrf_name`, 'l3vni' is invalid.
Examples:
>>> import pynos.device
>>> switches = ['10.24.39.211', '10.24.39.203']
>>> auth = ('admin', 'password')
>>> for switch in switches:
... conn = (switch, '22')
... with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.interface.vrf_vni(
... vrf_name=vrf1, rbridge_id='2', l3vni ='7201')
... output = dev.interface.vrf_vni(rbridge_id='2',
... get=True)
... output = dev.interface.vrf_vni(rbridge_id='2',
... , vrf_name='vrf2' get=True)
... output = dev.interface.vrf_vni(vrf_name=vrf1,
... rbridge_id='2', l3vni ='7201', delete=True)
"""
rbridge_id = kwargs['rbridge_id']
get_config = kwargs.pop('get', False)
delete = kwargs.pop('delete', False)
callback = kwargs.pop('callback', self._callback)
result = []
method_class = self._rbridge
method_name = 'rbridge_id_vrf_vni'
vrf_vni = getattr(method_class, method_name)
if not get_config:
vrf_name = kwargs['vrf_name']
l3vni = kwargs['l3vni']
vni_args = dict(rbridge_id=rbridge_id, vrf_name=vrf_name, vni=l3vni)
config = vrf_vni(**vni_args)
if delete:
config.find('.//*vni').set('operation', 'delete') # depends on [control=['if'], data=[]]
result = callback(config) # depends on [control=['if'], data=[]]
elif get_config:
vrf_name = kwargs.pop('vrf_name', '')
vni_args = dict(rbridge_id=rbridge_id, vrf_name=vrf_name, vni='')
config = vrf_vni(**vni_args)
output = callback(config, handler='get_config')
for item in output.data.findall('.//{*}vrf'):
vrfname = item.find('.//{*}vrf-name').text
if item.find('.//{*}vni') is not None:
vrfvni = item.find('.//{*}vni').text # depends on [control=['if'], data=[]]
else:
vrfvni = ''
tmp = {'rbridge_id': rbridge_id, 'vrf_name': vrfname, 'l3vni': vrfvni}
result.append(tmp) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
return result |
def _lookup_dig(name, rdtype, timeout=None, servers=None, secure=None):
    '''
    Use dig to lookup addresses
    :param name: Name of record to search
    :param rdtype: DNS record type
    :param timeout: server response timeout
    :param servers: [] of servers to use
    :return: [] of records or False if error
    '''
    cmd = 'dig {0} -t {1} '.format(DIG_OPTIONS, rdtype)
    if servers:
        for srv in servers:
            cmd += '@{0} '.format(srv)
    if timeout is not None:
        # Split the overall timeout evenly across the listed servers.
        timeout = int(float(timeout) / len(servers)) if servers else int(timeout)
        cmd += '+time={0} '.format(timeout)
    if secure:
        cmd += '+dnssec +adflag '
    cmd = __salt__['cmd.run_all']('{0} {1}'.format(cmd, name),
                                  python_shell=False,
                                  output_loglevel='quiet')
    if 'ignoring invalid type' in cmd['stderr']:
        raise ValueError('Invalid DNS type {}'.format(rdtype))
    if cmd['retcode'] != 0:
        log.warning(
            'dig returned (%s): %s',
            cmd['retcode'], cmd['stderr'].strip(string.whitespace + ';')
        )
        return False
    if not cmd['stdout']:
        return []
    validated = False
    res = []
    for line in cmd['stdout'].splitlines():
        _, rtype, rdata = line.split(None, 2)
        if rtype == 'RRSIG':
            # DNSSEC signature record: remember we saw one, don't emit it.
            validated = True
        elif rtype != 'CNAME' or rdtype == 'CNAME':
            # Skip intermediate CNAMEs unless CNAMEs were asked for.
            res.append(_data_clean(rdata))
    if secure and res and not validated:
        # DNSSEC was requested but no RRSIG covered the answer.
        return False
    return res
constant[
Use dig to lookup addresses
:param name: Name of record to search
:param rdtype: DNS record type
:param timeout: server response timeout
:param servers: [] of servers to use
:return: [] of records or False if error
]
variable[cmd] assign[=] call[constant[dig {0} -t {1} ].format, parameter[name[DIG_OPTIONS], name[rdtype]]]
if name[servers] begin[:]
<ast.AugAssign object at 0x7da1b1f9b190>
if compare[name[timeout] is_not constant[None]] begin[:]
if name[servers] begin[:]
variable[timeout] assign[=] call[name[int], parameter[binary_operation[call[name[float], parameter[name[timeout]]] / call[name[len], parameter[name[servers]]]]]]
<ast.AugAssign object at 0x7da18f811660>
if name[secure] begin[:]
<ast.AugAssign object at 0x7da18f8135e0>
variable[cmd] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[constant[{0} {1}].format, parameter[name[cmd], name[name]]]]]
if compare[constant[ignoring invalid type] in call[name[cmd]][constant[stderr]]] begin[:]
<ast.Raise object at 0x7da1b20217e0>
variable[validated] assign[=] constant[False]
variable[res] assign[=] list[[]]
for taget[name[line]] in starred[call[call[name[cmd]][constant[stdout]].splitlines, parameter[]]] begin[:]
<ast.Tuple object at 0x7da1b2022ad0> assign[=] call[name[line].split, parameter[constant[None], constant[2]]]
if <ast.BoolOp object at 0x7da1b20227a0> begin[:]
continue
call[name[res].append, parameter[call[name[_data_clean], parameter[name[rdata]]]]]
if <ast.BoolOp object at 0x7da1b20217b0> begin[:]
return[constant[False]] | keyword[def] identifier[_lookup_dig] ( identifier[name] , identifier[rdtype] , identifier[timeout] = keyword[None] , identifier[servers] = keyword[None] , identifier[secure] = keyword[None] ):
literal[string]
identifier[cmd] = literal[string] . identifier[format] ( identifier[DIG_OPTIONS] , identifier[rdtype] )
keyword[if] identifier[servers] :
identifier[cmd] += literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[srv] ) keyword[for] identifier[srv] keyword[in] identifier[servers] ])
keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[servers] :
identifier[timeout] = identifier[int] ( identifier[float] ( identifier[timeout] )/ identifier[len] ( identifier[servers] ))
keyword[else] :
identifier[timeout] = identifier[int] ( identifier[timeout] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[timeout] )
keyword[if] identifier[secure] :
identifier[cmd] += literal[string]
identifier[cmd] = identifier[__salt__] [ literal[string] ]( literal[string] . identifier[format] ( identifier[cmd] , identifier[name] ), identifier[python_shell] = keyword[False] , identifier[output_loglevel] = literal[string] )
keyword[if] literal[string] keyword[in] identifier[cmd] [ literal[string] ]:
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[rdtype] ))
keyword[elif] identifier[cmd] [ literal[string] ]!= literal[int] :
identifier[log] . identifier[warning] (
literal[string] ,
identifier[cmd] [ literal[string] ], identifier[cmd] [ literal[string] ]. identifier[strip] ( identifier[string] . identifier[whitespace] + literal[string] )
)
keyword[return] keyword[False]
keyword[elif] keyword[not] identifier[cmd] [ literal[string] ]:
keyword[return] []
identifier[validated] = keyword[False]
identifier[res] =[]
keyword[for] identifier[line] keyword[in] identifier[cmd] [ literal[string] ]. identifier[splitlines] ():
identifier[_] , identifier[rtype] , identifier[rdata] = identifier[line] . identifier[split] ( keyword[None] , literal[int] )
keyword[if] identifier[rtype] == literal[string] keyword[and] identifier[rdtype] != literal[string] :
keyword[continue]
keyword[elif] identifier[rtype] == literal[string] :
identifier[validated] = keyword[True]
keyword[continue]
identifier[res] . identifier[append] ( identifier[_data_clean] ( identifier[rdata] ))
keyword[if] identifier[res] keyword[and] identifier[secure] keyword[and] keyword[not] identifier[validated] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] identifier[res] | def _lookup_dig(name, rdtype, timeout=None, servers=None, secure=None):
"""
Use dig to lookup addresses
:param name: Name of record to search
:param rdtype: DNS record type
:param timeout: server response timeout
:param servers: [] of servers to use
:return: [] of records or False if error
"""
cmd = 'dig {0} -t {1} '.format(DIG_OPTIONS, rdtype)
if servers:
cmd += ''.join(['@{0} '.format(srv) for srv in servers]) # depends on [control=['if'], data=[]]
if timeout is not None:
if servers:
timeout = int(float(timeout) / len(servers)) # depends on [control=['if'], data=[]]
else:
timeout = int(timeout)
cmd += '+time={0} '.format(timeout) # depends on [control=['if'], data=['timeout']]
if secure:
cmd += '+dnssec +adflag ' # depends on [control=['if'], data=[]]
cmd = __salt__['cmd.run_all']('{0} {1}'.format(cmd, name), python_shell=False, output_loglevel='quiet')
if 'ignoring invalid type' in cmd['stderr']:
raise ValueError('Invalid DNS type {}'.format(rdtype)) # depends on [control=['if'], data=[]]
elif cmd['retcode'] != 0:
log.warning('dig returned (%s): %s', cmd['retcode'], cmd['stderr'].strip(string.whitespace + ';'))
return False # depends on [control=['if'], data=[]]
elif not cmd['stdout']:
return [] # depends on [control=['if'], data=[]]
validated = False
res = []
for line in cmd['stdout'].splitlines():
(_, rtype, rdata) = line.split(None, 2)
if rtype == 'CNAME' and rdtype != 'CNAME':
continue # depends on [control=['if'], data=[]]
elif rtype == 'RRSIG':
validated = True
continue # depends on [control=['if'], data=[]]
res.append(_data_clean(rdata)) # depends on [control=['for'], data=['line']]
if res and secure and (not validated):
return False # depends on [control=['if'], data=[]]
else:
return res |
def process_txt(data, template):
    """
    Replace {txt} in template with the serialized TXT records.

    Each record's ``txt`` field is wrapped in double quotes and every
    ``;`` is escaped as ``\\;`` so the value is valid zone-file syntax.
    A list value becomes multiple quoted strings joined by spaces.  The
    input ``data`` is deep-copied and never mutated.
    """
    if data is None:
        to_process = None
    else:
        # Work on a copy so the caller's records are left untouched.
        to_process = copy.deepcopy(data)
        for datum in to_process:
            txt = datum["txt"]
            if isinstance(txt, list):
                # NOTE: '\\;' (not '\;') -- the latter is an invalid
                # escape sequence and only yielded the same bytes by
                # accident (with a SyntaxWarning on modern Python).
                datum["txt"] = " ".join('"%s"' % entry.replace(";", "\\;")
                                        for entry in txt)
            else:
                datum["txt"] = '"%s"' % txt.replace(";", "\\;")
    return process_rr(to_process, "TXT", "txt", "{txt}", template)
constant[
Replace {txt} in template with the serialized TXT records
]
if compare[name[data] is constant[None]] begin[:]
variable[to_process] assign[=] constant[None]
return[call[name[process_rr], parameter[name[to_process], constant[TXT], constant[txt], constant[{txt}], name[template]]]] | keyword[def] identifier[process_txt] ( identifier[data] , identifier[template] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[to_process] = keyword[None]
keyword[else] :
identifier[to_process] = identifier[copy] . identifier[deepcopy] ( identifier[data] )
keyword[for] identifier[datum] keyword[in] identifier[to_process] :
keyword[if] identifier[isinstance] ( identifier[datum] [ literal[string] ], identifier[list] ):
identifier[datum] [ literal[string] ]= literal[string] . identifier[join] ([ literal[string] % identifier[entry] . identifier[replace] ( literal[string] , literal[string] )
keyword[for] identifier[entry] keyword[in] identifier[datum] [ literal[string] ]])
keyword[else] :
identifier[datum] [ literal[string] ]= literal[string] % identifier[datum] [ literal[string] ]. identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[process_rr] ( identifier[to_process] , literal[string] , literal[string] , literal[string] , identifier[template] ) | def process_txt(data, template):
"""
Replace {txt} in template with the serialized TXT records
"""
if data is None:
to_process = None # depends on [control=['if'], data=[]]
else:
# quote txt
to_process = copy.deepcopy(data)
for datum in to_process:
if isinstance(datum['txt'], list):
datum['txt'] = ' '.join(['"%s"' % entry.replace(';', '\\;') for entry in datum['txt']]) # depends on [control=['if'], data=[]]
else:
datum['txt'] = '"%s"' % datum['txt'].replace(';', '\\;') # depends on [control=['for'], data=['datum']]
return process_rr(to_process, 'TXT', 'txt', '{txt}', template) |
def get_logstore(self, project_name, logstore_name):
    """ Fetch the meta info of a logstore.

    An unsuccessful operation raises a LogException.
    :type project_name: string
    :param project_name: the Project name
    :type logstore_name: string
    :param logstore_name: the logstore name
    :return: GetLogStoreResponse
    :raise: LogException
    """
    resource = "/logstores/" + logstore_name
    # No extra query parameters or headers are needed for a plain GET.
    resp, header = self._send("GET", project_name, None, resource, {}, {})
    return GetLogStoreResponse(resp, header)
constant[ get the logstore meta info
Unsuccessful opertaion will cause an LogException.
:type project_name: string
:param project_name: the Project name
:type logstore_name: string
:param logstore_name: the logstore name
:return: GetLogStoreResponse
:raise: LogException
]
variable[headers] assign[=] dictionary[[], []]
variable[params] assign[=] dictionary[[], []]
variable[resource] assign[=] binary_operation[constant[/logstores/] + name[logstore_name]]
<ast.Tuple object at 0x7da1b08ba8c0> assign[=] call[name[self]._send, parameter[constant[GET], name[project_name], constant[None], name[resource], name[params], name[headers]]]
return[call[name[GetLogStoreResponse], parameter[name[resp], name[header]]]] | keyword[def] identifier[get_logstore] ( identifier[self] , identifier[project_name] , identifier[logstore_name] ):
literal[string]
identifier[headers] ={}
identifier[params] ={}
identifier[resource] = literal[string] + identifier[logstore_name]
( identifier[resp] , identifier[header] )= identifier[self] . identifier[_send] ( literal[string] , identifier[project_name] , keyword[None] , identifier[resource] , identifier[params] , identifier[headers] )
keyword[return] identifier[GetLogStoreResponse] ( identifier[resp] , identifier[header] ) | def get_logstore(self, project_name, logstore_name):
""" get the logstore meta info
Unsuccessful opertaion will cause an LogException.
:type project_name: string
:param project_name: the Project name
:type logstore_name: string
:param logstore_name: the logstore name
:return: GetLogStoreResponse
:raise: LogException
"""
headers = {}
params = {}
resource = '/logstores/' + logstore_name
(resp, header) = self._send('GET', project_name, None, resource, params, headers)
return GetLogStoreResponse(resp, header) |
def stop(self):
    """
    Stops the service.

    Closes the log file (unless it is a pipe or a natively supported
    devnull), asks the remote end to shut down, then terminates the
    child process and releases its standard streams.
    """
    keep_log_open = self.log_file == PIPE or (
        self.log_file == DEVNULL and _HAS_NATIVE_DEVNULL)
    if not keep_log_open:
        try:
            self.log_file.close()
        except Exception:
            pass
    if self.process is None:
        return
    try:
        self.send_remote_shutdown_command()
    except TypeError:
        pass
    try:
        if self.process:
            streams = (self.process.stdin,
                       self.process.stdout,
                       self.process.stderr)
            for stream in streams:
                try:
                    stream.close()
                except AttributeError:
                    # Stream may be None when it was not piped.
                    pass
            self.process.terminate()
            self.process.wait()
            self.process.kill()
            self.process = None
    except OSError:
        pass
pass | def function[stop, parameter[self]]:
constant[
Stops the service.
]
if <ast.BoolOp object at 0x7da1b1ef8670> begin[:]
<ast.Try object at 0x7da1b1ef8250>
if compare[name[self].process is constant[None]] begin[:]
return[None]
<ast.Try object at 0x7da1b1ef99f0>
<ast.Try object at 0x7da1b1ef98d0> | keyword[def] identifier[stop] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[log_file] != identifier[PIPE] keyword[and] keyword[not] ( identifier[self] . identifier[log_file] == identifier[DEVNULL] keyword[and] identifier[_HAS_NATIVE_DEVNULL] ):
keyword[try] :
identifier[self] . identifier[log_file] . identifier[close] ()
keyword[except] identifier[Exception] :
keyword[pass]
keyword[if] identifier[self] . identifier[process] keyword[is] keyword[None] :
keyword[return]
keyword[try] :
identifier[self] . identifier[send_remote_shutdown_command] ()
keyword[except] identifier[TypeError] :
keyword[pass]
keyword[try] :
keyword[if] identifier[self] . identifier[process] :
keyword[for] identifier[stream] keyword[in] [ identifier[self] . identifier[process] . identifier[stdin] ,
identifier[self] . identifier[process] . identifier[stdout] ,
identifier[self] . identifier[process] . identifier[stderr] ]:
keyword[try] :
identifier[stream] . identifier[close] ()
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[self] . identifier[process] . identifier[terminate] ()
identifier[self] . identifier[process] . identifier[wait] ()
identifier[self] . identifier[process] . identifier[kill] ()
identifier[self] . identifier[process] = keyword[None]
keyword[except] identifier[OSError] :
keyword[pass] | def stop(self):
"""
Stops the service.
"""
if self.log_file != PIPE and (not (self.log_file == DEVNULL and _HAS_NATIVE_DEVNULL)):
try:
self.log_file.close() # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if self.process is None:
return # depends on [control=['if'], data=[]]
try:
self.send_remote_shutdown_command() # depends on [control=['try'], data=[]]
except TypeError:
pass # depends on [control=['except'], data=[]]
try:
if self.process:
for stream in [self.process.stdin, self.process.stdout, self.process.stderr]:
try:
stream.close() # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['stream']]
self.process.terminate()
self.process.wait()
self.process.kill()
self.process = None # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]] |
def json_dumps(self, obj):
    """Serialize *obj* to a deterministically formatted JSON string.

    Keys are sorted and output is pretty-printed with a 4-space indent
    so repeated dumps of equal objects compare byte-for-byte.
    """
    options = {
        'sort_keys': True,
        'indent': 4,
        'separators': (',', ': '),
    }
    return json.dumps(obj, **options)
constant[Serializer for consistency]
return[call[name[json].dumps, parameter[name[obj]]]] | keyword[def] identifier[json_dumps] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[return] identifier[json] . identifier[dumps] ( identifier[obj] , identifier[sort_keys] = keyword[True] , identifier[indent] = literal[int] , identifier[separators] =( literal[string] , literal[string] )) | def json_dumps(self, obj):
"""Serializer for consistency"""
return json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': ')) |
def load_data_crt_files(self, data_dict):
    """Import sEIT data from CRTomo/CRMod volt.dat (.crt) files.

    Parameters
    ----------
    data_dict : dict
        Maps 'frequencies' to a frequency file path (or the frequency
        array itself) and 'crt' to the list of volt_*.crt files, one
        file per frequency.

    Examples
    --------
    >>> import glob
    data_files = {}
    data_files['frequencies'] = 'data/frequencies.dat'
    files = sorted(glob.glob('data/volt_*.crt'))
    data_files['crt'] = files
    """
    if isinstance(data_dict, str):
        raise Exception('Parameter must be a dict!')
    frequency_data = data_dict['frequencies']
    # A string is treated as a file path; anything else as the data itself.
    frequencies = (np.loadtxt(frequency_data)
                   if isinstance(frequency_data, str) else frequency_data)
    crt_files = data_dict['crt']
    if frequencies.size != len(crt_files):
        raise Exception(
            'number of frequencies does not match the number of data files'
        )
    self._init_frequencies(frequencies)
    for frequency, crt_file in zip(frequencies, crt_files):
        measurements = np.atleast_2d(np.loadtxt(crt_file, skiprows=1))
        if measurements.size == 0:
            continue
        # Electrode pairs are packed as AAAABBBB / MMMMNNNN integers in
        # the first two columns; split them into A, B, M, N.
        ab_raw = measurements[:, 0]
        mn_raw = measurements[:, 1]
        abmn = np.vstack((
            (ab_raw / 1e4).astype(int),
            (ab_raw % 1e4).astype(int),
            (mn_raw / 1e4).astype(int),
            (mn_raw % 1e4).astype(int),
        )).T
        td = self.tds[frequency]
        td.configs.add_to_configs(abmn)
        td.register_measurements(measurements[:, 2], measurements[:, 3])
constant[Load sEIT data from .ctr files (volt.dat files readable by CRTomo,
produced by CRMod)
Parameters
----------
data_dict : dict
Data files that are imported. See example down below
Examples
--------
>>> import glob
data_files = {}
data_files['frequencies'] = 'data/frequencies.dat'
files = sorted(glob.glob('data/volt_*.crt'))
data_files['crt'] = files
]
if call[name[isinstance], parameter[name[data_dict], name[str]]] begin[:]
<ast.Raise object at 0x7da1b24ae170>
variable[frequency_data] assign[=] call[name[data_dict]][constant[frequencies]]
if call[name[isinstance], parameter[name[frequency_data], name[str]]] begin[:]
variable[frequencies] assign[=] call[name[np].loadtxt, parameter[call[name[data_dict]][constant[frequencies]]]]
if compare[name[frequencies].size not_equal[!=] call[name[len], parameter[call[name[data_dict]][constant[crt]]]]] begin[:]
<ast.Raise object at 0x7da1b24af9d0>
call[name[self]._init_frequencies, parameter[name[frequencies]]]
for taget[tuple[[<ast.Name object at 0x7da1b24ae080>, <ast.Name object at 0x7da1b24ac610>]]] in starred[call[name[zip], parameter[name[frequencies], call[name[data_dict]][constant[crt]]]]] begin[:]
variable[subdata] assign[=] call[name[np].atleast_2d, parameter[call[name[np].loadtxt, parameter[name[filename]]]]]
if compare[name[subdata].size equal[==] constant[0]] begin[:]
continue
variable[A] assign[=] call[binary_operation[call[name[subdata]][tuple[[<ast.Slice object at 0x7da1b2352110>, <ast.Constant object at 0x7da1b2352200>]]] / constant[10000.0]].astype, parameter[name[int]]]
variable[B] assign[=] call[binary_operation[call[name[subdata]][tuple[[<ast.Slice object at 0x7da1b2352380>, <ast.Constant object at 0x7da1b2351b10>]]] <ast.Mod object at 0x7da2590d6920> constant[10000.0]].astype, parameter[name[int]]]
variable[M] assign[=] call[binary_operation[call[name[subdata]][tuple[[<ast.Slice object at 0x7da1b2353610>, <ast.Constant object at 0x7da1b2350a00>]]] / constant[10000.0]].astype, parameter[name[int]]]
variable[N] assign[=] call[binary_operation[call[name[subdata]][tuple[[<ast.Slice object at 0x7da1b2352f20>, <ast.Constant object at 0x7da1b23534c0>]]] <ast.Mod object at 0x7da2590d6920> constant[10000.0]].astype, parameter[name[int]]]
variable[ABMN] assign[=] call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da1b23507c0>, <ast.Name object at 0x7da1b23501c0>, <ast.Name object at 0x7da1b2353fd0>, <ast.Name object at 0x7da1b2352dd0>]]]].T
variable[magnitudes] assign[=] call[name[subdata]][tuple[[<ast.Slice object at 0x7da1b2350820>, <ast.Constant object at 0x7da1b2353d90>]]]
variable[phases] assign[=] call[name[subdata]][tuple[[<ast.Slice object at 0x7da1b23500d0>, <ast.Constant object at 0x7da1b23528f0>]]]
call[call[name[self].tds][name[frequency]].configs.add_to_configs, parameter[name[ABMN]]]
call[call[name[self].tds][name[frequency]].register_measurements, parameter[name[magnitudes], name[phases]]] | keyword[def] identifier[load_data_crt_files] ( identifier[self] , identifier[data_dict] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[data_dict] , identifier[str] ):
keyword[raise] identifier[Exception] ( literal[string] )
identifier[frequency_data] = identifier[data_dict] [ literal[string] ]
keyword[if] identifier[isinstance] ( identifier[frequency_data] , identifier[str] ):
identifier[frequencies] = identifier[np] . identifier[loadtxt] ( identifier[data_dict] [ literal[string] ])
keyword[else] :
identifier[frequencies] = identifier[frequency_data]
keyword[if] identifier[frequencies] . identifier[size] != identifier[len] ( identifier[data_dict] [ literal[string] ]):
keyword[raise] identifier[Exception] (
literal[string]
)
identifier[self] . identifier[_init_frequencies] ( identifier[frequencies] )
keyword[for] identifier[frequency] , identifier[filename] keyword[in] identifier[zip] ( identifier[frequencies] , identifier[data_dict] [ literal[string] ]):
identifier[subdata] = identifier[np] . identifier[atleast_2d] ( identifier[np] . identifier[loadtxt] ( identifier[filename] , identifier[skiprows] = literal[int] ))
keyword[if] identifier[subdata] . identifier[size] == literal[int] :
keyword[continue]
identifier[A] =( identifier[subdata] [:, literal[int] ]/ literal[int] ). identifier[astype] ( identifier[int] )
identifier[B] =( identifier[subdata] [:, literal[int] ]% literal[int] ). identifier[astype] ( identifier[int] )
identifier[M] =( identifier[subdata] [:, literal[int] ]/ literal[int] ). identifier[astype] ( identifier[int] )
identifier[N] =( identifier[subdata] [:, literal[int] ]% literal[int] ). identifier[astype] ( identifier[int] )
identifier[ABMN] = identifier[np] . identifier[vstack] (( identifier[A] , identifier[B] , identifier[M] , identifier[N] )). identifier[T]
identifier[magnitudes] = identifier[subdata] [:, literal[int] ]
identifier[phases] = identifier[subdata] [:, literal[int] ]
identifier[self] . identifier[tds] [ identifier[frequency] ]. identifier[configs] . identifier[add_to_configs] ( identifier[ABMN] )
identifier[self] . identifier[tds] [ identifier[frequency] ]. identifier[register_measurements] ( identifier[magnitudes] , identifier[phases] ) | def load_data_crt_files(self, data_dict):
"""Load sEIT data from .ctr files (volt.dat files readable by CRTomo,
produced by CRMod)
Parameters
----------
data_dict : dict
Data files that are imported. See example down below
Examples
--------
>>> import glob
data_files = {}
data_files['frequencies'] = 'data/frequencies.dat'
files = sorted(glob.glob('data/volt_*.crt'))
data_files['crt'] = files
"""
if isinstance(data_dict, str):
raise Exception('Parameter must be a dict!') # depends on [control=['if'], data=[]]
frequency_data = data_dict['frequencies']
if isinstance(frequency_data, str):
frequencies = np.loadtxt(data_dict['frequencies']) # depends on [control=['if'], data=[]]
else:
# if this is not a string, assume it to be the data
frequencies = frequency_data
if frequencies.size != len(data_dict['crt']):
raise Exception('number of frequencies does not match the number of data files') # depends on [control=['if'], data=[]]
self._init_frequencies(frequencies)
for (frequency, filename) in zip(frequencies, data_dict['crt']):
subdata = np.atleast_2d(np.loadtxt(filename, skiprows=1))
if subdata.size == 0:
continue # depends on [control=['if'], data=[]]
# extract configurations
A = (subdata[:, 0] / 10000.0).astype(int)
B = (subdata[:, 0] % 10000.0).astype(int)
M = (subdata[:, 1] / 10000.0).astype(int)
N = (subdata[:, 1] % 10000.0).astype(int)
ABMN = np.vstack((A, B, M, N)).T
magnitudes = subdata[:, 2]
phases = subdata[:, 3]
self.tds[frequency].configs.add_to_configs(ABMN)
self.tds[frequency].register_measurements(magnitudes, phases) # depends on [control=['for'], data=[]] |
def saccade_detection(samplemat, Hz=200, threshold=30,
                      acc_thresh=2000, min_duration=21, min_movement=.35,
                      ignore_blinks=False):
    '''
    Detect saccades in a stream of gaze location samples.

    Coordinates in samplemat are assumed to be in degrees.
    Saccades are detected by a velocity/acceleration threshold approach.
    A saccade starts when a) the velocity is above threshold, b) the
    acceleration is above acc_thresh at least once during the interval
    defined by the velocity threshold, c) the saccade lasts at least
    min_duration ms and d) the distance between saccade start and endpoint
    is at least min_movement degrees.

    Parameters
    ----------
    samplemat : object
        Sample record with at least ``x`` and ``y`` arrays (degrees); when
        ``ignore_blinks`` is True it must also provide ``blinks``.
    Hz : number
        Sampling rate of the gaze samples, in samples per second.
    threshold : number
        Velocity threshold (deg/s) that opens a saccade candidate.
    acc_thresh : number
        Acceleration threshold that must be exceeded at least once inside
        a candidate interval for it to count as a saccade.
    min_duration : number
        Minimum duration in milliseconds (applied to candidate intervals
        and to the fixations between them).
    min_movement : number
        Minimum distance in degrees between the mean positions of two
        neighboring fixations; closer fixations are merged.
    ignore_blinks : bool
        When True, pass ``samplemat.blinks`` to ``get_velocity`` so blink
        samples are excluded from the velocity estimate.

    Returns
    -------
    numpy.ndarray of bool
        One entry per sample; True where the sample belongs to a saccade.
    '''
    # Velocity/acceleration come from the module-level helper; with
    # ignore_blinks the blink mask is forwarded so blinks do not produce
    # spurious velocity spikes.
    if ignore_blinks:
        velocity, acceleration = get_velocity(samplemat, float(Hz), blinks=samplemat.blinks)
    else:
        velocity, acceleration = get_velocity(samplemat, float(Hz))
    saccades = (velocity > threshold)
    # Indices where the boolean saccade mask flips; +1 so each border marks
    # the first sample of the new segment.
    borders = np.where(np.diff(saccades.astype(int)))[0] + 1
    # If the series already starts above threshold, prepend 0 so borders
    # keeps its (start, end) pairing.
    # NOTE(review): this tests velocity[1], not velocity[0] — possibly an
    # off-by-one; confirm against the intended first-sample semantics.
    if velocity[1] > threshold:
        borders = np.hstack(([0], borders))
    saccade = 0 * np.ones(samplemat.x.shape)
    # Only count saccades when acceleration also surpasses threshold
    for i, (start, end) in enumerate(zip(borders[0::2], borders[1::2])):
        if sum(acceleration[start:end] > acc_thresh) >= 1:
            saccade[start:end] = 1
    # Recompute borders from the accepted saccade mask.
    borders = np.where(np.diff(saccade.astype(int)))[0] + 1
    if saccade[0] == 0:
        borders = np.hstack(([0], borders))
    # Non-saccade gaps shorter than min_duration are absorbed into the
    # surrounding saccade (too short to be a real fixation).
    for i, (start, end) in enumerate(zip(borders[0::2], borders[1::2])):
        if (1000*(end - start) / float(Hz)) < (min_duration):
            saccade[start:end] = 1
    # Delete saccade between fixations that are too close together.
    dists_ok = False
    while not dists_ok:
        dists_ok = True
        num_merges = 0
        # Walk triples (previous fixation, saccade, next fixation) by
        # pairing successive border indices.
        for i, (lfixstart, lfixend, start, end, nfixstart, nfixend) in enumerate(zip(
                borders[0::2], borders[1::2],
                borders[1::2], borders[2::2],
                borders[2::2], borders[3::2])):
            # Mean gaze position of the fixation before and after this
            # saccade candidate.
            lastx = samplemat.x[lfixstart:lfixend].mean()
            lasty = samplemat.y[lfixstart:lfixend].mean()
            nextx = samplemat.x[nfixstart:nfixend].mean()
            nexty = samplemat.y[nfixstart:nfixend].mean()
            # Fixations shorter than min_duration are relabeled as saccade.
            if (1000*(lfixend - lfixstart) / float(Hz)) < (min_duration):
                saccade[lfixstart:lfixend] = 1
                continue
            # Euclidean distance between the two fixation centroids.
            distance = ((nextx - lastx) ** 2 + (nexty - lasty) ** 2) ** .5
            if distance < min_movement:
                # Movement too small to be a saccade: drop it and merge the
                # two fixations, then rescan from updated borders.
                num_merges += 1
                dists_ok = False
                saccade[start:end] = 0
        borders = np.where(np.diff(saccade.astype(int)))[0] + 1
        if saccade[0] == 0:
            borders = np.hstack(([0], borders))
    return saccade.astype(bool)
constant[
Detect saccades in a stream of gaze location samples.
Coordinates in samplemat are assumed to be in degrees.
Saccades are detect by a velocity/acceleration threshold approach.
A saccade starts when a) the velocity is above threshold, b) the
acceleration is above acc_thresh at least once during the interval
defined by the velocity threshold, c) the saccade lasts at least min_duration
ms and d) the distance between saccade start and enpoint is at least
min_movement degrees.
]
if name[ignore_blinks] begin[:]
<ast.Tuple object at 0x7da207f013f0> assign[=] call[name[get_velocity], parameter[name[samplemat], call[name[float], parameter[name[Hz]]]]]
variable[saccades] assign[=] compare[name[velocity] greater[>] name[threshold]]
variable[borders] assign[=] binary_operation[call[call[name[np].where, parameter[call[name[np].diff, parameter[call[name[saccades].astype, parameter[name[int]]]]]]]][constant[0]] + constant[1]]
if compare[call[name[velocity]][constant[1]] greater[>] name[threshold]] begin[:]
variable[borders] assign[=] call[name[np].hstack, parameter[tuple[[<ast.List object at 0x7da207f03a00>, <ast.Name object at 0x7da207f02380>]]]]
variable[saccade] assign[=] binary_operation[constant[0] * call[name[np].ones, parameter[name[samplemat].x.shape]]]
for taget[tuple[[<ast.Name object at 0x7da207f00670>, <ast.Tuple object at 0x7da207f03f40>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[call[name[borders]][<ast.Slice object at 0x7da207f02ec0>], call[name[borders]][<ast.Slice object at 0x7da207f03b20>]]]]]] begin[:]
if compare[call[name[sum], parameter[compare[call[name[acceleration]][<ast.Slice object at 0x7da207f03a60>] greater[>] name[acc_thresh]]]] greater_or_equal[>=] constant[1]] begin[:]
call[name[saccade]][<ast.Slice object at 0x7da207f01570>] assign[=] constant[1]
variable[borders] assign[=] binary_operation[call[call[name[np].where, parameter[call[name[np].diff, parameter[call[name[saccade].astype, parameter[name[int]]]]]]]][constant[0]] + constant[1]]
if compare[call[name[saccade]][constant[0]] equal[==] constant[0]] begin[:]
variable[borders] assign[=] call[name[np].hstack, parameter[tuple[[<ast.List object at 0x7da207f02d10>, <ast.Name object at 0x7da207f03700>]]]]
for taget[tuple[[<ast.Name object at 0x7da207f029b0>, <ast.Tuple object at 0x7da207f03610>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[call[name[borders]][<ast.Slice object at 0x7da207f00f70>], call[name[borders]][<ast.Slice object at 0x7da207f02b90>]]]]]] begin[:]
if compare[binary_operation[binary_operation[constant[1000] * binary_operation[name[end] - name[start]]] / call[name[float], parameter[name[Hz]]]] less[<] name[min_duration]] begin[:]
call[name[saccade]][<ast.Slice object at 0x7da207f02f20>] assign[=] constant[1]
variable[dists_ok] assign[=] constant[False]
while <ast.UnaryOp object at 0x7da207f01f30> begin[:]
variable[dists_ok] assign[=] constant[True]
variable[num_merges] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da207f023b0>, <ast.Tuple object at 0x7da207f01660>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[call[name[borders]][<ast.Slice object at 0x7da207f02110>], call[name[borders]][<ast.Slice object at 0x7da207f00c40>], call[name[borders]][<ast.Slice object at 0x7da207f02950>], call[name[borders]][<ast.Slice object at 0x7da207f01030>], call[name[borders]][<ast.Slice object at 0x7da207f00e20>], call[name[borders]][<ast.Slice object at 0x7da207f022c0>]]]]]] begin[:]
variable[lastx] assign[=] call[call[name[samplemat].x][<ast.Slice object at 0x7da207f000a0>].mean, parameter[]]
variable[lasty] assign[=] call[call[name[samplemat].y][<ast.Slice object at 0x7da207f00640>].mean, parameter[]]
variable[nextx] assign[=] call[call[name[samplemat].x][<ast.Slice object at 0x7da207f008b0>].mean, parameter[]]
variable[nexty] assign[=] call[call[name[samplemat].y][<ast.Slice object at 0x7da207f01120>].mean, parameter[]]
if compare[binary_operation[binary_operation[constant[1000] * binary_operation[name[lfixend] - name[lfixstart]]] / call[name[float], parameter[name[Hz]]]] less[<] name[min_duration]] begin[:]
call[name[saccade]][<ast.Slice object at 0x7da207f03d00>] assign[=] constant[1]
continue
variable[distance] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[nextx] - name[lastx]] ** constant[2]] + binary_operation[binary_operation[name[nexty] - name[lasty]] ** constant[2]]] ** constant[0.5]]
if compare[name[distance] less[<] name[min_movement]] begin[:]
<ast.AugAssign object at 0x7da20c991d20>
variable[dists_ok] assign[=] constant[False]
call[name[saccade]][<ast.Slice object at 0x7da20c991ba0>] assign[=] constant[0]
variable[borders] assign[=] binary_operation[call[call[name[np].where, parameter[call[name[np].diff, parameter[call[name[saccade].astype, parameter[name[int]]]]]]]][constant[0]] + constant[1]]
if compare[call[name[saccade]][constant[0]] equal[==] constant[0]] begin[:]
variable[borders] assign[=] call[name[np].hstack, parameter[tuple[[<ast.List object at 0x7da20c992bc0>, <ast.Name object at 0x7da20c990a30>]]]]
return[call[name[saccade].astype, parameter[name[bool]]]] | keyword[def] identifier[saccade_detection] ( identifier[samplemat] , identifier[Hz] = literal[int] , identifier[threshold] = literal[int] ,
identifier[acc_thresh] = literal[int] , identifier[min_duration] = literal[int] , identifier[min_movement] = literal[int] ,
identifier[ignore_blinks] = keyword[False] ):
literal[string]
keyword[if] identifier[ignore_blinks] :
identifier[velocity] , identifier[acceleration] = identifier[get_velocity] ( identifier[samplemat] , identifier[float] ( identifier[Hz] ), identifier[blinks] = identifier[samplemat] . identifier[blinks] )
keyword[else] :
identifier[velocity] , identifier[acceleration] = identifier[get_velocity] ( identifier[samplemat] , identifier[float] ( identifier[Hz] ))
identifier[saccades] =( identifier[velocity] > identifier[threshold] )
identifier[borders] = identifier[np] . identifier[where] ( identifier[np] . identifier[diff] ( identifier[saccades] . identifier[astype] ( identifier[int] )))[ literal[int] ]+ literal[int]
keyword[if] identifier[velocity] [ literal[int] ]> identifier[threshold] :
identifier[borders] = identifier[np] . identifier[hstack] (([ literal[int] ], identifier[borders] ))
identifier[saccade] = literal[int] * identifier[np] . identifier[ones] ( identifier[samplemat] . identifier[x] . identifier[shape] )
keyword[for] identifier[i] ,( identifier[start] , identifier[end] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[borders] [ literal[int] :: literal[int] ], identifier[borders] [ literal[int] :: literal[int] ])):
keyword[if] identifier[sum] ( identifier[acceleration] [ identifier[start] : identifier[end] ]> identifier[acc_thresh] )>= literal[int] :
identifier[saccade] [ identifier[start] : identifier[end] ]= literal[int]
identifier[borders] = identifier[np] . identifier[where] ( identifier[np] . identifier[diff] ( identifier[saccade] . identifier[astype] ( identifier[int] )))[ literal[int] ]+ literal[int]
keyword[if] identifier[saccade] [ literal[int] ]== literal[int] :
identifier[borders] = identifier[np] . identifier[hstack] (([ literal[int] ], identifier[borders] ))
keyword[for] identifier[i] ,( identifier[start] , identifier[end] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[borders] [ literal[int] :: literal[int] ], identifier[borders] [ literal[int] :: literal[int] ])):
keyword[if] ( literal[int] *( identifier[end] - identifier[start] )/ identifier[float] ( identifier[Hz] ))<( identifier[min_duration] ):
identifier[saccade] [ identifier[start] : identifier[end] ]= literal[int]
identifier[dists_ok] = keyword[False]
keyword[while] keyword[not] identifier[dists_ok] :
identifier[dists_ok] = keyword[True]
identifier[num_merges] = literal[int]
keyword[for] identifier[i] ,( identifier[lfixstart] , identifier[lfixend] , identifier[start] , identifier[end] , identifier[nfixstart] , identifier[nfixend] ) keyword[in] identifier[enumerate] ( identifier[zip] (
identifier[borders] [ literal[int] :: literal[int] ], identifier[borders] [ literal[int] :: literal[int] ],
identifier[borders] [ literal[int] :: literal[int] ], identifier[borders] [ literal[int] :: literal[int] ],
identifier[borders] [ literal[int] :: literal[int] ], identifier[borders] [ literal[int] :: literal[int] ])):
identifier[lastx] = identifier[samplemat] . identifier[x] [ identifier[lfixstart] : identifier[lfixend] ]. identifier[mean] ()
identifier[lasty] = identifier[samplemat] . identifier[y] [ identifier[lfixstart] : identifier[lfixend] ]. identifier[mean] ()
identifier[nextx] = identifier[samplemat] . identifier[x] [ identifier[nfixstart] : identifier[nfixend] ]. identifier[mean] ()
identifier[nexty] = identifier[samplemat] . identifier[y] [ identifier[nfixstart] : identifier[nfixend] ]. identifier[mean] ()
keyword[if] ( literal[int] *( identifier[lfixend] - identifier[lfixstart] )/ identifier[float] ( identifier[Hz] ))<( identifier[min_duration] ):
identifier[saccade] [ identifier[lfixstart] : identifier[lfixend] ]= literal[int]
keyword[continue]
identifier[distance] =(( identifier[nextx] - identifier[lastx] )** literal[int] +( identifier[nexty] - identifier[lasty] )** literal[int] )** literal[int]
keyword[if] identifier[distance] < identifier[min_movement] :
identifier[num_merges] += literal[int]
identifier[dists_ok] = keyword[False]
identifier[saccade] [ identifier[start] : identifier[end] ]= literal[int]
identifier[borders] = identifier[np] . identifier[where] ( identifier[np] . identifier[diff] ( identifier[saccade] . identifier[astype] ( identifier[int] )))[ literal[int] ]+ literal[int]
keyword[if] identifier[saccade] [ literal[int] ]== literal[int] :
identifier[borders] = identifier[np] . identifier[hstack] (([ literal[int] ], identifier[borders] ))
keyword[return] identifier[saccade] . identifier[astype] ( identifier[bool] ) | def saccade_detection(samplemat, Hz=200, threshold=30, acc_thresh=2000, min_duration=21, min_movement=0.35, ignore_blinks=False):
"""
Detect saccades in a stream of gaze location samples.
Coordinates in samplemat are assumed to be in degrees.
Saccades are detect by a velocity/acceleration threshold approach.
A saccade starts when a) the velocity is above threshold, b) the
acceleration is above acc_thresh at least once during the interval
defined by the velocity threshold, c) the saccade lasts at least min_duration
ms and d) the distance between saccade start and enpoint is at least
min_movement degrees.
"""
if ignore_blinks:
(velocity, acceleration) = get_velocity(samplemat, float(Hz), blinks=samplemat.blinks) # depends on [control=['if'], data=[]]
else:
(velocity, acceleration) = get_velocity(samplemat, float(Hz))
saccades = velocity > threshold
#print velocity[samplemat.blinks[1:]]
#print saccades[samplemat.blinks[1:]]
borders = np.where(np.diff(saccades.astype(int)))[0] + 1
if velocity[1] > threshold:
borders = np.hstack(([0], borders)) # depends on [control=['if'], data=[]]
saccade = 0 * np.ones(samplemat.x.shape)
# Only count saccades when acceleration also surpasses threshold
for (i, (start, end)) in enumerate(zip(borders[0::2], borders[1::2])):
if sum(acceleration[start:end] > acc_thresh) >= 1:
saccade[start:end] = 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
borders = np.where(np.diff(saccade.astype(int)))[0] + 1
if saccade[0] == 0:
borders = np.hstack(([0], borders)) # depends on [control=['if'], data=[]]
for (i, (start, end)) in enumerate(zip(borders[0::2], borders[1::2])):
if 1000 * (end - start) / float(Hz) < min_duration:
saccade[start:end] = 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Delete saccade between fixations that are too close together.
dists_ok = False
while not dists_ok:
dists_ok = True
num_merges = 0
for (i, (lfixstart, lfixend, start, end, nfixstart, nfixend)) in enumerate(zip(borders[0::2], borders[1::2], borders[1::2], borders[2::2], borders[2::2], borders[3::2])):
lastx = samplemat.x[lfixstart:lfixend].mean()
lasty = samplemat.y[lfixstart:lfixend].mean()
nextx = samplemat.x[nfixstart:nfixend].mean()
nexty = samplemat.y[nfixstart:nfixend].mean()
if 1000 * (lfixend - lfixstart) / float(Hz) < min_duration:
saccade[lfixstart:lfixend] = 1
continue # depends on [control=['if'], data=[]]
distance = ((nextx - lastx) ** 2 + (nexty - lasty) ** 2) ** 0.5
if distance < min_movement:
num_merges += 1
dists_ok = False
saccade[start:end] = 0 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
borders = np.where(np.diff(saccade.astype(int)))[0] + 1
if saccade[0] == 0:
borders = np.hstack(([0], borders)) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return saccade.astype(bool) |
def make_fitness(function, greater_is_better):
    """Wrap a metric callable in a ``_Fitness`` measure.

    The resulting object scores the quality of a genetic program's fit and
    thereby its likelihood of surviving into the next generation.  It can be
    called with NumPy vectorized arguments and yields a floating point score.

    Parameters
    ----------
    function : callable
        Callable with signature ``function(y, y_pred, sample_weight)``
        returning a number, where ``y`` is the target vector, ``y_pred``
        the program's predictions, and ``sample_weight`` the weight vector.
    greater_is_better : bool
        True when a larger value of ``function`` indicates a better fit
        (quality metrics); False when it indicates a worse one (error
        magnitudes).

    Returns
    -------
    _Fitness
        The wrapped fitness measure.

    Raises
    ------
    ValueError
        If ``greater_is_better`` is not a bool, ``function`` does not take
        exactly three arguments, or ``function`` does not return a number.
    """
    if not isinstance(greater_is_better, bool):
        raise ValueError('greater_is_better must be bool, got %s'
                         % type(greater_is_better))
    arg_count = function.__code__.co_argcount
    if arg_count != 3:
        raise ValueError('function requires 3 arguments (y, y_pred, w),'
                         ' got %d.' % arg_count)
    # Probe the metric once on tiny vectors to confirm it yields a number.
    probe = function(np.array([1, 1]), np.array([2, 2]), np.array([1, 1]))
    if not isinstance(probe, numbers.Number):
        raise ValueError('function must return a numeric.')
    return _Fitness(function, greater_is_better)
constant[Make a fitness measure, a metric scoring the quality of a program's fit.
This factory function creates a fitness measure object which measures the
quality of a program's fit and thus its likelihood to undergo genetic
operations into the next generation. The resulting object is able to be
called with NumPy vectorized arguments and return a resulting floating
point score quantifying the quality of the program's representation of the
true relationship.
Parameters
----------
function : callable
A function with signature function(y, y_pred, sample_weight) that
returns a floating point number. Where `y` is the input target y
vector, `y_pred` is the predicted values from the genetic program, and
sample_weight is the sample_weight vector.
greater_is_better : bool
Whether a higher value from `function` indicates a better fit. In
general this would be False for metrics indicating the magnitude of
the error, and True for metrics indicating the quality of fit.
]
if <ast.UnaryOp object at 0x7da1b1ddfd00> begin[:]
<ast.Raise object at 0x7da1b1ddfc10>
if compare[name[function].__code__.co_argcount not_equal[!=] constant[3]] begin[:]
<ast.Raise object at 0x7da1b1d777f0>
if <ast.UnaryOp object at 0x7da1b1d77640> begin[:]
<ast.Raise object at 0x7da1b1d74310>
return[call[name[_Fitness], parameter[name[function], name[greater_is_better]]]] | keyword[def] identifier[make_fitness] ( identifier[function] , identifier[greater_is_better] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[greater_is_better] , identifier[bool] ):
keyword[raise] identifier[ValueError] ( literal[string]
% identifier[type] ( identifier[greater_is_better] ))
keyword[if] identifier[function] . identifier[__code__] . identifier[co_argcount] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[function] . identifier[__code__] . identifier[co_argcount] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[function] ( identifier[np] . identifier[array] ([ literal[int] , literal[int] ]),
identifier[np] . identifier[array] ([ literal[int] , literal[int] ]),
identifier[np] . identifier[array] ([ literal[int] , literal[int] ])), identifier[numbers] . identifier[Number] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[_Fitness] ( identifier[function] , identifier[greater_is_better] ) | def make_fitness(function, greater_is_better):
"""Make a fitness measure, a metric scoring the quality of a program's fit.
This factory function creates a fitness measure object which measures the
quality of a program's fit and thus its likelihood to undergo genetic
operations into the next generation. The resulting object is able to be
called with NumPy vectorized arguments and return a resulting floating
point score quantifying the quality of the program's representation of the
true relationship.
Parameters
----------
function : callable
A function with signature function(y, y_pred, sample_weight) that
returns a floating point number. Where `y` is the input target y
vector, `y_pred` is the predicted values from the genetic program, and
sample_weight is the sample_weight vector.
greater_is_better : bool
Whether a higher value from `function` indicates a better fit. In
general this would be False for metrics indicating the magnitude of
the error, and True for metrics indicating the quality of fit.
"""
if not isinstance(greater_is_better, bool):
raise ValueError('greater_is_better must be bool, got %s' % type(greater_is_better)) # depends on [control=['if'], data=[]]
if function.__code__.co_argcount != 3:
raise ValueError('function requires 3 arguments (y, y_pred, w), got %d.' % function.__code__.co_argcount) # depends on [control=['if'], data=[]]
if not isinstance(function(np.array([1, 1]), np.array([2, 2]), np.array([1, 1])), numbers.Number):
raise ValueError('function must return a numeric.') # depends on [control=['if'], data=[]]
return _Fitness(function, greater_is_better) |
def set_result(self, result):
"""Complete all tasks. """
for future in self.traverse():
# All cancelled futures should have callbacks to removed itself
# from this linked list. However, these callbacks are scheduled in
# an event loop, so we could still find them in our list.
future.set_result(result)
if not self.done():
super().set_result(result) | def function[set_result, parameter[self, result]]:
constant[Complete all tasks. ]
for taget[name[future]] in starred[call[name[self].traverse, parameter[]]] begin[:]
call[name[future].set_result, parameter[name[result]]]
if <ast.UnaryOp object at 0x7da18eb57340> begin[:]
call[call[name[super], parameter[]].set_result, parameter[name[result]]] | keyword[def] identifier[set_result] ( identifier[self] , identifier[result] ):
literal[string]
keyword[for] identifier[future] keyword[in] identifier[self] . identifier[traverse] ():
identifier[future] . identifier[set_result] ( identifier[result] )
keyword[if] keyword[not] identifier[self] . identifier[done] ():
identifier[super] (). identifier[set_result] ( identifier[result] ) | def set_result(self, result):
"""Complete all tasks. """
for future in self.traverse():
# All cancelled futures should have callbacks to removed itself
# from this linked list. However, these callbacks are scheduled in
# an event loop, so we could still find them in our list.
future.set_result(result) # depends on [control=['for'], data=['future']]
if not self.done():
super().set_result(result) # depends on [control=['if'], data=[]] |
def status(svc_name=''):
    '''
    Display a process status from monit

    CLI Example:

    .. code-block:: bash

        salt '*' monit.status
        salt '*' monit.status <service name>
    '''
    output = __salt__['cmd.run']('monit status')
    # Pad the 'Process' heading so every field label occupies the same
    # fixed-width column, then drop quotes and split the report into
    # per-process paragraphs (first and last chunks are boilerplate).
    padded = output.replace('Process', 'Process' + ' ' * 28).replace("'", '')
    entries = {}
    for paragraph in padded.split('\n\n')[1:-1]:
        lines = paragraph.splitlines()
        fields = {}
        for line in lines:
            # Fixed-width split: label in the first 36 columns, value after.
            fields[line[:36].strip()] = line[35:].strip()
        # The service name is the second token of the paragraph's first line.
        entries[lines[0].split()[1]] = fields
    if svc_name == '':
        return entries
    return entries.get(svc_name, 'No such service')
constant[
Display a process status from monit
CLI Example:
.. code-block:: bash
salt '*' monit.status
salt '*' monit.status <service name>
]
variable[cmd] assign[=] constant[monit status]
variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]]
variable[prostr] assign[=] binary_operation[constant[Process] + binary_operation[constant[ ] * constant[28]]]
variable[s] assign[=] call[call[call[name[res].replace, parameter[constant[Process], name[prostr]]].replace, parameter[constant['], constant[]]].split, parameter[constant[
]]]
variable[entries] assign[=] dictionary[[], []]
for taget[name[process]] in starred[call[name[s]][<ast.Slice object at 0x7da1b1cde560>]] begin[:]
variable[pro] assign[=] call[name[process].splitlines, parameter[]]
variable[tmp] assign[=] dictionary[[], []]
for taget[name[items]] in starred[name[pro]] begin[:]
variable[key] assign[=] call[call[name[items]][<ast.Slice object at 0x7da1b1cdc070>].strip, parameter[]]
call[name[tmp]][name[key]] assign[=] call[call[name[items]][<ast.Slice object at 0x7da1b1cdc910>].strip, parameter[]]
call[name[entries]][call[call[call[name[pro]][constant[0]].split, parameter[]]][constant[1]]] assign[=] name[tmp]
if compare[name[svc_name] equal[==] constant[]] begin[:]
variable[ret] assign[=] name[entries]
return[name[ret]] | keyword[def] identifier[status] ( identifier[svc_name] = literal[string] ):
literal[string]
identifier[cmd] = literal[string]
identifier[res] = identifier[__salt__] [ literal[string] ]( identifier[cmd] )
identifier[prostr] = literal[string] + literal[string] * literal[int]
identifier[s] = identifier[res] . identifier[replace] ( literal[string] , identifier[prostr] ). identifier[replace] ( literal[string] , literal[string] ). identifier[split] ( literal[string] )
identifier[entries] ={}
keyword[for] identifier[process] keyword[in] identifier[s] [ literal[int] :- literal[int] ]:
identifier[pro] = identifier[process] . identifier[splitlines] ()
identifier[tmp] ={}
keyword[for] identifier[items] keyword[in] identifier[pro] :
identifier[key] = identifier[items] [: literal[int] ]. identifier[strip] ()
identifier[tmp] [ identifier[key] ]= identifier[items] [ literal[int] :]. identifier[strip] ()
identifier[entries] [ identifier[pro] [ literal[int] ]. identifier[split] ()[ literal[int] ]]= identifier[tmp]
keyword[if] identifier[svc_name] == literal[string] :
identifier[ret] = identifier[entries]
keyword[else] :
identifier[ret] = identifier[entries] . identifier[get] ( identifier[svc_name] , literal[string] )
keyword[return] identifier[ret] | def status(svc_name=''):
"""
Display a process status from monit
CLI Example:
.. code-block:: bash
salt '*' monit.status
salt '*' monit.status <service name>
"""
cmd = 'monit status'
res = __salt__['cmd.run'](cmd)
prostr = 'Process' + ' ' * 28
s = res.replace('Process', prostr).replace("'", '').split('\n\n')
entries = {}
for process in s[1:-1]:
pro = process.splitlines()
tmp = {}
for items in pro:
key = items[:36].strip()
tmp[key] = items[35:].strip() # depends on [control=['for'], data=['items']]
entries[pro[0].split()[1]] = tmp # depends on [control=['for'], data=['process']]
if svc_name == '':
ret = entries # depends on [control=['if'], data=[]]
else:
ret = entries.get(svc_name, 'No such service')
return ret |
def setConfiguration(self, configuration):
r"""Set the active configuration of a device.
Arguments:
configuration: a configuration value or a Configuration object.
"""
if isinstance(configuration, Configuration):
configuration = configuration.value
self.dev.set_configuration(configuration) | def function[setConfiguration, parameter[self, configuration]]:
constant[Set the active configuration of a device.
Arguments:
configuration: a configuration value or a Configuration object.
]
if call[name[isinstance], parameter[name[configuration], name[Configuration]]] begin[:]
variable[configuration] assign[=] name[configuration].value
call[name[self].dev.set_configuration, parameter[name[configuration]]] | keyword[def] identifier[setConfiguration] ( identifier[self] , identifier[configuration] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[configuration] , identifier[Configuration] ):
identifier[configuration] = identifier[configuration] . identifier[value]
identifier[self] . identifier[dev] . identifier[set_configuration] ( identifier[configuration] ) | def setConfiguration(self, configuration):
"""Set the active configuration of a device.
Arguments:
configuration: a configuration value or a Configuration object.
"""
if isinstance(configuration, Configuration):
configuration = configuration.value # depends on [control=['if'], data=[]]
self.dev.set_configuration(configuration) |
def to_json(value, **kwargs):
    """Return a serialized copy of the dictionary *value*.

    Values that are HasProperties instances are replaced by the result of
    their ``serialize(**kwargs)`` call; all other values pass through
    unchanged.
    """
    serial_dict = {}
    for key, val in iteritems(value):
        if isinstance(val, HasProperties):
            serial_dict[key] = val.serialize(**kwargs)
        else:
            serial_dict[key] = val
    return serial_dict
return serial_dict | def function[to_json, parameter[value]]:
constant[Return a copy of the dictionary
If the values are HasProperties instances, they are serialized
]
variable[serial_dict] assign[=] <ast.DictComp object at 0x7da1b02e69b0>
return[name[serial_dict]] | keyword[def] identifier[to_json] ( identifier[value] ,** identifier[kwargs] ):
literal[string]
identifier[serial_dict] ={
identifier[key] :(
identifier[val] . identifier[serialize] (** identifier[kwargs] ) keyword[if] identifier[isinstance] ( identifier[val] , identifier[HasProperties] )
keyword[else] identifier[val]
)
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[iteritems] ( identifier[value] )
}
keyword[return] identifier[serial_dict] | def to_json(value, **kwargs):
"""Return a copy of the dictionary
If the values are HasProperties instances, they are serialized
"""
serial_dict = {key: val.serialize(**kwargs) if isinstance(val, HasProperties) else val for (key, val) in iteritems(value)}
return serial_dict |
def _structlog_default_keys_processor(logger_class, log_method, event):
    '''Add unique id, type and hostname to a structlog event dict.

    Fills in three keys on *event* (mutated in place and returned):

    - ``id``: sortable UTC timestamp prefix plus a uuid1 hex suffix, so ids
      stay unique even when several events share the same second (only set
      when absent);
    - ``type``: defaults to ``'log'`` when absent;
    - ``host``: always overwritten with the module-level HOSTNAME.
    '''
    # No ``global`` statement needed: HOSTNAME is only read, never rebound.
    if 'id' not in event:
        event['id'] = '%s_%s' % (
            datetime.utcnow().strftime('%Y%m%dT%H%M%S'),
            uuid.uuid1().hex
        )
    if 'type' not in event:
        event['type'] = 'log'
    event['host'] = HOSTNAME
    return event
constant[ Add unique id, type and hostname ]
<ast.Global object at 0x7da18f720a60>
if compare[constant[id] <ast.NotIn object at 0x7da2590d7190> name[event]] begin[:]
call[name[event]][constant[id]] assign[=] binary_operation[constant[%s_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20c990bb0>, <ast.Attribute object at 0x7da20c992650>]]]
if compare[constant[type] <ast.NotIn object at 0x7da2590d7190> name[event]] begin[:]
call[name[event]][constant[type]] assign[=] constant[log]
call[name[event]][constant[host]] assign[=] name[HOSTNAME]
return[name[event]] | keyword[def] identifier[_structlog_default_keys_processor] ( identifier[logger_class] , identifier[log_method] , identifier[event] ):
literal[string]
keyword[global] identifier[HOSTNAME]
keyword[if] literal[string] keyword[not] keyword[in] identifier[event] :
identifier[event] [ literal[string] ]= literal[string] %(
identifier[datetime] . identifier[utcnow] (). identifier[strftime] ( literal[string] ),
identifier[uuid] . identifier[uuid1] (). identifier[hex]
)
keyword[if] literal[string] keyword[not] keyword[in] identifier[event] :
identifier[event] [ literal[string] ]= literal[string]
identifier[event] [ literal[string] ]= identifier[HOSTNAME]
keyword[return] identifier[event] | def _structlog_default_keys_processor(logger_class, log_method, event):
""" Add unique id, type and hostname """
global HOSTNAME
if 'id' not in event:
event['id'] = '%s_%s' % (datetime.utcnow().strftime('%Y%m%dT%H%M%S'), uuid.uuid1().hex) # depends on [control=['if'], data=['event']]
if 'type' not in event:
event['type'] = 'log' # depends on [control=['if'], data=['event']]
event['host'] = HOSTNAME
return event |
def _check_deprecated_module(self, node, mod_path):
"""check if the module is deprecated"""
for mod_name in self.config.deprecated_modules:
if mod_path == mod_name or mod_path.startswith(mod_name + "."):
self.add_message("deprecated-module", node=node, args=mod_path) | def function[_check_deprecated_module, parameter[self, node, mod_path]]:
constant[check if the module is deprecated]
for taget[name[mod_name]] in starred[name[self].config.deprecated_modules] begin[:]
if <ast.BoolOp object at 0x7da1b033ab30> begin[:]
call[name[self].add_message, parameter[constant[deprecated-module]]] | keyword[def] identifier[_check_deprecated_module] ( identifier[self] , identifier[node] , identifier[mod_path] ):
literal[string]
keyword[for] identifier[mod_name] keyword[in] identifier[self] . identifier[config] . identifier[deprecated_modules] :
keyword[if] identifier[mod_path] == identifier[mod_name] keyword[or] identifier[mod_path] . identifier[startswith] ( identifier[mod_name] + literal[string] ):
identifier[self] . identifier[add_message] ( literal[string] , identifier[node] = identifier[node] , identifier[args] = identifier[mod_path] ) | def _check_deprecated_module(self, node, mod_path):
"""check if the module is deprecated"""
for mod_name in self.config.deprecated_modules:
if mod_path == mod_name or mod_path.startswith(mod_name + '.'):
self.add_message('deprecated-module', node=node, args=mod_path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mod_name']] |
def get(key, default='', delimiter=':', merge=None, omit_opts=False,
        omit_pillar=False, omit_master=False, omit_grains=False):
    '''
    .. versionadded: 0.14.0
    Attempt to retrieve the named value from the minion config file, pillar,
    grains or the master config. If the named value is not available, return the
    value specified by ``default``. If not specified, the default is an empty
    string.
    Values can also be retrieved from nested dictionaries. Assume the below
    data structure:
    .. code-block:: python
        {'pkg': {'apache': 'httpd'}}
    To retrieve the value associated with the ``apache`` key, in the
    sub-dictionary corresponding to the ``pkg`` key, the following command can
    be used:
    .. code-block:: bash
        salt myminion config.get pkg:apache
    The ``:`` (colon) is used to represent a nested dictionary level.
    .. versionchanged:: 2015.5.0
        The ``delimiter`` argument was added, to allow delimiters other than
        ``:`` to be used.
    This function traverses these data stores in this order, returning the
    first match found:
    - Minion configuration
    - Minion's grains
    - Minion's pillar data
    - Master configuration (requires :conf_minion:`pillar_opts` to be set to
      ``True`` in Minion config file in order to work)
    This means that if there is a value that is going to be the same for the
    majority of minions, it can be configured in the Master config file, and
    then overridden using the grains, pillar, or Minion config file.
    Adding config options to the Master or Minion configuration file is easy:
    .. code-block:: yaml
        my-config-option: value
        cafe-menu:
          - egg and bacon
          - egg sausage and bacon
          - egg and spam
          - egg bacon and spam
          - egg bacon sausage and spam
          - spam bacon sausage and spam
          - spam egg spam spam bacon and spam
          - spam sausage spam spam bacon spam tomato and spam
    .. note::
        Minion configuration options built into Salt (like those defined
        :ref:`here <configuration-salt-minion>`) will *always* be defined in
        the Minion configuration and thus *cannot be overridden by grains or
        pillar data*. However, additional (user-defined) configuration options
        (as in the above example) will not be in the Minion configuration by
        default and thus can be overridden using grains/pillar data by leaving
        the option out of the minion config file.
    **Arguments**
    delimiter
        .. versionadded:: 2015.5.0
        Override the delimiter used to separate nested levels of a data
        structure.
    merge
        .. versionadded:: 2015.5.0
        If passed, this parameter will change the behavior of the function so
        that, instead of traversing each data store above in order and
        returning the first match, the data stores are first merged together
        and then searched. The pillar data is merged into the master config
        data, then the grains are merged, followed by the Minion config data.
        The resulting data structure is then searched for a match. This allows
        for configurations to be more flexible.
        .. note::
            The merging described above does not mean that grain data will end
            up in the Minion's pillar data, or pillar data will end up in the
            master config data, etc. The data is just combined for the purposes
            of searching an amalgam of the different data stores.
        The supported merge strategies are as follows:
        - **recurse** - If a key exists in both dictionaries, and the new value
          is not a dictionary, it is replaced. Otherwise, the sub-dictionaries
          are merged together into a single dictionary, recursively on down,
          following the same criteria. For example:
          .. code-block:: python
              >>> dict1 = {'foo': {'bar': 1, 'qux': True},
                           'hosts': ['a', 'b', 'c'],
                           'only_x': None}
              >>> dict2 = {'foo': {'baz': 2, 'qux': False},
                           'hosts': ['d', 'e', 'f'],
                           'only_y': None}
              >>> merged
              {'foo': {'bar': 1, 'baz': 2, 'qux': False},
               'hosts': ['d', 'e', 'f'],
               'only_dict1': None,
               'only_dict2': None}
        - **overwrite** - If a key exists in the top level of both
          dictionaries, the new value completely overwrites the old. For
          example:
          .. code-block:: python
              >>> dict1 = {'foo': {'bar': 1, 'qux': True},
                           'hosts': ['a', 'b', 'c'],
                           'only_x': None}
              >>> dict2 = {'foo': {'baz': 2, 'qux': False},
                           'hosts': ['d', 'e', 'f'],
                           'only_y': None}
              >>> merged
              {'foo': {'baz': 2, 'qux': False},
               'hosts': ['d', 'e', 'f'],
               'only_dict1': None,
               'only_dict2': None}
    CLI Example:
    .. code-block:: bash
        salt '*' config.get pkg:apache
        salt '*' config.get lxc.container_profile:centos merge=recurse
    '''
    # '_|-' is used as a "not found" sentinel throughout: it is assumed to
    # never occur as a real configuration value, so ``ret != '_|-'`` means
    # the key was found in that data store.
    if merge is None:
        # No merge requested: probe each data store in precedence order and
        # return on the first hit. Every hit is passed through sdb.sdb_get()
        # so that ``sdb://`` URI values are resolved before being returned.
        if not omit_opts:
            ret = salt.utils.data.traverse_dict_and_list(
                __opts__,
                key,
                '_|-',
                delimiter=delimiter)
            if ret != '_|-':
                return sdb.sdb_get(ret, __opts__)
        if not omit_grains:
            ret = salt.utils.data.traverse_dict_and_list(
                __grains__,
                key,
                '_|-',
                delimiter)
            if ret != '_|-':
                return sdb.sdb_get(ret, __opts__)
        if not omit_pillar:
            ret = salt.utils.data.traverse_dict_and_list(
                __pillar__,
                key,
                '_|-',
                delimiter=delimiter)
            if ret != '_|-':
                return sdb.sdb_get(ret, __opts__)
        if not omit_master:
            # Master config data is only available when pillar_opts is set,
            # in which case it lives under the 'master' pillar key.
            ret = salt.utils.data.traverse_dict_and_list(
                __pillar__.get('master', {}),
                key,
                '_|-',
                delimiter=delimiter)
            if ret != '_|-':
                return sdb.sdb_get(ret, __opts__)
        # Fall back to the module-level DEFAULTS mapping as a last resort.
        ret = salt.utils.data.traverse_dict_and_list(
            DEFAULTS,
            key,
            '_|-',
            delimiter=delimiter)
        log.debug("key: %s, ret: %s", key, ret)
        if ret != '_|-':
            return sdb.sdb_get(ret, __opts__)
    else:
        # Merge mode: combine all data stores into one structure first, then
        # search it once. Only 'recurse' and 'overwrite' are supported.
        if merge not in ('recurse', 'overwrite'):
            log.warning('Unsupported merge strategy \'%s\'. Falling back '
                        'to \'recurse\'.', merge)
            merge = 'recurse'
        # NOTE(review): master config path is hard-coded here rather than
        # derived from __opts__ -- confirm this is intentional.
        merge_lists = salt.config.master_config('/etc/salt/master').get('pillar_merge_lists')
        # Merge order defines precedence: later merges override earlier ones
        # (DEFAULTS < master config < pillar < grains < minion opts).
        data = copy.copy(DEFAULTS)
        data = salt.utils.dictupdate.merge(data, __pillar__.get('master', {}), strategy=merge, merge_lists=merge_lists)
        data = salt.utils.dictupdate.merge(data, __pillar__, strategy=merge, merge_lists=merge_lists)
        data = salt.utils.dictupdate.merge(data, __grains__, strategy=merge, merge_lists=merge_lists)
        data = salt.utils.dictupdate.merge(data, __opts__, strategy=merge, merge_lists=merge_lists)
        ret = salt.utils.data.traverse_dict_and_list(
            data,
            key,
            '_|-',
            delimiter=delimiter)
        if ret != '_|-':
            return sdb.sdb_get(ret, __opts__)
    # Key was not found in any data store.
    return default
constant[
.. versionadded: 0.14.0
Attempt to retrieve the named value from the minion config file, pillar,
grains or the master config. If the named value is not available, return the
value specified by ``default``. If not specified, the default is an empty
string.
Values can also be retrieved from nested dictionaries. Assume the below
data structure:
.. code-block:: python
{'pkg': {'apache': 'httpd'}}
To retrieve the value associated with the ``apache`` key, in the
sub-dictionary corresponding to the ``pkg`` key, the following command can
be used:
.. code-block:: bash
salt myminion config.get pkg:apache
The ``:`` (colon) is used to represent a nested dictionary level.
.. versionchanged:: 2015.5.0
The ``delimiter`` argument was added, to allow delimiters other than
``:`` to be used.
This function traverses these data stores in this order, returning the
first match found:
- Minion configuration
- Minion's grains
- Minion's pillar data
- Master configuration (requires :conf_minion:`pillar_opts` to be set to
``True`` in Minion config file in order to work)
This means that if there is a value that is going to be the same for the
majority of minions, it can be configured in the Master config file, and
then overridden using the grains, pillar, or Minion config file.
Adding config options to the Master or Minion configuration file is easy:
.. code-block:: yaml
my-config-option: value
cafe-menu:
- egg and bacon
- egg sausage and bacon
- egg and spam
- egg bacon and spam
- egg bacon sausage and spam
- spam bacon sausage and spam
- spam egg spam spam bacon and spam
- spam sausage spam spam bacon spam tomato and spam
.. note::
Minion configuration options built into Salt (like those defined
:ref:`here <configuration-salt-minion>`) will *always* be defined in
the Minion configuration and thus *cannot be overridden by grains or
pillar data*. However, additional (user-defined) configuration options
(as in the above example) will not be in the Minion configuration by
default and thus can be overridden using grains/pillar data by leaving
the option out of the minion config file.
**Arguments**
delimiter
.. versionadded:: 2015.5.0
Override the delimiter used to separate nested levels of a data
structure.
merge
.. versionadded:: 2015.5.0
If passed, this parameter will change the behavior of the function so
that, instead of traversing each data store above in order and
returning the first match, the data stores are first merged together
and then searched. The pillar data is merged into the master config
data, then the grains are merged, followed by the Minion config data.
The resulting data structure is then searched for a match. This allows
for configurations to be more flexible.
.. note::
The merging described above does not mean that grain data will end
up in the Minion's pillar data, or pillar data will end up in the
master config data, etc. The data is just combined for the purposes
of searching an amalgam of the different data stores.
The supported merge strategies are as follows:
- **recurse** - If a key exists in both dictionaries, and the new value
is not a dictionary, it is replaced. Otherwise, the sub-dictionaries
are merged together into a single dictionary, recursively on down,
following the same criteria. For example:
.. code-block:: python
>>> dict1 = {'foo': {'bar': 1, 'qux': True},
'hosts': ['a', 'b', 'c'],
'only_x': None}
>>> dict2 = {'foo': {'baz': 2, 'qux': False},
'hosts': ['d', 'e', 'f'],
'only_y': None}
>>> merged
{'foo': {'bar': 1, 'baz': 2, 'qux': False},
'hosts': ['d', 'e', 'f'],
'only_dict1': None,
'only_dict2': None}
- **overwrite** - If a key exists in the top level of both
dictionaries, the new value completely overwrites the old. For
example:
.. code-block:: python
>>> dict1 = {'foo': {'bar': 1, 'qux': True},
'hosts': ['a', 'b', 'c'],
'only_x': None}
>>> dict2 = {'foo': {'baz': 2, 'qux': False},
'hosts': ['d', 'e', 'f'],
'only_y': None}
>>> merged
{'foo': {'baz': 2, 'qux': False},
'hosts': ['d', 'e', 'f'],
'only_dict1': None,
'only_dict2': None}
CLI Example:
.. code-block:: bash
salt '*' config.get pkg:apache
salt '*' config.get lxc.container_profile:centos merge=recurse
]
if compare[name[merge] is constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da20c6c4190> begin[:]
variable[ret] assign[=] call[name[salt].utils.data.traverse_dict_and_list, parameter[name[__opts__], name[key], constant[_|-]]]
if compare[name[ret] not_equal[!=] constant[_|-]] begin[:]
return[call[name[sdb].sdb_get, parameter[name[ret], name[__opts__]]]]
if <ast.UnaryOp object at 0x7da20c6c5f30> begin[:]
variable[ret] assign[=] call[name[salt].utils.data.traverse_dict_and_list, parameter[name[__grains__], name[key], constant[_|-], name[delimiter]]]
if compare[name[ret] not_equal[!=] constant[_|-]] begin[:]
return[call[name[sdb].sdb_get, parameter[name[ret], name[__opts__]]]]
if <ast.UnaryOp object at 0x7da20c6c6710> begin[:]
variable[ret] assign[=] call[name[salt].utils.data.traverse_dict_and_list, parameter[name[__pillar__], name[key], constant[_|-]]]
if compare[name[ret] not_equal[!=] constant[_|-]] begin[:]
return[call[name[sdb].sdb_get, parameter[name[ret], name[__opts__]]]]
if <ast.UnaryOp object at 0x7da20c6c7100> begin[:]
variable[ret] assign[=] call[name[salt].utils.data.traverse_dict_and_list, parameter[call[name[__pillar__].get, parameter[constant[master], dictionary[[], []]]], name[key], constant[_|-]]]
if compare[name[ret] not_equal[!=] constant[_|-]] begin[:]
return[call[name[sdb].sdb_get, parameter[name[ret], name[__opts__]]]]
variable[ret] assign[=] call[name[salt].utils.data.traverse_dict_and_list, parameter[name[DEFAULTS], name[key], constant[_|-]]]
call[name[log].debug, parameter[constant[key: %s, ret: %s], name[key], name[ret]]]
if compare[name[ret] not_equal[!=] constant[_|-]] begin[:]
return[call[name[sdb].sdb_get, parameter[name[ret], name[__opts__]]]]
return[name[default]] | keyword[def] identifier[get] ( identifier[key] , identifier[default] = literal[string] , identifier[delimiter] = literal[string] , identifier[merge] = keyword[None] , identifier[omit_opts] = keyword[False] ,
identifier[omit_pillar] = keyword[False] , identifier[omit_master] = keyword[False] , identifier[omit_grains] = keyword[False] ):
literal[string]
keyword[if] identifier[merge] keyword[is] keyword[None] :
keyword[if] keyword[not] identifier[omit_opts] :
identifier[ret] = identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] (
identifier[__opts__] ,
identifier[key] ,
literal[string] ,
identifier[delimiter] = identifier[delimiter] )
keyword[if] identifier[ret] != literal[string] :
keyword[return] identifier[sdb] . identifier[sdb_get] ( identifier[ret] , identifier[__opts__] )
keyword[if] keyword[not] identifier[omit_grains] :
identifier[ret] = identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] (
identifier[__grains__] ,
identifier[key] ,
literal[string] ,
identifier[delimiter] )
keyword[if] identifier[ret] != literal[string] :
keyword[return] identifier[sdb] . identifier[sdb_get] ( identifier[ret] , identifier[__opts__] )
keyword[if] keyword[not] identifier[omit_pillar] :
identifier[ret] = identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] (
identifier[__pillar__] ,
identifier[key] ,
literal[string] ,
identifier[delimiter] = identifier[delimiter] )
keyword[if] identifier[ret] != literal[string] :
keyword[return] identifier[sdb] . identifier[sdb_get] ( identifier[ret] , identifier[__opts__] )
keyword[if] keyword[not] identifier[omit_master] :
identifier[ret] = identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] (
identifier[__pillar__] . identifier[get] ( literal[string] ,{}),
identifier[key] ,
literal[string] ,
identifier[delimiter] = identifier[delimiter] )
keyword[if] identifier[ret] != literal[string] :
keyword[return] identifier[sdb] . identifier[sdb_get] ( identifier[ret] , identifier[__opts__] )
identifier[ret] = identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] (
identifier[DEFAULTS] ,
identifier[key] ,
literal[string] ,
identifier[delimiter] = identifier[delimiter] )
identifier[log] . identifier[debug] ( literal[string] , identifier[key] , identifier[ret] )
keyword[if] identifier[ret] != literal[string] :
keyword[return] identifier[sdb] . identifier[sdb_get] ( identifier[ret] , identifier[__opts__] )
keyword[else] :
keyword[if] identifier[merge] keyword[not] keyword[in] ( literal[string] , literal[string] ):
identifier[log] . identifier[warning] ( literal[string]
literal[string] , identifier[merge] )
identifier[merge] = literal[string]
identifier[merge_lists] = identifier[salt] . identifier[config] . identifier[master_config] ( literal[string] ). identifier[get] ( literal[string] )
identifier[data] = identifier[copy] . identifier[copy] ( identifier[DEFAULTS] )
identifier[data] = identifier[salt] . identifier[utils] . identifier[dictupdate] . identifier[merge] ( identifier[data] , identifier[__pillar__] . identifier[get] ( literal[string] ,{}), identifier[strategy] = identifier[merge] , identifier[merge_lists] = identifier[merge_lists] )
identifier[data] = identifier[salt] . identifier[utils] . identifier[dictupdate] . identifier[merge] ( identifier[data] , identifier[__pillar__] , identifier[strategy] = identifier[merge] , identifier[merge_lists] = identifier[merge_lists] )
identifier[data] = identifier[salt] . identifier[utils] . identifier[dictupdate] . identifier[merge] ( identifier[data] , identifier[__grains__] , identifier[strategy] = identifier[merge] , identifier[merge_lists] = identifier[merge_lists] )
identifier[data] = identifier[salt] . identifier[utils] . identifier[dictupdate] . identifier[merge] ( identifier[data] , identifier[__opts__] , identifier[strategy] = identifier[merge] , identifier[merge_lists] = identifier[merge_lists] )
identifier[ret] = identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] (
identifier[data] ,
identifier[key] ,
literal[string] ,
identifier[delimiter] = identifier[delimiter] )
keyword[if] identifier[ret] != literal[string] :
keyword[return] identifier[sdb] . identifier[sdb_get] ( identifier[ret] , identifier[__opts__] )
keyword[return] identifier[default] | def get(key, default='', delimiter=':', merge=None, omit_opts=False, omit_pillar=False, omit_master=False, omit_grains=False):
"""
.. versionadded: 0.14.0
Attempt to retrieve the named value from the minion config file, pillar,
grains or the master config. If the named value is not available, return the
value specified by ``default``. If not specified, the default is an empty
string.
Values can also be retrieved from nested dictionaries. Assume the below
data structure:
.. code-block:: python
{'pkg': {'apache': 'httpd'}}
To retrieve the value associated with the ``apache`` key, in the
sub-dictionary corresponding to the ``pkg`` key, the following command can
be used:
.. code-block:: bash
salt myminion config.get pkg:apache
The ``:`` (colon) is used to represent a nested dictionary level.
.. versionchanged:: 2015.5.0
The ``delimiter`` argument was added, to allow delimiters other than
``:`` to be used.
This function traverses these data stores in this order, returning the
first match found:
- Minion configuration
- Minion's grains
- Minion's pillar data
- Master configuration (requires :conf_minion:`pillar_opts` to be set to
``True`` in Minion config file in order to work)
This means that if there is a value that is going to be the same for the
majority of minions, it can be configured in the Master config file, and
then overridden using the grains, pillar, or Minion config file.
Adding config options to the Master or Minion configuration file is easy:
.. code-block:: yaml
my-config-option: value
cafe-menu:
- egg and bacon
- egg sausage and bacon
- egg and spam
- egg bacon and spam
- egg bacon sausage and spam
- spam bacon sausage and spam
- spam egg spam spam bacon and spam
- spam sausage spam spam bacon spam tomato and spam
.. note::
Minion configuration options built into Salt (like those defined
:ref:`here <configuration-salt-minion>`) will *always* be defined in
the Minion configuration and thus *cannot be overridden by grains or
pillar data*. However, additional (user-defined) configuration options
(as in the above example) will not be in the Minion configuration by
default and thus can be overridden using grains/pillar data by leaving
the option out of the minion config file.
**Arguments**
delimiter
.. versionadded:: 2015.5.0
Override the delimiter used to separate nested levels of a data
structure.
merge
.. versionadded:: 2015.5.0
If passed, this parameter will change the behavior of the function so
that, instead of traversing each data store above in order and
returning the first match, the data stores are first merged together
and then searched. The pillar data is merged into the master config
data, then the grains are merged, followed by the Minion config data.
The resulting data structure is then searched for a match. This allows
for configurations to be more flexible.
.. note::
The merging described above does not mean that grain data will end
up in the Minion's pillar data, or pillar data will end up in the
master config data, etc. The data is just combined for the purposes
of searching an amalgam of the different data stores.
The supported merge strategies are as follows:
- **recurse** - If a key exists in both dictionaries, and the new value
is not a dictionary, it is replaced. Otherwise, the sub-dictionaries
are merged together into a single dictionary, recursively on down,
following the same criteria. For example:
.. code-block:: python
>>> dict1 = {'foo': {'bar': 1, 'qux': True},
'hosts': ['a', 'b', 'c'],
'only_x': None}
>>> dict2 = {'foo': {'baz': 2, 'qux': False},
'hosts': ['d', 'e', 'f'],
'only_y': None}
>>> merged
{'foo': {'bar': 1, 'baz': 2, 'qux': False},
'hosts': ['d', 'e', 'f'],
'only_dict1': None,
'only_dict2': None}
- **overwrite** - If a key exists in the top level of both
dictionaries, the new value completely overwrites the old. For
example:
.. code-block:: python
>>> dict1 = {'foo': {'bar': 1, 'qux': True},
'hosts': ['a', 'b', 'c'],
'only_x': None}
>>> dict2 = {'foo': {'baz': 2, 'qux': False},
'hosts': ['d', 'e', 'f'],
'only_y': None}
>>> merged
{'foo': {'baz': 2, 'qux': False},
'hosts': ['d', 'e', 'f'],
'only_dict1': None,
'only_dict2': None}
CLI Example:
.. code-block:: bash
salt '*' config.get pkg:apache
salt '*' config.get lxc.container_profile:centos merge=recurse
"""
if merge is None:
if not omit_opts:
ret = salt.utils.data.traverse_dict_and_list(__opts__, key, '_|-', delimiter=delimiter)
if ret != '_|-':
return sdb.sdb_get(ret, __opts__) # depends on [control=['if'], data=['ret']] # depends on [control=['if'], data=[]]
if not omit_grains:
ret = salt.utils.data.traverse_dict_and_list(__grains__, key, '_|-', delimiter)
if ret != '_|-':
return sdb.sdb_get(ret, __opts__) # depends on [control=['if'], data=['ret']] # depends on [control=['if'], data=[]]
if not omit_pillar:
ret = salt.utils.data.traverse_dict_and_list(__pillar__, key, '_|-', delimiter=delimiter)
if ret != '_|-':
return sdb.sdb_get(ret, __opts__) # depends on [control=['if'], data=['ret']] # depends on [control=['if'], data=[]]
if not omit_master:
ret = salt.utils.data.traverse_dict_and_list(__pillar__.get('master', {}), key, '_|-', delimiter=delimiter)
if ret != '_|-':
return sdb.sdb_get(ret, __opts__) # depends on [control=['if'], data=['ret']] # depends on [control=['if'], data=[]]
ret = salt.utils.data.traverse_dict_and_list(DEFAULTS, key, '_|-', delimiter=delimiter)
log.debug('key: %s, ret: %s', key, ret)
if ret != '_|-':
return sdb.sdb_get(ret, __opts__) # depends on [control=['if'], data=['ret']] # depends on [control=['if'], data=[]]
else:
if merge not in ('recurse', 'overwrite'):
log.warning("Unsupported merge strategy '%s'. Falling back to 'recurse'.", merge)
merge = 'recurse' # depends on [control=['if'], data=['merge']]
merge_lists = salt.config.master_config('/etc/salt/master').get('pillar_merge_lists')
data = copy.copy(DEFAULTS)
data = salt.utils.dictupdate.merge(data, __pillar__.get('master', {}), strategy=merge, merge_lists=merge_lists)
data = salt.utils.dictupdate.merge(data, __pillar__, strategy=merge, merge_lists=merge_lists)
data = salt.utils.dictupdate.merge(data, __grains__, strategy=merge, merge_lists=merge_lists)
data = salt.utils.dictupdate.merge(data, __opts__, strategy=merge, merge_lists=merge_lists)
ret = salt.utils.data.traverse_dict_and_list(data, key, '_|-', delimiter=delimiter)
if ret != '_|-':
return sdb.sdb_get(ret, __opts__) # depends on [control=['if'], data=['ret']]
return default |
def set_attributes(self, attributes):
    """Set every given attribute on the data store.

    Convenience wrapper that forwards each key/value pair of *attributes*
    to :meth:`set_attribute`.

    Parameters
    ----------
    attributes : dict-like
        Mapping of attribute name to attribute value.
    """
    for pair in attributes.items():
        self.set_attribute(*pair)
constant[
This provides a centralized method to set the dataset attributes on the
data store.
Parameters
----------
attributes : dict-like
Dictionary of key/value (attribute name / attribute) pairs
]
for taget[tuple[[<ast.Name object at 0x7da20e957f10>, <ast.Name object at 0x7da20e955180>]]] in starred[call[name[attributes].items, parameter[]]] begin[:]
call[name[self].set_attribute, parameter[name[k], name[v]]] | keyword[def] identifier[set_attributes] ( identifier[self] , identifier[attributes] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attributes] . identifier[items] ():
identifier[self] . identifier[set_attribute] ( identifier[k] , identifier[v] ) | def set_attributes(self, attributes):
"""
This provides a centralized method to set the dataset attributes on the
data store.
Parameters
----------
attributes : dict-like
Dictionary of key/value (attribute name / attribute) pairs
"""
for (k, v) in attributes.items():
self.set_attribute(k, v) # depends on [control=['for'], data=[]] |
def LOS_CrossProj(VType, Ds, us, kPIns, kPOuts, kRMins,
                  Lplot='In', proj='All', multi=False):
    """ Compute the parameters to plot the poloidal projection of the LOS

    Parameters
    ----------
    VType : str
        Vessel type, 'Tor' (toroidal) or 'Lin' (linear), case-insensitive
    Ds : np.ndarray
        (3, nL) array of LOS starting points (cartesian coordinates)
    us : np.ndarray
        (3, nL) array of LOS direction vectors, same shape as Ds
    kPIns, kPOuts, kRMins : np.ndarray
        (nL,) longitudinal coordinates of the entry point, exit point and
        point of minimal major radius of each LOS; NaN in kPOuts marks a
        LOS that never exits the vessel
    Lplot : str
        'In' to plot from the vessel entry point, 'Tot' from the LOS origin
    proj : str
        Desired projection: 'Cross', 'Hor', 'All' or '3d' (case-insensitive)
    multi : bool
        If True, return one array (or (cross, hor) tuple) per LOS;
        otherwise return concatenated arrays with NaN-separated segments

    Returns
    -------
    pts : np.ndarray / list / tuple
        Projected points; exact structure depends on proj / multi / VType
    """
    assert type(VType) is str and VType.lower() in ['tor', 'lin']
    assert Lplot.lower() in ['tot', 'in']
    assert type(proj) is str
    proj = proj.lower()
    assert proj in ['cross', 'hor', 'all', '3d']
    assert Ds.ndim == 2 and Ds.shape == us.shape
    nL = Ds.shape[1]
    # Start plotting at the vessel entry point ('In') or at the LOS origin ('Tot')
    k0 = kPIns if Lplot.lower() == 'in' else np.zeros((nL,))
    if VType.lower() == 'tor' and proj in ['cross', 'all']:
        # In a torus the poloidal (R, Z) projection of a straight line is
        # curved => sample each LOS with a resolution that grows with its
        # inclination w.r.t. the horizontal plane.
        CrossProjAng = np.arccos(np.sqrt(us[0, :]**2 + us[1, :]**2)
                                 / np.sqrt(np.sum(us**2, axis=0)))
        # BUGFIX: np.ceil() returns floats, but np.linspace() requires an
        # integer ``num`` (TypeError on modern NumPy) => cast explicitly.
        nkp = np.ceil(25.*(1 - (CrossProjAng/(np.pi/4) - 1)**2) + 2).astype(int)
        ks = np.max([kRMins, kPIns], axis=0) if Lplot.lower() == 'in' else kRMins
        pts0 = []
        if multi:
            for ii in range(0, nL):
                if np.isnan(kPOuts[ii]):
                    pts0.append(np.array([[np.nan, np.nan],
                                          [np.nan, np.nan]]))
                else:
                    k = np.linspace(k0[ii], kPOuts[ii], nkp[ii], endpoint=True)
                    # Make sure the point of minimal major radius is sampled
                    k = np.unique(np.append(k, ks[ii]))
                    pp = Ds[:, ii:ii+1] + k[np.newaxis, :]*us[:, ii:ii+1]
                    pts0.append(np.array([np.hypot(pp[0, :], pp[1, :]), pp[2, :]]))
        else:
            for ii in range(0, nL):
                if np.isnan(kPOuts[ii]):
                    pts0.append(np.array([[np.nan, np.nan, np.nan],
                                          [np.nan, np.nan, np.nan],
                                          [np.nan, np.nan, np.nan]]))
                else:
                    k = np.linspace(k0[ii], kPOuts[ii], nkp[ii], endpoint=True)
                    # Trailing NaN column separates LOS segments once concatenated
                    k = np.append(np.unique(np.append(k, ks[ii])), np.nan)
                    pts0.append(Ds[:, ii:ii+1] + k[np.newaxis, :]*us[:, ii:ii+1])
            pts0 = np.concatenate(tuple(pts0), axis=1)
            # Convert cartesian (X, Y, Z) to poloidal (R, Z)
            pts0 = np.array([np.hypot(pts0[0, :], pts0[1, :]), pts0[2, :]])
    if not (VType.lower() == 'tor' and proj == 'cross'):
        # Straight-line projections only need the two end points of each LOS
        pts = []
        if multi:
            for ii in range(0, nL):
                if np.isnan(kPOuts[ii]):
                    pts.append(np.array([[np.nan, np.nan],
                                         [np.nan, np.nan],
                                         [np.nan, np.nan]]))
                else:
                    k = np.array([k0[ii], kPOuts[ii]])
                    pts.append(Ds[:, ii:ii+1] + k[np.newaxis, :]*us[:, ii:ii+1])
        else:
            for ii in range(0, nL):
                if np.isnan(kPOuts[ii]):
                    pts.append(np.array([[np.nan, np.nan, np.nan],
                                         [np.nan, np.nan, np.nan],
                                         [np.nan, np.nan, np.nan]]))
                else:
                    k = np.array([k0[ii], kPOuts[ii], np.nan])
                    pts.append(Ds[:, ii:ii+1] + k[np.newaxis, :]*us[:, ii:ii+1])
            pts = np.concatenate(tuple(pts), axis=1)
    if proj == 'hor':
        # Keep only (X, Y)
        pts = [pp[:2, :] for pp in pts] if multi else pts[:2, :]
    elif proj == 'cross':
        if VType.lower() == 'tor':
            pts = pts0
        else:
            # Linear vessel: the cross-section is the (Y, Z) plane
            pts = [pp[1:, :] for pp in pts] if multi else pts[1:, :]
    elif proj == 'all':
        # Return a (cross, hor) pair (one pair per LOS when multi=True)
        if multi:
            if VType.lower() == 'tor':
                pts = [(p0, pp[:2, :]) for (p0, pp) in zip(*[pts0, pts])]
            else:
                # BUGFIX: pts is a *list* of per-LOS arrays here; the
                # previous code sliced it like an ndarray (pts[1:,:]),
                # raising TypeError. Build the pair per LOS instead.
                pts = [(pp[1:, :], pp[:2, :]) for pp in pts]
        else:
            pts = (pts0, pts[:2, :]) if VType.lower() == 'tor' else (pts[1:, :], pts[:2, :])
    return pts
constant[ Compute the parameters to plot the poloidal projection of the LOS ]
assert[<ast.BoolOp object at 0x7da204565ae0>]
assert[compare[call[name[Lplot].lower, parameter[]] in list[[<ast.Constant object at 0x7da2045664a0>, <ast.Constant object at 0x7da204565c00>]]]]
assert[compare[call[name[type], parameter[name[proj]]] is name[str]]]
variable[proj] assign[=] call[name[proj].lower, parameter[]]
assert[compare[name[proj] in list[[<ast.Constant object at 0x7da204564cd0>, <ast.Constant object at 0x7da204565990>, <ast.Constant object at 0x7da204564af0>, <ast.Constant object at 0x7da2045673d0>]]]]
assert[<ast.BoolOp object at 0x7da204564f70>]
variable[nL] assign[=] call[name[Ds].shape][constant[1]]
variable[k0] assign[=] <ast.IfExp object at 0x7da204566320>
if <ast.BoolOp object at 0x7da204567b20> begin[:]
variable[CrossProjAng] assign[=] call[name[np].arccos, parameter[binary_operation[call[name[np].sqrt, parameter[binary_operation[binary_operation[call[name[us]][tuple[[<ast.Constant object at 0x7da204564f10>, <ast.Slice object at 0x7da204566770>]]] ** constant[2]] + binary_operation[call[name[us]][tuple[[<ast.Constant object at 0x7da204567f10>, <ast.Slice object at 0x7da204567dc0>]]] ** constant[2]]]]] / call[name[np].sqrt, parameter[call[name[np].sum, parameter[binary_operation[name[us] ** constant[2]]]]]]]]]
variable[nkp] assign[=] call[name[np].ceil, parameter[binary_operation[binary_operation[constant[25.0] * binary_operation[constant[1] - binary_operation[binary_operation[binary_operation[name[CrossProjAng] / binary_operation[name[np].pi / constant[4]]] - constant[1]] ** constant[2]]]] + constant[2]]]]
variable[ks] assign[=] <ast.IfExp object at 0x7da204567fd0>
variable[pts0] assign[=] list[[]]
if name[multi] begin[:]
for taget[name[ii]] in starred[call[name[range], parameter[constant[0], name[nL]]]] begin[:]
if call[name[np].isnan, parameter[call[name[kPOuts]][name[ii]]]] begin[:]
call[name[pts0].append, parameter[call[name[np].array, parameter[list[[<ast.List object at 0x7da2045640a0>, <ast.List object at 0x7da204565090>]]]]]]
if <ast.UnaryOp object at 0x7da1b2344460> begin[:]
variable[pts] assign[=] list[[]]
if name[multi] begin[:]
for taget[name[ii]] in starred[call[name[range], parameter[constant[0], name[nL]]]] begin[:]
if call[name[np].isnan, parameter[call[name[kPOuts]][name[ii]]]] begin[:]
call[name[pts].append, parameter[call[name[np].array, parameter[list[[<ast.List object at 0x7da1b2347250>, <ast.List object at 0x7da1b23469e0>, <ast.List object at 0x7da1b2345630>]]]]]]
if compare[name[proj] equal[==] constant[hor]] begin[:]
variable[pts] assign[=] <ast.IfExp object at 0x7da1b23475e0>
return[name[pts]] | keyword[def] identifier[LOS_CrossProj] ( identifier[VType] , identifier[Ds] , identifier[us] , identifier[kPIns] , identifier[kPOuts] , identifier[kRMins] ,
identifier[Lplot] = literal[string] , identifier[proj] = literal[string] , identifier[multi] = keyword[False] ):
literal[string]
keyword[assert] identifier[type] ( identifier[VType] ) keyword[is] identifier[str] keyword[and] identifier[VType] . identifier[lower] () keyword[in] [ literal[string] , literal[string] ]
keyword[assert] identifier[Lplot] . identifier[lower] () keyword[in] [ literal[string] , literal[string] ]
keyword[assert] identifier[type] ( identifier[proj] ) keyword[is] identifier[str]
identifier[proj] = identifier[proj] . identifier[lower] ()
keyword[assert] identifier[proj] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[assert] identifier[Ds] . identifier[ndim] == literal[int] keyword[and] identifier[Ds] . identifier[shape] == identifier[us] . identifier[shape]
identifier[nL] = identifier[Ds] . identifier[shape] [ literal[int] ]
identifier[k0] = identifier[kPIns] keyword[if] identifier[Lplot] . identifier[lower] ()== literal[string] keyword[else] identifier[np] . identifier[zeros] (( identifier[nL] ,))
keyword[if] identifier[VType] . identifier[lower] ()== literal[string] keyword[and] identifier[proj] keyword[in] [ literal[string] , literal[string] ]:
identifier[CrossProjAng] = identifier[np] . identifier[arccos] ( identifier[np] . identifier[sqrt] ( identifier[us] [ literal[int] ,:]** literal[int] + identifier[us] [ literal[int] ,:]** literal[int] )
/ identifier[np] . identifier[sqrt] ( identifier[np] . identifier[sum] ( identifier[us] ** literal[int] , identifier[axis] = literal[int] )))
identifier[nkp] = identifier[np] . identifier[ceil] ( literal[int] *( literal[int] -( identifier[CrossProjAng] /( identifier[np] . identifier[pi] / literal[int] )- literal[int] )** literal[int] )+ literal[int] )
identifier[ks] = identifier[np] . identifier[max] ([ identifier[kRMins] , identifier[kPIns] ], identifier[axis] = literal[int] ) keyword[if] identifier[Lplot] . identifier[lower] ()== literal[string] keyword[else] identifier[kRMins]
identifier[pts0] =[]
keyword[if] identifier[multi] :
keyword[for] identifier[ii] keyword[in] identifier[range] ( literal[int] , identifier[nL] ):
keyword[if] identifier[np] . identifier[isnan] ( identifier[kPOuts] [ identifier[ii] ]):
identifier[pts0] . identifier[append] ( identifier[np] . identifier[array] ([[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ],
[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ]]))
keyword[else] :
identifier[k] = identifier[np] . identifier[linspace] ( identifier[k0] [ identifier[ii] ], identifier[kPOuts] [ identifier[ii] ], identifier[nkp] [ identifier[ii] ], identifier[endpoint] = keyword[True] )
identifier[k] = identifier[np] . identifier[unique] ( identifier[np] . identifier[append] ( identifier[k] , identifier[ks] [ identifier[ii] ]))
identifier[pp] = identifier[Ds] [:, identifier[ii] : identifier[ii] + literal[int] ]+ identifier[k] [ identifier[np] . identifier[newaxis] ,:]* identifier[us] [:, identifier[ii] : identifier[ii] + literal[int] ]
identifier[pts0] . identifier[append] ( identifier[np] . identifier[array] ([ identifier[np] . identifier[hypot] ( identifier[pp] [ literal[int] ,:], identifier[pp] [ literal[int] ,:]), identifier[pp] [ literal[int] ,:]]))
keyword[else] :
keyword[for] identifier[ii] keyword[in] identifier[range] ( literal[int] , identifier[nL] ):
keyword[if] identifier[np] . identifier[isnan] ( identifier[kPOuts] [ identifier[ii] ]):
identifier[pts0] . identifier[append] ( identifier[np] . identifier[array] ([[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ],
[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ],
[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ]]))
keyword[else] :
identifier[k] = identifier[np] . identifier[linspace] ( identifier[k0] [ identifier[ii] ], identifier[kPOuts] [ identifier[ii] ], identifier[nkp] [ identifier[ii] ], identifier[endpoint] = keyword[True] )
identifier[k] = identifier[np] . identifier[append] ( identifier[np] . identifier[unique] ( identifier[np] . identifier[append] ( identifier[k] , identifier[ks] [ identifier[ii] ])), identifier[np] . identifier[nan] )
identifier[pts0] . identifier[append] ( identifier[Ds] [:, identifier[ii] : identifier[ii] + literal[int] ]+ identifier[k] [ identifier[np] . identifier[newaxis] ,:]* identifier[us] [:, identifier[ii] : identifier[ii] + literal[int] ])
identifier[pts0] = identifier[np] . identifier[concatenate] ( identifier[tuple] ( identifier[pts0] ), identifier[axis] = literal[int] )
identifier[pts0] = identifier[np] . identifier[array] ([ identifier[np] . identifier[hypot] ( identifier[pts0] [ literal[int] ,:], identifier[pts0] [ literal[int] ,:]), identifier[pts0] [ literal[int] ,:]])
keyword[if] keyword[not] ( identifier[VType] . identifier[lower] ()== literal[string] keyword[and] identifier[proj] == literal[string] ):
identifier[pts] =[]
keyword[if] identifier[multi] :
keyword[for] identifier[ii] keyword[in] identifier[range] ( literal[int] , identifier[nL] ):
keyword[if] identifier[np] . identifier[isnan] ( identifier[kPOuts] [ identifier[ii] ]):
identifier[pts] . identifier[append] ( identifier[np] . identifier[array] ([[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ],
[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ],
[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ]]))
keyword[else] :
identifier[k] = identifier[np] . identifier[array] ([ identifier[k0] [ identifier[ii] ], identifier[kPOuts] [ identifier[ii] ]])
identifier[pts] . identifier[append] ( identifier[Ds] [:, identifier[ii] : identifier[ii] + literal[int] ]+ identifier[k] [ identifier[np] . identifier[newaxis] ,:]* identifier[us] [:, identifier[ii] : identifier[ii] + literal[int] ])
keyword[else] :
keyword[for] identifier[ii] keyword[in] identifier[range] ( literal[int] , identifier[nL] ):
keyword[if] identifier[np] . identifier[isnan] ( identifier[kPOuts] [ identifier[ii] ]):
identifier[pts] . identifier[append] ( identifier[np] . identifier[array] ([[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ],
[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ],
[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ]]))
keyword[else] :
identifier[k] = identifier[np] . identifier[array] ([ identifier[k0] [ identifier[ii] ], identifier[kPOuts] [ identifier[ii] ], identifier[np] . identifier[nan] ])
identifier[pts] . identifier[append] ( identifier[Ds] [:, identifier[ii] : identifier[ii] + literal[int] ]+ identifier[k] [ identifier[np] . identifier[newaxis] ,:]* identifier[us] [:, identifier[ii] : identifier[ii] + literal[int] ])
identifier[pts] = identifier[np] . identifier[concatenate] ( identifier[tuple] ( identifier[pts] ), identifier[axis] = literal[int] )
keyword[if] identifier[proj] == literal[string] :
identifier[pts] =[ identifier[pp] [: literal[int] ,:] keyword[for] identifier[pp] keyword[in] identifier[pts] ] keyword[if] identifier[multi] keyword[else] identifier[pts] [: literal[int] ,:]
keyword[elif] identifier[proj] == literal[string] :
keyword[if] identifier[VType] . identifier[lower] ()== literal[string] :
identifier[pts] = identifier[pts0]
keyword[else] :
identifier[pts] =[ identifier[pp] [ literal[int] :,:] keyword[for] identifier[pp] keyword[in] identifier[pts] ] keyword[if] identifier[multi] keyword[else] identifier[pts] [ literal[int] :,:]
keyword[elif] identifier[proj] == literal[string] :
keyword[if] identifier[multi] :
keyword[if] identifier[VType] . identifier[lower] ()== literal[string] :
identifier[pts] =[( identifier[p0] , identifier[pp] [: literal[int] ,:]) keyword[for] ( identifier[p0] , identifier[pp] ) keyword[in] identifier[zip] (*[ identifier[pts0] , identifier[pts] ])]
keyword[else] :
identifier[pts] =( identifier[pts] [ literal[int] :,:], identifier[pts] [: literal[int] ,:])
keyword[else] :
identifier[pts] =( identifier[pts0] , identifier[pts] [: literal[int] ,:]) keyword[if] identifier[VType] . identifier[lower] ()== literal[string] keyword[else] ( identifier[pts] [ literal[int] :,:], identifier[pts] [: literal[int] ,:])
keyword[return] identifier[pts] | def LOS_CrossProj(VType, Ds, us, kPIns, kPOuts, kRMins, Lplot='In', proj='All', multi=False):
""" Compute the parameters to plot the poloidal projection of the LOS """
assert type(VType) is str and VType.lower() in ['tor', 'lin']
assert Lplot.lower() in ['tot', 'in']
assert type(proj) is str
proj = proj.lower()
assert proj in ['cross', 'hor', 'all', '3d']
assert Ds.ndim == 2 and Ds.shape == us.shape
nL = Ds.shape[1]
k0 = kPIns if Lplot.lower() == 'in' else np.zeros((nL,))
if VType.lower() == 'tor' and proj in ['cross', 'all']:
CrossProjAng = np.arccos(np.sqrt(us[0, :] ** 2 + us[1, :] ** 2) / np.sqrt(np.sum(us ** 2, axis=0)))
nkp = np.ceil(25.0 * (1 - (CrossProjAng / (np.pi / 4) - 1) ** 2) + 2)
ks = np.max([kRMins, kPIns], axis=0) if Lplot.lower() == 'in' else kRMins
pts0 = []
if multi:
for ii in range(0, nL):
if np.isnan(kPOuts[ii]):
pts0.append(np.array([[np.nan, np.nan], [np.nan, np.nan]])) # depends on [control=['if'], data=[]]
else:
k = np.linspace(k0[ii], kPOuts[ii], nkp[ii], endpoint=True)
k = np.unique(np.append(k, ks[ii]))
pp = Ds[:, ii:ii + 1] + k[np.newaxis, :] * us[:, ii:ii + 1]
pts0.append(np.array([np.hypot(pp[0, :], pp[1, :]), pp[2, :]])) # depends on [control=['for'], data=['ii']] # depends on [control=['if'], data=[]]
else:
for ii in range(0, nL):
if np.isnan(kPOuts[ii]):
pts0.append(np.array([[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]])) # depends on [control=['if'], data=[]]
else:
k = np.linspace(k0[ii], kPOuts[ii], nkp[ii], endpoint=True)
k = np.append(np.unique(np.append(k, ks[ii])), np.nan)
pts0.append(Ds[:, ii:ii + 1] + k[np.newaxis, :] * us[:, ii:ii + 1]) # depends on [control=['for'], data=['ii']]
pts0 = np.concatenate(tuple(pts0), axis=1)
pts0 = np.array([np.hypot(pts0[0, :], pts0[1, :]), pts0[2, :]]) # depends on [control=['if'], data=[]]
if not (VType.lower() == 'tor' and proj == 'cross'):
pts = []
if multi:
for ii in range(0, nL):
if np.isnan(kPOuts[ii]):
pts.append(np.array([[np.nan, np.nan], [np.nan, np.nan], [np.nan, np.nan]])) # depends on [control=['if'], data=[]]
else:
k = np.array([k0[ii], kPOuts[ii]])
pts.append(Ds[:, ii:ii + 1] + k[np.newaxis, :] * us[:, ii:ii + 1]) # depends on [control=['for'], data=['ii']] # depends on [control=['if'], data=[]]
else:
for ii in range(0, nL):
if np.isnan(kPOuts[ii]):
pts.append(np.array([[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]])) # depends on [control=['if'], data=[]]
else:
k = np.array([k0[ii], kPOuts[ii], np.nan])
pts.append(Ds[:, ii:ii + 1] + k[np.newaxis, :] * us[:, ii:ii + 1]) # depends on [control=['for'], data=['ii']]
pts = np.concatenate(tuple(pts), axis=1) # depends on [control=['if'], data=[]]
if proj == 'hor':
pts = [pp[:2, :] for pp in pts] if multi else pts[:2, :] # depends on [control=['if'], data=[]]
elif proj == 'cross':
if VType.lower() == 'tor':
pts = pts0 # depends on [control=['if'], data=[]]
else:
pts = [pp[1:, :] for pp in pts] if multi else pts[1:, :] # depends on [control=['if'], data=[]]
elif proj == 'all':
if multi:
if VType.lower() == 'tor':
pts = [(p0, pp[:2, :]) for (p0, pp) in zip(*[pts0, pts])] # depends on [control=['if'], data=[]]
else:
pts = (pts[1:, :], pts[:2, :]) # depends on [control=['if'], data=[]]
else:
pts = (pts0, pts[:2, :]) if VType.lower() == 'tor' else (pts[1:, :], pts[:2, :]) # depends on [control=['if'], data=[]]
return pts |
def _get_choices(self, gandi):
    """Internal method to get choices list.

    Fetches the snapshot profiles for ``self.target`` through the given
    gandi API client and returns their ids as strings.
    """
    profiles = gandi.snapshotprofile.list(target=self.target)
    choices = []
    for profile in profiles:
        choices.append(str(profile['id']))
    return choices
constant[ Internal method to get choices list ]
return[<ast.ListComp object at 0x7da18eb55a50>] | keyword[def] identifier[_get_choices] ( identifier[self] , identifier[gandi] ):
literal[string]
keyword[return] [ identifier[str] ( identifier[item] [ literal[string] ])
keyword[for] identifier[item] keyword[in] identifier[gandi] . identifier[snapshotprofile] . identifier[list] ( identifier[target] = identifier[self] . identifier[target] )] | def _get_choices(self, gandi):
""" Internal method to get choices list """
return [str(item['id']) for item in gandi.snapshotprofile.list(target=self.target)] |
def _get_ids_from_label(self, label):
    """Return sshkey IDs which match the given label.

    :param label: label string compared for equality against each key's
        ``label`` field.
    :returns: list of matching ``id`` values (empty when nothing matches).
    """
    # Comprehension replaces the manual append loop; same order, same result.
    return [key['id'] for key in self.list_keys() if key['label'] == label]
constant[Return sshkey IDs which match the given label.]
variable[keys] assign[=] call[name[self].list_keys, parameter[]]
variable[results] assign[=] list[[]]
for taget[name[key]] in starred[name[keys]] begin[:]
if compare[call[name[key]][constant[label]] equal[==] name[label]] begin[:]
call[name[results].append, parameter[call[name[key]][constant[id]]]]
return[name[results]] | keyword[def] identifier[_get_ids_from_label] ( identifier[self] , identifier[label] ):
literal[string]
identifier[keys] = identifier[self] . identifier[list_keys] ()
identifier[results] =[]
keyword[for] identifier[key] keyword[in] identifier[keys] :
keyword[if] identifier[key] [ literal[string] ]== identifier[label] :
identifier[results] . identifier[append] ( identifier[key] [ literal[string] ])
keyword[return] identifier[results] | def _get_ids_from_label(self, label):
"""Return sshkey IDs which match the given label."""
keys = self.list_keys()
results = []
for key in keys:
if key['label'] == label:
results.append(key['id']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
return results |
def write_features(self):
    """Saves features to file.

    Control flow relies on ``read_features()`` raising to decide how much
    of the JSON document must be (re)built:

    * format/missing-file errors -> rebuild the whole document (metadata,
      globals, beat times);
    * ``FeatureParamsError`` -> other feature sets already exist, so reload
      the file and only append this feature's section;
    * no exception -> only the necessary information was read; the
      ``finally`` block still refreshes this feature's section.

    In every case the ``finally`` block writes the current feature's params
    and framesync/beatsync arrays and persists the document to
    ``self.file_struct.features_file``.
    """
    out_json = collections.OrderedDict()
    try:
        # Only save the necessary information
        self.read_features()
    except (WrongFeaturesFormatError, FeaturesNotFound,
            NoFeaturesFileError):
        # We need to create the file or overwite it
        # Metadata: record library versions and a timestamp so stale
        # cached features can be detected later.
        out_json = collections.OrderedDict({"metadata": {
            "versions": {"librosa": librosa.__version__,
                         "msaf": msaf.__version__,
                         "numpy": np.__version__},
            "timestamp": datetime.datetime.today().strftime(
                "%Y/%m/%d %H:%M:%S")}})
        # Global parameters shared by all feature types in this file.
        out_json["globals"] = {
            "dur": self.dur,
            "sample_rate": self.sr,
            "hop_length": self.hop_length,
            "audio_file": self.file_struct.audio_file
        }
        # Beats: estimated times always; annotated times only if available.
        out_json["est_beats"] = self._est_beats_times.tolist()
        out_json["est_beatsync_times"] = self._est_beatsync_times.tolist()
        if self._ann_beats_times is not None:
            out_json["ann_beats"] = self._ann_beats_times.tolist()
            out_json["ann_beatsync_times"] = self._ann_beatsync_times.tolist()
    except FeatureParamsError:
        # We have other features in the file, simply add these ones
        with open(self.file_struct.features_file) as f:
            out_json = json.load(f)
    finally:
        # Specific parameters of the current features
        out_json[self.get_id()] = {}
        out_json[self.get_id()]["params"] = {}
        for param_name in self.get_param_names():
            value = getattr(self, param_name)
            # Check for special case of functions: store their name, since
            # callables are not JSON-serializable.
            if hasattr(value, '__call__'):
                value = value.__name__
            else:
                value = str(value)
            out_json[self.get_id()]["params"][param_name] = value
        # Actual features
        out_json[self.get_id()]["framesync"] = \
            self._framesync_features.tolist()
        out_json[self.get_id()]["est_beatsync"] = \
            self._est_beatsync_features.tolist()
        if self._ann_beatsync_features is not None:
            out_json[self.get_id()]["ann_beatsync"] = \
                self._ann_beatsync_features.tolist()
        # Save it
        with open(self.file_struct.features_file, "w") as f:
            json.dump(out_json, f, indent=2)
constant[Saves features to file.]
variable[out_json] assign[=] call[name[collections].OrderedDict, parameter[]]
<ast.Try object at 0x7da1b02daf50> | keyword[def] identifier[write_features] ( identifier[self] ):
literal[string]
identifier[out_json] = identifier[collections] . identifier[OrderedDict] ()
keyword[try] :
identifier[self] . identifier[read_features] ()
keyword[except] ( identifier[WrongFeaturesFormatError] , identifier[FeaturesNotFound] ,
identifier[NoFeaturesFileError] ):
identifier[out_json] = identifier[collections] . identifier[OrderedDict] ({ literal[string] :{
literal[string] :{ literal[string] : identifier[librosa] . identifier[__version__] ,
literal[string] : identifier[msaf] . identifier[__version__] ,
literal[string] : identifier[np] . identifier[__version__] },
literal[string] : identifier[datetime] . identifier[datetime] . identifier[today] (). identifier[strftime] (
literal[string] )}})
identifier[out_json] [ literal[string] ]={
literal[string] : identifier[self] . identifier[dur] ,
literal[string] : identifier[self] . identifier[sr] ,
literal[string] : identifier[self] . identifier[hop_length] ,
literal[string] : identifier[self] . identifier[file_struct] . identifier[audio_file]
}
identifier[out_json] [ literal[string] ]= identifier[self] . identifier[_est_beats_times] . identifier[tolist] ()
identifier[out_json] [ literal[string] ]= identifier[self] . identifier[_est_beatsync_times] . identifier[tolist] ()
keyword[if] identifier[self] . identifier[_ann_beats_times] keyword[is] keyword[not] keyword[None] :
identifier[out_json] [ literal[string] ]= identifier[self] . identifier[_ann_beats_times] . identifier[tolist] ()
identifier[out_json] [ literal[string] ]= identifier[self] . identifier[_ann_beatsync_times] . identifier[tolist] ()
keyword[except] identifier[FeatureParamsError] :
keyword[with] identifier[open] ( identifier[self] . identifier[file_struct] . identifier[features_file] ) keyword[as] identifier[f] :
identifier[out_json] = identifier[json] . identifier[load] ( identifier[f] )
keyword[finally] :
identifier[out_json] [ identifier[self] . identifier[get_id] ()]={}
identifier[out_json] [ identifier[self] . identifier[get_id] ()][ literal[string] ]={}
keyword[for] identifier[param_name] keyword[in] identifier[self] . identifier[get_param_names] ():
identifier[value] = identifier[getattr] ( identifier[self] , identifier[param_name] )
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ):
identifier[value] = identifier[value] . identifier[__name__]
keyword[else] :
identifier[value] = identifier[str] ( identifier[value] )
identifier[out_json] [ identifier[self] . identifier[get_id] ()][ literal[string] ][ identifier[param_name] ]= identifier[value]
identifier[out_json] [ identifier[self] . identifier[get_id] ()][ literal[string] ]= identifier[self] . identifier[_framesync_features] . identifier[tolist] ()
identifier[out_json] [ identifier[self] . identifier[get_id] ()][ literal[string] ]= identifier[self] . identifier[_est_beatsync_features] . identifier[tolist] ()
keyword[if] identifier[self] . identifier[_ann_beatsync_features] keyword[is] keyword[not] keyword[None] :
identifier[out_json] [ identifier[self] . identifier[get_id] ()][ literal[string] ]= identifier[self] . identifier[_ann_beatsync_features] . identifier[tolist] ()
keyword[with] identifier[open] ( identifier[self] . identifier[file_struct] . identifier[features_file] , literal[string] ) keyword[as] identifier[f] :
identifier[json] . identifier[dump] ( identifier[out_json] , identifier[f] , identifier[indent] = literal[int] ) | def write_features(self):
"""Saves features to file."""
out_json = collections.OrderedDict()
try:
# Only save the necessary information
self.read_features() # depends on [control=['try'], data=[]]
except (WrongFeaturesFormatError, FeaturesNotFound, NoFeaturesFileError):
# We need to create the file or overwite it
# Metadata
out_json = collections.OrderedDict({'metadata': {'versions': {'librosa': librosa.__version__, 'msaf': msaf.__version__, 'numpy': np.__version__}, 'timestamp': datetime.datetime.today().strftime('%Y/%m/%d %H:%M:%S')}})
# Global parameters
out_json['globals'] = {'dur': self.dur, 'sample_rate': self.sr, 'hop_length': self.hop_length, 'audio_file': self.file_struct.audio_file}
# Beats
out_json['est_beats'] = self._est_beats_times.tolist()
out_json['est_beatsync_times'] = self._est_beatsync_times.tolist()
if self._ann_beats_times is not None:
out_json['ann_beats'] = self._ann_beats_times.tolist()
out_json['ann_beatsync_times'] = self._ann_beatsync_times.tolist() # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
except FeatureParamsError:
# We have other features in the file, simply add these ones
with open(self.file_struct.features_file) as f:
out_json = json.load(f) # depends on [control=['with'], data=['f']] # depends on [control=['except'], data=[]]
finally:
# Specific parameters of the current features
out_json[self.get_id()] = {}
out_json[self.get_id()]['params'] = {}
for param_name in self.get_param_names():
value = getattr(self, param_name)
# Check for special case of functions
if hasattr(value, '__call__'):
value = value.__name__ # depends on [control=['if'], data=[]]
else:
value = str(value)
out_json[self.get_id()]['params'][param_name] = value # depends on [control=['for'], data=['param_name']]
# Actual features
out_json[self.get_id()]['framesync'] = self._framesync_features.tolist()
out_json[self.get_id()]['est_beatsync'] = self._est_beatsync_features.tolist()
if self._ann_beatsync_features is not None:
out_json[self.get_id()]['ann_beatsync'] = self._ann_beatsync_features.tolist() # depends on [control=['if'], data=[]]
# Save it
with open(self.file_struct.features_file, 'w') as f:
json.dump(out_json, f, indent=2) # depends on [control=['with'], data=['f']] |
def float(cls, name, description=None, unit='', params=None,
          default=None, initial_status=None):
    """Instantiate a new float sensor object.

    Parameters
    ----------
    name : str
        The name of the sensor.
    description : str
        A short description of the sensor.
    unit : str
        The units of the sensor value. May be the empty string
        if there are no applicable units.
    params : list
        [min, max] -- minimum and maximum values of the sensor
    default : float or None
        An initial value for the sensor. Passed through to the
        constructor unchanged (presumably the constructor substitutes
        0.0 when None -- TODO confirm against the Sensor constructor).
    initial_status : int enum or None
        An initial status for the sensor. If None, defaults to
        Sensor.UNKNOWN. `initial_status` must be one of the keys in
        Sensor.STATUSES
    """
    # Alternate constructor: delegate to the generic constructor with the
    # FLOAT sensor type prepended.
    return cls(cls.FLOAT, name, description, unit, params,
               default, initial_status)
constant[Instantiate a new float sensor object.
Parameters
----------
name : str
The name of the sensor.
description : str
A short description of the sensor.
units : str
The units of the sensor value. May be the empty string
if there are no applicable units.
params : list
[min, max] -- miniumum and maximum values of the sensor
default : float
An initial value for the sensor. Defaults to 0.0.
initial_status : int enum or None
An initial status for the sensor. If None, defaults to
Sensor.UNKNOWN. `initial_status` must be one of the keys in
Sensor.STATUSES
]
return[call[name[cls], parameter[name[cls].FLOAT, name[name], name[description], name[unit], name[params], name[default], name[initial_status]]]] | keyword[def] identifier[float] ( identifier[cls] , identifier[name] , identifier[description] = keyword[None] , identifier[unit] = literal[string] , identifier[params] = keyword[None] ,
identifier[default] = keyword[None] , identifier[initial_status] = keyword[None] ):
literal[string]
keyword[return] identifier[cls] ( identifier[cls] . identifier[FLOAT] , identifier[name] , identifier[description] , identifier[unit] , identifier[params] ,
identifier[default] , identifier[initial_status] ) | def float(cls, name, description=None, unit='', params=None, default=None, initial_status=None):
"""Instantiate a new float sensor object.
Parameters
----------
name : str
The name of the sensor.
description : str
A short description of the sensor.
units : str
The units of the sensor value. May be the empty string
if there are no applicable units.
params : list
[min, max] -- miniumum and maximum values of the sensor
default : float
An initial value for the sensor. Defaults to 0.0.
initial_status : int enum or None
An initial status for the sensor. If None, defaults to
Sensor.UNKNOWN. `initial_status` must be one of the keys in
Sensor.STATUSES
"""
return cls(cls.FLOAT, name, description, unit, params, default, initial_status) |
def form(self) -> Optional['HTMLFormElement']:
    """Get ``HTMLFormElement`` object related to this node.

    Returns the explicitly associated form when set, otherwise the
    nearest ``HTMLFormElement`` ancestor, or ``None`` when there is none.
    """
    if self.__form:
        return self.__form
    # Walk up the parent chain until a form element (or the top) is reached.
    ancestor = self.parentNode
    while ancestor and not isinstance(ancestor, HTMLFormElement):
        ancestor = ancestor.parentNode
    return ancestor if ancestor else None
constant[Get ``HTMLFormElement`` object related to this node.]
if name[self].__form begin[:]
return[name[self].__form]
variable[parent] assign[=] name[self].parentNode
while name[parent] begin[:]
if call[name[isinstance], parameter[name[parent], name[HTMLFormElement]]] begin[:]
return[name[parent]]
return[constant[None]] | keyword[def] identifier[form] ( identifier[self] )-> identifier[Optional] [ literal[string] ]:
literal[string]
keyword[if] identifier[self] . identifier[__form] :
keyword[return] identifier[self] . identifier[__form]
identifier[parent] = identifier[self] . identifier[parentNode]
keyword[while] identifier[parent] :
keyword[if] identifier[isinstance] ( identifier[parent] , identifier[HTMLFormElement] ):
keyword[return] identifier[parent]
keyword[else] :
identifier[parent] = identifier[parent] . identifier[parentNode]
keyword[return] keyword[None] | def form(self) -> Optional['HTMLFormElement']:
"""Get ``HTMLFormElement`` object related to this node."""
if self.__form:
return self.__form # depends on [control=['if'], data=[]]
parent = self.parentNode
while parent:
if isinstance(parent, HTMLFormElement):
return parent # depends on [control=['if'], data=[]]
else:
parent = parent.parentNode # depends on [control=['while'], data=[]]
return None |
def project_run_path(cls, project, transfer_config, run):
"""Return a fully-qualified project_run string."""
return google.api_core.path_template.expand(
"projects/{project}/transferConfigs/{transfer_config}/runs/{run}",
project=project,
transfer_config=transfer_config,
run=run,
) | def function[project_run_path, parameter[cls, project, transfer_config, run]]:
constant[Return a fully-qualified project_run string.]
return[call[name[google].api_core.path_template.expand, parameter[constant[projects/{project}/transferConfigs/{transfer_config}/runs/{run}]]]] | keyword[def] identifier[project_run_path] ( identifier[cls] , identifier[project] , identifier[transfer_config] , identifier[run] ):
literal[string]
keyword[return] identifier[google] . identifier[api_core] . identifier[path_template] . identifier[expand] (
literal[string] ,
identifier[project] = identifier[project] ,
identifier[transfer_config] = identifier[transfer_config] ,
identifier[run] = identifier[run] ,
) | def project_run_path(cls, project, transfer_config, run):
"""Return a fully-qualified project_run string."""
return google.api_core.path_template.expand('projects/{project}/transferConfigs/{transfer_config}/runs/{run}', project=project, transfer_config=transfer_config, run=run) |
def checkAvailable(from_module):
    """Call me at checkConfig time to properly report config error
    if neither txrequests or treq is installed
    """
    missing_txrequests = txrequests is None
    missing_treq = treq is None
    if missing_txrequests and missing_treq:
        # Surface a config-time error naming the module that needs HTTP support.
        config.error(
            "neither txrequests nor treq is installed, but {} is requiring it\n\n{}".format(
                from_module, HTTPClientService.TREQ_PROS_AND_CONS))
constant[Call me at checkConfig time to properly report config error
if neither txrequests or treq is installed
]
if <ast.BoolOp object at 0x7da1b21e3f40> begin[:]
call[name[config].error, parameter[call[constant[neither txrequests nor treq is installed, but {} is requiring it
{}].format, parameter[name[from_module], name[HTTPClientService].TREQ_PROS_AND_CONS]]]] | keyword[def] identifier[checkAvailable] ( identifier[from_module] ):
literal[string]
keyword[if] identifier[txrequests] keyword[is] keyword[None] keyword[and] identifier[treq] keyword[is] keyword[None] :
identifier[config] . identifier[error] ( literal[string] . identifier[format] (
identifier[from_module] , identifier[HTTPClientService] . identifier[TREQ_PROS_AND_CONS] )) | def checkAvailable(from_module):
"""Call me at checkConfig time to properly report config error
if neither txrequests or treq is installed
"""
if txrequests is None and treq is None:
config.error('neither txrequests nor treq is installed, but {} is requiring it\n\n{}'.format(from_module, HTTPClientService.TREQ_PROS_AND_CONS)) # depends on [control=['if'], data=[]] |
def serialize(self, pid, record, links_factory=None):
    """Serialize a single record and persistent identifier.

    :param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
        instance.
    :param record: The :class:`invenio_records.api.Record` instance.
    :param links_factory: Factory function for the link generation,
        which are added to the response.
    :returns: The object serialized.
    """
    transformed = self.transform_record(pid, record, links_factory)
    return dumps(transformed, **self.dumps_kwargs)
constant[Serialize a single record and persistent identifier.
:param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
instance.
:param record: The :class:`invenio_records.api.Record` instance.
:param links_factory: Factory function for the link generation,
which are added to the response.
:returns: The object serialized.
]
return[call[name[dumps], parameter[call[name[self].transform_record, parameter[name[pid], name[record], name[links_factory]]]]]] | keyword[def] identifier[serialize] ( identifier[self] , identifier[pid] , identifier[record] , identifier[links_factory] = keyword[None] ):
literal[string]
keyword[return] identifier[dumps] ( identifier[self] . identifier[transform_record] ( identifier[pid] , identifier[record] , identifier[links_factory] ),
** identifier[self] . identifier[dumps_kwargs] ) | def serialize(self, pid, record, links_factory=None):
"""Serialize a single record and persistent identifier.
:param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
instance.
:param record: The :class:`invenio_records.api.Record` instance.
:param links_factory: Factory function for the link generation,
which are added to the response.
:returns: The object serialized.
"""
return dumps(self.transform_record(pid, record, links_factory), **self.dumps_kwargs) |
def get_module_name(self, path_args):
    """Resolve the deepest known module name from the path arguments.

    Starting from ``self.controller_prefix``, greedily consume leading
    path segments while the dotted name stays in ``self.module_names``.
    ``path_args`` is mutated in place (consumed segments are removed).

    return -- tuple -- (module_name, path_args)
    """
    known = self.module_names
    module_name = self.controller_prefix
    candidate = module_name
    while path_args:
        candidate = candidate + "." + path_args[0]
        if candidate not in known:
            break
        module_name = candidate
        path_args.pop(0)
    return module_name, path_args
constant[returns the module_name and remaining path args.
return -- tuple -- (module_name, path_args)]
variable[controller_prefix] assign[=] name[self].controller_prefix
variable[cset] assign[=] name[self].module_names
variable[module_name] assign[=] name[controller_prefix]
variable[mod_name] assign[=] name[module_name]
while name[path_args] begin[:]
<ast.AugAssign object at 0x7da1b0402320>
if compare[name[mod_name] in name[cset]] begin[:]
variable[module_name] assign[=] name[mod_name]
call[name[path_args].pop, parameter[constant[0]]]
return[tuple[[<ast.Name object at 0x7da1b04020e0>, <ast.Name object at 0x7da1b0403a00>]]] | keyword[def] identifier[get_module_name] ( identifier[self] , identifier[path_args] ):
literal[string]
identifier[controller_prefix] = identifier[self] . identifier[controller_prefix]
identifier[cset] = identifier[self] . identifier[module_names]
identifier[module_name] = identifier[controller_prefix]
identifier[mod_name] = identifier[module_name]
keyword[while] identifier[path_args] :
identifier[mod_name] += literal[string] + identifier[path_args] [ literal[int] ]
keyword[if] identifier[mod_name] keyword[in] identifier[cset] :
identifier[module_name] = identifier[mod_name]
identifier[path_args] . identifier[pop] ( literal[int] )
keyword[else] :
keyword[break]
keyword[return] identifier[module_name] , identifier[path_args] | def get_module_name(self, path_args):
"""returns the module_name and remaining path args.
return -- tuple -- (module_name, path_args)"""
controller_prefix = self.controller_prefix
cset = self.module_names
module_name = controller_prefix
mod_name = module_name
while path_args:
mod_name += '.' + path_args[0]
if mod_name in cset:
module_name = mod_name
path_args.pop(0) # depends on [control=['if'], data=['mod_name']]
else:
break # depends on [control=['while'], data=[]]
return (module_name, path_args) |
def lnlike(x, star):
    """Return the log likelihood given parameter vector `x`.

    :param x: sequence ``(per, t0, b)`` -- period, transit epoch and
        impact parameter.
    :param star: object providing ``time`` and an ``lnlike(model,
        full_output=True)`` method returning ``(like, d, vard)``.
    :returns: tuple ``(ll, (d,))`` where ``ll`` is prior + likelihood.
    """
    ll = lnprior(x)
    if np.isinf(ll):
        # Prior rules this sample out; skip the expensive model evaluation.
        return ll, (np.nan, np.nan)
    per, t0, b = x
    model = TransitModel('b', per=per, t0=t0, b=b, rhos=10.)(star.time)
    # Third element of the full output (variance term) is unused here.
    like, d, _ = star.lnlike(model, full_output=True)
    ll += like
    return ll, (d,)
constant[Return the log likelihood given parameter vector `x`.]
variable[ll] assign[=] call[name[lnprior], parameter[name[x]]]
if call[name[np].isinf, parameter[name[ll]]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b0e0fd90>, <ast.Tuple object at 0x7da1b0e0ecb0>]]]
<ast.Tuple object at 0x7da1b0e0fbe0> assign[=] name[x]
variable[model] assign[=] call[call[name[TransitModel], parameter[constant[b]]], parameter[name[star].time]]
<ast.Tuple object at 0x7da1b0e0f190> assign[=] call[name[star].lnlike, parameter[name[model]]]
<ast.AugAssign object at 0x7da1b0e0e800>
return[tuple[[<ast.Name object at 0x7da1b0e0c310>, <ast.Tuple object at 0x7da1b0e0ca00>]]] | keyword[def] identifier[lnlike] ( identifier[x] , identifier[star] ):
literal[string]
identifier[ll] = identifier[lnprior] ( identifier[x] )
keyword[if] identifier[np] . identifier[isinf] ( identifier[ll] ):
keyword[return] identifier[ll] ,( identifier[np] . identifier[nan] , identifier[np] . identifier[nan] )
identifier[per] , identifier[t0] , identifier[b] = identifier[x]
identifier[model] = identifier[TransitModel] ( literal[string] , identifier[per] = identifier[per] , identifier[t0] = identifier[t0] , identifier[b] = identifier[b] , identifier[rhos] = literal[int] )( identifier[star] . identifier[time] )
identifier[like] , identifier[d] , identifier[vard] = identifier[star] . identifier[lnlike] ( identifier[model] , identifier[full_output] = keyword[True] )
identifier[ll] += identifier[like]
keyword[return] identifier[ll] ,( identifier[d] ,) | def lnlike(x, star):
"""Return the log likelihood given parameter vector `x`."""
ll = lnprior(x)
if np.isinf(ll):
return (ll, (np.nan, np.nan)) # depends on [control=['if'], data=[]]
(per, t0, b) = x
model = TransitModel('b', per=per, t0=t0, b=b, rhos=10.0)(star.time)
(like, d, vard) = star.lnlike(model, full_output=True)
ll += like
return (ll, (d,)) |
def put(self, obj=None):
    """
    Enqueue an object, waking the first thread waiting for a result, if one
    exists.
    :param obj:
        Object to enqueue. Defaults to :data:`None` as a convenience when
        using :class:`Latch` only for synchronization.
    :raises mitogen.core.LatchError:
        :meth:`close` has been called, and the object is no longer valid.
    """
    # Short-circuit guard: skip the debug call entirely unless verbose
    # logging is enabled.
    _vv and IOLOG.debug('%r.put(%r)', self, obj)
    self._lock.acquire()
    try:
        if self.closed:
            raise LatchError()
        self._queue.append(obj)
        # self._sleeping holds (wake socket, cookie) pairs for blocked
        # waiters; self._waking counts how many of them have already been
        # signalled. If any sleeper is still unsignalled, wake the next
        # one in list order.
        if self._waking < len(self._sleeping):
            wsock, cookie = self._sleeping[self._waking]
            self._waking += 1
            _vv and IOLOG.debug('%r.put() -> waking wfd=%r',
                                self, wsock.fileno())
            # Signal the chosen waiter through its wake socket -- see
            # _wake() for the actual mechanism.
            self._wake(wsock, cookie)
        elif self.notify:
            # No sleeper to wake: invoke the optional notify callback
            # instead.
            self.notify(self)
    finally:
        # Always drop the lock, even if _wake()/notify() raised.
        self._lock.release()
constant[
Enqueue an object, waking the first thread waiting for a result, if one
exists.
:param obj:
Object to enqueue. Defaults to :data:`None` as a convenience when
using :class:`Latch` only for synchronization.
:raises mitogen.core.LatchError:
:meth:`close` has been called, and the object is no longer valid.
]
<ast.BoolOp object at 0x7da1b1d50580>
call[name[self]._lock.acquire, parameter[]]
<ast.Try object at 0x7da1b1d504c0> | keyword[def] identifier[put] ( identifier[self] , identifier[obj] = keyword[None] ):
literal[string]
identifier[_vv] keyword[and] identifier[IOLOG] . identifier[debug] ( literal[string] , identifier[self] , identifier[obj] )
identifier[self] . identifier[_lock] . identifier[acquire] ()
keyword[try] :
keyword[if] identifier[self] . identifier[closed] :
keyword[raise] identifier[LatchError] ()
identifier[self] . identifier[_queue] . identifier[append] ( identifier[obj] )
keyword[if] identifier[self] . identifier[_waking] < identifier[len] ( identifier[self] . identifier[_sleeping] ):
identifier[wsock] , identifier[cookie] = identifier[self] . identifier[_sleeping] [ identifier[self] . identifier[_waking] ]
identifier[self] . identifier[_waking] += literal[int]
identifier[_vv] keyword[and] identifier[IOLOG] . identifier[debug] ( literal[string] ,
identifier[self] , identifier[wsock] . identifier[fileno] ())
identifier[self] . identifier[_wake] ( identifier[wsock] , identifier[cookie] )
keyword[elif] identifier[self] . identifier[notify] :
identifier[self] . identifier[notify] ( identifier[self] )
keyword[finally] :
identifier[self] . identifier[_lock] . identifier[release] () | def put(self, obj=None):
"""
Enqueue an object, waking the first thread waiting for a result, if one
exists.
:param obj:
Object to enqueue. Defaults to :data:`None` as a convenience when
using :class:`Latch` only for synchronization.
:raises mitogen.core.LatchError:
:meth:`close` has been called, and the object is no longer valid.
"""
_vv and IOLOG.debug('%r.put(%r)', self, obj)
self._lock.acquire()
try:
if self.closed:
raise LatchError() # depends on [control=['if'], data=[]]
self._queue.append(obj)
if self._waking < len(self._sleeping):
(wsock, cookie) = self._sleeping[self._waking]
self._waking += 1
_vv and IOLOG.debug('%r.put() -> waking wfd=%r', self, wsock.fileno())
self._wake(wsock, cookie) # depends on [control=['if'], data=[]]
elif self.notify:
self.notify(self) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
finally:
self._lock.release() |
def then_the_return_element_have_no_key(context, line, key):
    """Assert that response row ``line`` does not contain ``key``.

    :param context: behave.runner.Context
    :param line: str -- zero-based row index into the response data
    :param key: str -- key that must be absent from that row
    """
    index = int(line)
    payload = context.apiRequest.data
    assert 0 <= index < len(payload)
    assert key not in payload[index]
constant[
:param context: behave.runner.Context
:param line: str
:param key: str
]
variable[line] assign[=] call[name[int], parameter[name[line]]]
variable[data] assign[=] name[context].apiRequest.data
assert[compare[constant[0] less_or_equal[<=] name[line]]]
assert[compare[name[key] <ast.NotIn object at 0x7da2590d7190> call[name[data]][name[line]]]] | keyword[def] identifier[then_the_return_element_have_no_key] ( identifier[context] , identifier[line] , identifier[key] ):
literal[string]
identifier[line] = identifier[int] ( identifier[line] )
identifier[data] = identifier[context] . identifier[apiRequest] . identifier[data]
keyword[assert] literal[int] <= identifier[line] < identifier[len] ( identifier[data] )
keyword[assert] identifier[key] keyword[not] keyword[in] identifier[data] [ identifier[line] ] | def then_the_return_element_have_no_key(context, line, key):
"""
:param context: behave.runner.Context
:param line: str
:param key: str
"""
line = int(line)
data = context.apiRequest.data
assert 0 <= line < len(data)
assert key not in data[line] |
def get_new_broks(self):
    """Collect freshly produced broks from every known satellite.

    Satellites are polled group by group (schedulers, pollers,
    reactionners, receivers); broks received are tagged with the
    originating satellite instance id and appended to
    ``self.external_broks``. A connection failure on one satellite is
    logged and does not stop the collection from the others.

    :return: None
    """
    groups = (self.schedulers, self.pollers, self.reactionners, self.receivers)
    for group in groups:
        for link in list(group.values()):
            logger.debug("Getting broks from %s", link)
            start = time.time()
            try:
                new_broks = link.get_broks(self.name)
            except LinkError:
                logger.warning("Daemon %s connection failed, I could not get the broks!",
                               link)
                continue
            if not new_broks:
                continue
            logger.debug("Got %d Broks from %s in %s",
                         len(new_broks), link.name, time.time() - start)
            statsmgr.gauge('get-new-broks-count.%s'
                           % (link.name), len(new_broks))
            statsmgr.timer('get-new-broks-time.%s'
                           % (link.name), time.time() - start)
            for brok in new_broks:
                brok.instance_id = link.instance_id
            # Add the broks to our global list
            self.external_broks.extend(new_broks)
constant[Get new broks from our satellites
:return: None
]
for taget[name[satellites]] in starred[list[[<ast.Attribute object at 0x7da18dc04c70>, <ast.Attribute object at 0x7da18dc07ee0>, <ast.Attribute object at 0x7da18dc06200>, <ast.Attribute object at 0x7da18dc078b0>]]] begin[:]
for taget[name[satellite_link]] in starred[call[name[list], parameter[call[name[satellites].values, parameter[]]]]] begin[:]
call[name[logger].debug, parameter[constant[Getting broks from %s], name[satellite_link]]]
variable[_t0] assign[=] call[name[time].time, parameter[]]
<ast.Try object at 0x7da18dc06920> | keyword[def] identifier[get_new_broks] ( identifier[self] ):
literal[string]
keyword[for] identifier[satellites] keyword[in] [ identifier[self] . identifier[schedulers] , identifier[self] . identifier[pollers] , identifier[self] . identifier[reactionners] , identifier[self] . identifier[receivers] ]:
keyword[for] identifier[satellite_link] keyword[in] identifier[list] ( identifier[satellites] . identifier[values] ()):
identifier[logger] . identifier[debug] ( literal[string] , identifier[satellite_link] )
identifier[_t0] = identifier[time] . identifier[time] ()
keyword[try] :
identifier[tmp_broks] = identifier[satellite_link] . identifier[get_broks] ( identifier[self] . identifier[name] )
keyword[except] identifier[LinkError] :
identifier[logger] . identifier[warning] ( literal[string] ,
identifier[satellite_link] )
keyword[else] :
keyword[if] identifier[tmp_broks] :
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[len] ( identifier[tmp_broks] ), identifier[satellite_link] . identifier[name] , identifier[time] . identifier[time] ()- identifier[_t0] )
identifier[statsmgr] . identifier[gauge] ( literal[string]
%( identifier[satellite_link] . identifier[name] ), identifier[len] ( identifier[tmp_broks] ))
identifier[statsmgr] . identifier[timer] ( literal[string]
%( identifier[satellite_link] . identifier[name] ), identifier[time] . identifier[time] ()- identifier[_t0] )
keyword[for] identifier[brok] keyword[in] identifier[tmp_broks] :
identifier[brok] . identifier[instance_id] = identifier[satellite_link] . identifier[instance_id]
identifier[self] . identifier[external_broks] . identifier[extend] ( identifier[tmp_broks] ) | def get_new_broks(self):
"""Get new broks from our satellites
:return: None
"""
for satellites in [self.schedulers, self.pollers, self.reactionners, self.receivers]:
for satellite_link in list(satellites.values()):
logger.debug('Getting broks from %s', satellite_link)
_t0 = time.time()
try:
tmp_broks = satellite_link.get_broks(self.name) # depends on [control=['try'], data=[]]
except LinkError:
logger.warning('Daemon %s connection failed, I could not get the broks!', satellite_link) # depends on [control=['except'], data=[]]
else:
if tmp_broks:
logger.debug('Got %d Broks from %s in %s', len(tmp_broks), satellite_link.name, time.time() - _t0)
statsmgr.gauge('get-new-broks-count.%s' % satellite_link.name, len(tmp_broks))
statsmgr.timer('get-new-broks-time.%s' % satellite_link.name, time.time() - _t0)
for brok in tmp_broks:
brok.instance_id = satellite_link.instance_id # depends on [control=['for'], data=['brok']]
# Add the broks to our global list
self.external_broks.extend(tmp_broks) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['satellite_link']] # depends on [control=['for'], data=['satellites']] |
def render_from_path(path, context=None, globals=None):
    """Render a templated yaml document located at ``path``.

    :param path: path to the yaml file to process; ``~`` is expanded and
        the path is made absolute before loading.
    :param context: optional context overlaid on the yaml file; its values
        override any values defined in the document itself.
    :param globals: dictionary of globally-accessible objects made
        available inside the rendered template.
    :return: dict holding the final overlaid configuration.
    """
    expanded = os.path.expanduser(path)
    absolute = os.path.abspath(expanded)
    yaml_doc = resolver.TYamlResolver.new_from_path(absolute)
    return yaml_doc.resolve(Context(context), globals)._data
constant[
Renders a templated yaml document from file path.
:param path: A path to the yaml file to process.
:param context: A context to overlay on the yaml file. This will override any yaml values.
:param globals: A dictionary of globally-accessible objects within the rendered template.
:return: A dict with the final overlayed configuration.
]
variable[abs_source] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.expanduser, parameter[name[path]]]]]
variable[yaml_resolver] assign[=] call[name[resolver].TYamlResolver.new_from_path, parameter[name[abs_source]]]
return[call[name[yaml_resolver].resolve, parameter[call[name[Context], parameter[name[context]]], name[globals]]]._data] | keyword[def] identifier[render_from_path] ( identifier[path] , identifier[context] = keyword[None] , identifier[globals] = keyword[None] ):
literal[string]
identifier[abs_source] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[path] ))
identifier[yaml_resolver] = identifier[resolver] . identifier[TYamlResolver] . identifier[new_from_path] ( identifier[abs_source] )
keyword[return] identifier[yaml_resolver] . identifier[resolve] ( identifier[Context] ( identifier[context] ), identifier[globals] ). identifier[_data] | def render_from_path(path, context=None, globals=None):
"""
Renders a templated yaml document from file path.
:param path: A path to the yaml file to process.
:param context: A context to overlay on the yaml file. This will override any yaml values.
:param globals: A dictionary of globally-accessible objects within the rendered template.
:return: A dict with the final overlayed configuration.
"""
abs_source = os.path.abspath(os.path.expanduser(path))
yaml_resolver = resolver.TYamlResolver.new_from_path(abs_source)
return yaml_resolver.resolve(Context(context), globals)._data |
def get_output(script, expanded):
    """Run ``expanded`` in a shell and return its combined stdout/stderr.

    Returns ``None`` when the process does not produce output within the
    limit enforced by ``_wait_output`` (commands listed in
    ``settings.slow_commands`` get the more lenient "slow" treatment).

    :type script: str
    :type expanded: str
    :rtype: str | None
    """
    env = dict(os.environ)
    env.update(settings.env)
    is_slow = shlex.split(expanded) in settings.slow_commands
    with logs.debug_time(u'Call: {}; with env: {}; is slow: '.format(
            script, env, is_slow)):
        proc = Popen(expanded, shell=True, stdin=PIPE,
                     stdout=PIPE, stderr=STDOUT, env=env)
        if not _wait_output(proc, is_slow):
            logs.debug(u'Execution timed out!')
            return None
        output = proc.stdout.read().decode('utf-8')
        logs.debug(u'Received output: {}'.format(output))
        return output
constant[Runs the script and obtains stdin/stderr.
:type script: str
:type expanded: str
:rtype: str | None
]
variable[env] assign[=] call[name[dict], parameter[name[os].environ]]
call[name[env].update, parameter[name[settings].env]]
variable[is_slow] assign[=] compare[call[name[shlex].split, parameter[name[expanded]]] in name[settings].slow_commands]
with call[name[logs].debug_time, parameter[call[constant[Call: {}; with env: {}; is slow: ].format, parameter[name[script], name[env], name[is_slow]]]]] begin[:]
variable[result] assign[=] call[name[Popen], parameter[name[expanded]]]
if call[name[_wait_output], parameter[name[result], name[is_slow]]] begin[:]
variable[output] assign[=] call[call[name[result].stdout.read, parameter[]].decode, parameter[constant[utf-8]]]
call[name[logs].debug, parameter[call[constant[Received output: {}].format, parameter[name[output]]]]]
return[name[output]] | keyword[def] identifier[get_output] ( identifier[script] , identifier[expanded] ):
literal[string]
identifier[env] = identifier[dict] ( identifier[os] . identifier[environ] )
identifier[env] . identifier[update] ( identifier[settings] . identifier[env] )
identifier[is_slow] = identifier[shlex] . identifier[split] ( identifier[expanded] ) keyword[in] identifier[settings] . identifier[slow_commands]
keyword[with] identifier[logs] . identifier[debug_time] ( literal[string] . identifier[format] (
identifier[script] , identifier[env] , identifier[is_slow] )):
identifier[result] = identifier[Popen] ( identifier[expanded] , identifier[shell] = keyword[True] , identifier[stdin] = identifier[PIPE] ,
identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[STDOUT] , identifier[env] = identifier[env] )
keyword[if] identifier[_wait_output] ( identifier[result] , identifier[is_slow] ):
identifier[output] = identifier[result] . identifier[stdout] . identifier[read] (). identifier[decode] ( literal[string] )
identifier[logs] . identifier[debug] ( literal[string] . identifier[format] ( identifier[output] ))
keyword[return] identifier[output]
keyword[else] :
identifier[logs] . identifier[debug] ( literal[string] )
keyword[return] keyword[None] | def get_output(script, expanded):
"""Runs the script and obtains stdin/stderr.
:type script: str
:type expanded: str
:rtype: str | None
"""
env = dict(os.environ)
env.update(settings.env)
is_slow = shlex.split(expanded) in settings.slow_commands
with logs.debug_time(u'Call: {}; with env: {}; is slow: '.format(script, env, is_slow)):
result = Popen(expanded, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, env=env)
if _wait_output(result, is_slow):
output = result.stdout.read().decode('utf-8')
logs.debug(u'Received output: {}'.format(output))
return output # depends on [control=['if'], data=[]]
else:
logs.debug(u'Execution timed out!')
return None # depends on [control=['with'], data=[]] |
def export(self):
    """
    This method deactivates the security context for the calling process and returns an
    interprocess token which, when passed to :meth:`imprt` in another process, will re-activate
    the context in the second process. Only a single instantiation of a given context may be
    active at any one time; attempting to access this security context after calling
    :meth:`export` will fail. This method can only be used on a valid context where
    :attr:`is_transferable` is True.
    :returns: a token which represents this security context
    :rtype: bytes
    """
    # Export is only permitted when GSS_C_TRANS_FLAG was granted during
    # context establishment.
    if not (self.flags & C.GSS_C_TRANS_FLAG):
        raise GSSException("Context is not transferable.")
    if not self._ctx:
        raise GSSException("Can't export empty/invalid context.")
    # Out-parameters for the C call: a minor-status word and the buffer
    # that will receive the serialized context token.
    minor_status = ffi.new('OM_uint32[1]')
    output_token_buffer = ffi.new('gss_buffer_desc[1]')
    retval = C.gss_export_sec_context(
        minor_status,
        self._ctx,
        output_token_buffer
    )
    try:
        if GSS_ERROR(retval):
            # Translate the status codes into a Python exception, using
            # the mechanism-specific mapping when a mech type is known.
            if minor_status[0] and self.mech_type:
                raise _exception_for_status(retval, minor_status[0], self.mech_type)
            else:
                raise _exception_for_status(retval, minor_status[0])
        exported_token = _buf_to_str(output_token_buffer[0])
        # Set our context to a 'blank' context
        self._ctx = ffi.new('gss_ctx_id_t[1]')
        return exported_token
    finally:
        # Release the library-allocated token buffer even on the error
        # paths above (a zero length means nothing was allocated).
        if output_token_buffer[0].length != 0:
            C.gss_release_buffer(minor_status, output_token_buffer)
constant[
This method deactivates the security context for the calling process and returns an
interprocess token which, when passed to :meth:`imprt` in another process, will re-activate
the context in the second process. Only a single instantiation of a given context may be
active at any one time; attempting to access this security context after calling
:meth:`export` will fail. This method can only be used on a valid context where
:attr:`is_transferable` is True.
:returns: a token which represents this security context
:rtype: bytes
]
if <ast.UnaryOp object at 0x7da1b26ad300> begin[:]
<ast.Raise object at 0x7da18fe928f0>
if <ast.UnaryOp object at 0x7da18fe92470> begin[:]
<ast.Raise object at 0x7da1b023ea10>
variable[minor_status] assign[=] call[name[ffi].new, parameter[constant[OM_uint32[1]]]]
variable[output_token_buffer] assign[=] call[name[ffi].new, parameter[constant[gss_buffer_desc[1]]]]
variable[retval] assign[=] call[name[C].gss_export_sec_context, parameter[name[minor_status], name[self]._ctx, name[output_token_buffer]]]
<ast.Try object at 0x7da1b023d930> | keyword[def] identifier[export] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] ( identifier[self] . identifier[flags] & identifier[C] . identifier[GSS_C_TRANS_FLAG] ):
keyword[raise] identifier[GSSException] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[_ctx] :
keyword[raise] identifier[GSSException] ( literal[string] )
identifier[minor_status] = identifier[ffi] . identifier[new] ( literal[string] )
identifier[output_token_buffer] = identifier[ffi] . identifier[new] ( literal[string] )
identifier[retval] = identifier[C] . identifier[gss_export_sec_context] (
identifier[minor_status] ,
identifier[self] . identifier[_ctx] ,
identifier[output_token_buffer]
)
keyword[try] :
keyword[if] identifier[GSS_ERROR] ( identifier[retval] ):
keyword[if] identifier[minor_status] [ literal[int] ] keyword[and] identifier[self] . identifier[mech_type] :
keyword[raise] identifier[_exception_for_status] ( identifier[retval] , identifier[minor_status] [ literal[int] ], identifier[self] . identifier[mech_type] )
keyword[else] :
keyword[raise] identifier[_exception_for_status] ( identifier[retval] , identifier[minor_status] [ literal[int] ])
identifier[exported_token] = identifier[_buf_to_str] ( identifier[output_token_buffer] [ literal[int] ])
identifier[self] . identifier[_ctx] = identifier[ffi] . identifier[new] ( literal[string] )
keyword[return] identifier[exported_token]
keyword[finally] :
keyword[if] identifier[output_token_buffer] [ literal[int] ]. identifier[length] != literal[int] :
identifier[C] . identifier[gss_release_buffer] ( identifier[minor_status] , identifier[output_token_buffer] ) | def export(self):
"""
This method deactivates the security context for the calling process and returns an
interprocess token which, when passed to :meth:`imprt` in another process, will re-activate
the context in the second process. Only a single instantiation of a given context may be
active at any one time; attempting to access this security context after calling
:meth:`export` will fail. This method can only be used on a valid context where
:attr:`is_transferable` is True.
:returns: a token which represents this security context
:rtype: bytes
"""
if not self.flags & C.GSS_C_TRANS_FLAG:
raise GSSException('Context is not transferable.') # depends on [control=['if'], data=[]]
if not self._ctx:
raise GSSException("Can't export empty/invalid context.") # depends on [control=['if'], data=[]]
minor_status = ffi.new('OM_uint32[1]')
output_token_buffer = ffi.new('gss_buffer_desc[1]')
retval = C.gss_export_sec_context(minor_status, self._ctx, output_token_buffer)
try:
if GSS_ERROR(retval):
if minor_status[0] and self.mech_type:
raise _exception_for_status(retval, minor_status[0], self.mech_type) # depends on [control=['if'], data=[]]
else:
raise _exception_for_status(retval, minor_status[0]) # depends on [control=['if'], data=[]]
exported_token = _buf_to_str(output_token_buffer[0])
# Set our context to a 'blank' context
self._ctx = ffi.new('gss_ctx_id_t[1]')
return exported_token # depends on [control=['try'], data=[]]
finally:
if output_token_buffer[0].length != 0:
C.gss_release_buffer(minor_status, output_token_buffer) # depends on [control=['if'], data=[]] |
def add_training_sample(self, text=u'', lang=''):
    """Stage one labelled text sample for training.

    The sample is only queued on the underlying trainer; call
    ``save_training_samples()`` afterwards to persist it.

    :param text: sample text to stage.
    :param lang: language label associated with ``text``.
    """
    self.trainer.add(text=text, lang=lang)
constant[ Initial step for adding new sample to training data.
You need to call `save_training_samples()` afterwards.
:param text: Sample text to be added.
:param lang: Language label for the input text.
]
call[name[self].trainer.add, parameter[]] | keyword[def] identifier[add_training_sample] ( identifier[self] , identifier[text] = literal[string] , identifier[lang] = literal[string] ):
literal[string]
identifier[self] . identifier[trainer] . identifier[add] ( identifier[text] = identifier[text] , identifier[lang] = identifier[lang] ) | def add_training_sample(self, text=u'', lang=''):
""" Initial step for adding new sample to training data.
You need to call `save_training_samples()` afterwards.
:param text: Sample text to be added.
:param lang: Language label for the input text.
"""
self.trainer.add(text=text, lang=lang) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.