code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def setOptions(self, options):
    """
    Creates a config object from the options object.

    Copies each recognized option from ``options`` onto this config object,
    applying per-option parsing and validation, and enforces a few
    cross-option invariants (workDir existence, stats vs. clean,
    autoscaling parameter ranges).

    :param options: namespace of parsed command line options
    :raises RuntimeError: if an option value fails validation or if two
        options contradict each other
    """
    from toil.lib.humanize import human2bytes # This import is used to convert
    # from human readable quantities to integers
    def setOption(varName, parsingFn=None, checkFn=None, default=None):
        """
        Copy the option named ``varName`` from ``options`` onto ``self``.

        :param varName: attribute name read from ``options`` and set on self
        :param parsingFn: optional callable applied to the raw value
        :param checkFn: optional validator, expected to raise AssertionError
            for invalid values
        :param default: fallback used when the option is absent or None
        """
        # If options object has the option "varName" specified
        # then set the "varName" attrib to this value in the config object
        x = getattr(options, varName, None)
        if x is None:
            x = default
        if x is not None:
            if parsingFn is not None:
                x = parsingFn(x)
            if checkFn is not None:
                try:
                    checkFn(x)
                except AssertionError:
                    raise RuntimeError("The %s option has an invalid value: %s"
                                       % (varName, x))
            # NOTE: only set when a non-None value was found or defaulted;
            # otherwise the existing attribute on self is left untouched.
            setattr(self, varName, x)
    # Function to parse integer from string expressed in different formats
    # (e.g. "2G" -> 2147483648)
    h2b = lambda x: human2bytes(str(x))
    def parseJobStore(s):
        # Split the locator into its type name and the remainder.
        name, rest = Toil.parseLocator(s)
        if name == 'file':
            # We need to resolve relative paths early, on the leader, because the worker process
            # may have a different working directory than the leader, e.g. under Mesos.
            return Toil.buildLocator(name, os.path.abspath(rest))
        else:
            return s
    def parseStrList(s):
        # Parse a comma-separated string into a list of strings.
        s = s.split(",")
        s = [str(x) for x in s]
        return s
    def parseIntList(s):
        # Parse a comma-separated string into a list of ints.
        s = s.split(",")
        s = [int(x) for x in s]
        return s
    # Core options
    setOption("jobStore", parsingFn=parseJobStore)
    # TODO: LOG LEVEL STRING
    setOption("workDir")
    if self.workDir is not None:
        self.workDir = os.path.abspath(self.workDir)
        if not os.path.exists(self.workDir):
            raise RuntimeError("The path provided to --workDir (%s) does not exist."
                               % self.workDir)
    setOption("stats")
    setOption("cleanWorkDir")
    setOption("clean")
    if self.stats:
        # Stats require the job store to survive the run, so clean is forced
        # to 'never'; any other explicit setting is a contradiction.
        if self.clean != "never" and self.clean is not None:
            raise RuntimeError("Contradicting options passed: Clean flag is set to %s "
                               "despite the stats flag requiring "
                               "the jobStore to be intact at the end of the run. "
                               "Set clean to \'never\'" % self.clean)
        self.clean = "never"
    elif self.clean is None:
        self.clean = "onSuccess"
    setOption('clusterStats')
    setOption("restart")
    # Batch system options
    setOption("batchSystem")
    setBatchOptions(self, setOption)
    setOption("disableAutoDeployment")
    setOption("scale", float, fC(0.0))
    setOption("mesosMasterAddress")
    setOption("parasolCommand")
    setOption("parasolMaxBatches", int, iC(1))
    setOption("linkImports")
    setOption("environment", parseSetEnv)
    # Autoscaling options
    setOption("provisioner")
    setOption("nodeTypes", parseStrList)
    setOption("nodeOptions")
    setOption("minNodes", parseIntList)
    setOption("maxNodes", parseIntList)
    setOption("targetTime", int)
    if self.targetTime <= 0:
        raise RuntimeError('targetTime (%s) must be a positive integer!'
                           '' % self.targetTime)
    setOption("betaInertia", float)
    if not 0.0 <= self.betaInertia <= 0.9:
        raise RuntimeError('betaInertia (%f) must be between 0.0 and 0.9!'
                           '' % self.betaInertia)
    setOption("scaleInterval", float)
    setOption("metrics")
    setOption("preemptableCompensation", float)
    if not 0.0 <= self.preemptableCompensation <= 1.0:
        raise RuntimeError('preemptableCompensation (%f) must be between 0.0 and 1.0!'
                           '' % self.preemptableCompensation)
    setOption("nodeStorage", int)
    # Parameters to limit service jobs / detect deadlocks
    setOption("maxServiceJobs", int)
    setOption("maxPreemptableServiceJobs", int)
    setOption("deadlockWait", int)
    setOption("statePollingWait", int)
    # Resource requirements
    setOption("defaultMemory", h2b, iC(1))
    setOption("defaultCores", float, fC(1.0))
    setOption("defaultDisk", h2b, iC(1))
    setOption("readGlobalFileMutableByDefault")
    setOption("maxCores", int, iC(1))
    setOption("maxMemory", h2b, iC(1))
    setOption("maxDisk", h2b, iC(1))
    setOption("defaultPreemptable")
    # Retrying/rescuing jobs
    setOption("retryCount", int, iC(1))
    setOption("maxJobDuration", int, iC(1))
    setOption("rescueJobsFrequency", int, iC(1))
    # Misc
    setOption("maxLocalJobs", int)
    setOption("disableCaching")
    setOption("disableChaining")
    setOption("maxLogFileSize", h2b, iC(1))
    setOption("writeLogs")
    setOption("writeLogsGzip")
    setOption("runCwlInternalJobsOnWorkers")
    def checkSse(sseKey):
        # Validate that the key file's first line is exactly 32 characters
        # (a 256-bit key) once trailing whitespace is stripped.
        with open(sseKey) as f:
            assert (len(f.readline().rstrip()) == 32)
    setOption("sseKey", checkFn=checkSse)
    setOption("cseKey", checkFn=checkSse)
    setOption("servicePollingInterval", float, fC(0.0))
    setOption("forceDockerAppliance")
    # Debug options
    setOption("debugWorker")
    setOption("badWorker", float, fC(0.0, 1.0))
    setOption("badWorkerFailInterval", float, fC(0.0))
constant[
Creates a config object from the options object.
]
from relative_module[toil.lib.humanize] import module[human2bytes]
def function[setOption, parameter[varName, parsingFn, checkFn, default]]:
variable[x] assign[=] call[name[getattr], parameter[name[options], name[varName], constant[None]]]
if compare[name[x] is constant[None]] begin[:]
variable[x] assign[=] name[default]
if compare[name[x] is_not constant[None]] begin[:]
if compare[name[parsingFn] is_not constant[None]] begin[:]
variable[x] assign[=] call[name[parsingFn], parameter[name[x]]]
if compare[name[checkFn] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b1eecc70>
call[name[setattr], parameter[name[self], name[varName], name[x]]]
variable[h2b] assign[=] <ast.Lambda object at 0x7da1b1eeed70>
def function[parseJobStore, parameter[s]]:
<ast.Tuple object at 0x7da1b1eeea10> assign[=] call[name[Toil].parseLocator, parameter[name[s]]]
if compare[name[name] equal[==] constant[file]] begin[:]
return[call[name[Toil].buildLocator, parameter[name[name], call[name[os].path.abspath, parameter[name[rest]]]]]]
def function[parseStrList, parameter[s]]:
variable[s] assign[=] call[name[s].split, parameter[constant[,]]]
variable[s] assign[=] <ast.ListComp object at 0x7da1b1eef9d0>
return[name[s]]
def function[parseIntList, parameter[s]]:
variable[s] assign[=] call[name[s].split, parameter[constant[,]]]
variable[s] assign[=] <ast.ListComp object at 0x7da1b1eefc10>
return[name[s]]
call[name[setOption], parameter[constant[jobStore]]]
call[name[setOption], parameter[constant[workDir]]]
if compare[name[self].workDir is_not constant[None]] begin[:]
name[self].workDir assign[=] call[name[os].path.abspath, parameter[name[self].workDir]]
if <ast.UnaryOp object at 0x7da1b2346080> begin[:]
<ast.Raise object at 0x7da1b1e5f610>
call[name[setOption], parameter[constant[stats]]]
call[name[setOption], parameter[constant[cleanWorkDir]]]
call[name[setOption], parameter[constant[clean]]]
if name[self].stats begin[:]
if <ast.BoolOp object at 0x7da1b1e5c5b0> begin[:]
<ast.Raise object at 0x7da1b1e5f040>
name[self].clean assign[=] constant[never]
call[name[setOption], parameter[constant[clusterStats]]]
call[name[setOption], parameter[constant[restart]]]
call[name[setOption], parameter[constant[batchSystem]]]
call[name[setBatchOptions], parameter[name[self], name[setOption]]]
call[name[setOption], parameter[constant[disableAutoDeployment]]]
call[name[setOption], parameter[constant[scale], name[float], call[name[fC], parameter[constant[0.0]]]]]
call[name[setOption], parameter[constant[mesosMasterAddress]]]
call[name[setOption], parameter[constant[parasolCommand]]]
call[name[setOption], parameter[constant[parasolMaxBatches], name[int], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[linkImports]]]
call[name[setOption], parameter[constant[environment], name[parseSetEnv]]]
call[name[setOption], parameter[constant[provisioner]]]
call[name[setOption], parameter[constant[nodeTypes], name[parseStrList]]]
call[name[setOption], parameter[constant[nodeOptions]]]
call[name[setOption], parameter[constant[minNodes], name[parseIntList]]]
call[name[setOption], parameter[constant[maxNodes], name[parseIntList]]]
call[name[setOption], parameter[constant[targetTime], name[int]]]
if compare[name[self].targetTime less_or_equal[<=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b1eeee30>
call[name[setOption], parameter[constant[betaInertia], name[float]]]
if <ast.UnaryOp object at 0x7da1b1eef970> begin[:]
<ast.Raise object at 0x7da1b1eec0d0>
call[name[setOption], parameter[constant[scaleInterval], name[float]]]
call[name[setOption], parameter[constant[metrics]]]
call[name[setOption], parameter[constant[preemptableCompensation], name[float]]]
if <ast.UnaryOp object at 0x7da20c796020> begin[:]
<ast.Raise object at 0x7da20c794850>
call[name[setOption], parameter[constant[nodeStorage], name[int]]]
call[name[setOption], parameter[constant[maxServiceJobs], name[int]]]
call[name[setOption], parameter[constant[maxPreemptableServiceJobs], name[int]]]
call[name[setOption], parameter[constant[deadlockWait], name[int]]]
call[name[setOption], parameter[constant[statePollingWait], name[int]]]
call[name[setOption], parameter[constant[defaultMemory], name[h2b], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[defaultCores], name[float], call[name[fC], parameter[constant[1.0]]]]]
call[name[setOption], parameter[constant[defaultDisk], name[h2b], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[readGlobalFileMutableByDefault]]]
call[name[setOption], parameter[constant[maxCores], name[int], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[maxMemory], name[h2b], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[maxDisk], name[h2b], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[defaultPreemptable]]]
call[name[setOption], parameter[constant[retryCount], name[int], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[maxJobDuration], name[int], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[rescueJobsFrequency], name[int], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[maxLocalJobs], name[int]]]
call[name[setOption], parameter[constant[disableCaching]]]
call[name[setOption], parameter[constant[disableChaining]]]
call[name[setOption], parameter[constant[maxLogFileSize], name[h2b], call[name[iC], parameter[constant[1]]]]]
call[name[setOption], parameter[constant[writeLogs]]]
call[name[setOption], parameter[constant[writeLogsGzip]]]
call[name[setOption], parameter[constant[runCwlInternalJobsOnWorkers]]]
def function[checkSse, parameter[sseKey]]:
with call[name[open], parameter[name[sseKey]]] begin[:]
assert[compare[call[name[len], parameter[call[call[name[f].readline, parameter[]].rstrip, parameter[]]]] equal[==] constant[32]]]
call[name[setOption], parameter[constant[sseKey]]]
call[name[setOption], parameter[constant[cseKey]]]
call[name[setOption], parameter[constant[servicePollingInterval], name[float], call[name[fC], parameter[constant[0.0]]]]]
call[name[setOption], parameter[constant[forceDockerAppliance]]]
call[name[setOption], parameter[constant[debugWorker]]]
call[name[setOption], parameter[constant[badWorker], name[float], call[name[fC], parameter[constant[0.0], constant[1.0]]]]]
call[name[setOption], parameter[constant[badWorkerFailInterval], name[float], call[name[fC], parameter[constant[0.0]]]]] | keyword[def] identifier[setOptions] ( identifier[self] , identifier[options] ):
literal[string]
keyword[from] identifier[toil] . identifier[lib] . identifier[humanize] keyword[import] identifier[human2bytes]
keyword[def] identifier[setOption] ( identifier[varName] , identifier[parsingFn] = keyword[None] , identifier[checkFn] = keyword[None] , identifier[default] = keyword[None] ):
identifier[x] = identifier[getattr] ( identifier[options] , identifier[varName] , keyword[None] )
keyword[if] identifier[x] keyword[is] keyword[None] :
identifier[x] = identifier[default]
keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[parsingFn] keyword[is] keyword[not] keyword[None] :
identifier[x] = identifier[parsingFn] ( identifier[x] )
keyword[if] identifier[checkFn] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[checkFn] ( identifier[x] )
keyword[except] identifier[AssertionError] :
keyword[raise] identifier[RuntimeError] ( literal[string]
%( identifier[varName] , identifier[x] ))
identifier[setattr] ( identifier[self] , identifier[varName] , identifier[x] )
identifier[h2b] = keyword[lambda] identifier[x] : identifier[human2bytes] ( identifier[str] ( identifier[x] ))
keyword[def] identifier[parseJobStore] ( identifier[s] ):
identifier[name] , identifier[rest] = identifier[Toil] . identifier[parseLocator] ( identifier[s] )
keyword[if] identifier[name] == literal[string] :
keyword[return] identifier[Toil] . identifier[buildLocator] ( identifier[name] , identifier[os] . identifier[path] . identifier[abspath] ( identifier[rest] ))
keyword[else] :
keyword[return] identifier[s]
keyword[def] identifier[parseStrList] ( identifier[s] ):
identifier[s] = identifier[s] . identifier[split] ( literal[string] )
identifier[s] =[ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[s] ]
keyword[return] identifier[s]
keyword[def] identifier[parseIntList] ( identifier[s] ):
identifier[s] = identifier[s] . identifier[split] ( literal[string] )
identifier[s] =[ identifier[int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[s] ]
keyword[return] identifier[s]
identifier[setOption] ( literal[string] , identifier[parsingFn] = identifier[parseJobStore] )
identifier[setOption] ( literal[string] )
keyword[if] identifier[self] . identifier[workDir] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[workDir] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[self] . identifier[workDir] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[workDir] ):
keyword[raise] identifier[RuntimeError] ( literal[string]
% identifier[self] . identifier[workDir] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] )
keyword[if] identifier[self] . identifier[stats] :
keyword[if] identifier[self] . identifier[clean] != literal[string] keyword[and] identifier[self] . identifier[clean] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
literal[string]
literal[string] % identifier[self] . identifier[clean] )
identifier[self] . identifier[clean] = literal[string]
keyword[elif] identifier[self] . identifier[clean] keyword[is] keyword[None] :
identifier[self] . identifier[clean] = literal[string]
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] )
identifier[setBatchOptions] ( identifier[self] , identifier[setOption] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[float] , identifier[fC] ( literal[int] ))
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[int] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[parseSetEnv] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[parseStrList] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[parseIntList] )
identifier[setOption] ( literal[string] , identifier[parseIntList] )
identifier[setOption] ( literal[string] , identifier[int] )
keyword[if] identifier[self] . identifier[targetTime] <= literal[int] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string] % identifier[self] . identifier[targetTime] )
identifier[setOption] ( literal[string] , identifier[float] )
keyword[if] keyword[not] literal[int] <= identifier[self] . identifier[betaInertia] <= literal[int] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string] % identifier[self] . identifier[betaInertia] )
identifier[setOption] ( literal[string] , identifier[float] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[float] )
keyword[if] keyword[not] literal[int] <= identifier[self] . identifier[preemptableCompensation] <= literal[int] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string] % identifier[self] . identifier[preemptableCompensation] )
identifier[setOption] ( literal[string] , identifier[int] )
identifier[setOption] ( literal[string] , identifier[int] )
identifier[setOption] ( literal[string] , identifier[int] )
identifier[setOption] ( literal[string] , identifier[int] )
identifier[setOption] ( literal[string] , identifier[int] )
identifier[setOption] ( literal[string] , identifier[h2b] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] , identifier[float] , identifier[fC] ( literal[int] ))
identifier[setOption] ( literal[string] , identifier[h2b] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[int] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] , identifier[h2b] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] , identifier[h2b] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[int] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] , identifier[int] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] , identifier[int] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] , identifier[int] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[h2b] , identifier[iC] ( literal[int] ))
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] )
keyword[def] identifier[checkSse] ( identifier[sseKey] ):
keyword[with] identifier[open] ( identifier[sseKey] ) keyword[as] identifier[f] :
keyword[assert] ( identifier[len] ( identifier[f] . identifier[readline] (). identifier[rstrip] ())== literal[int] )
identifier[setOption] ( literal[string] , identifier[checkFn] = identifier[checkSse] )
identifier[setOption] ( literal[string] , identifier[checkFn] = identifier[checkSse] )
identifier[setOption] ( literal[string] , identifier[float] , identifier[fC] ( literal[int] ))
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] )
identifier[setOption] ( literal[string] , identifier[float] , identifier[fC] ( literal[int] , literal[int] ))
identifier[setOption] ( literal[string] , identifier[float] , identifier[fC] ( literal[int] )) | def setOptions(self, options):
"""
Creates a config object from the options object.
"""
from toil.lib.humanize import human2bytes # This import is used to convert
# from human readable quantites to integers
def setOption(varName, parsingFn=None, checkFn=None, default=None):
# If options object has the option "varName" specified
# then set the "varName" attrib to this value in the config object
x = getattr(options, varName, None)
if x is None:
x = default # depends on [control=['if'], data=['x']]
if x is not None:
if parsingFn is not None:
x = parsingFn(x) # depends on [control=['if'], data=['parsingFn']]
if checkFn is not None:
try:
checkFn(x) # depends on [control=['try'], data=[]]
except AssertionError:
raise RuntimeError('The %s option has an invalid value: %s' % (varName, x)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['checkFn']]
setattr(self, varName, x) # depends on [control=['if'], data=['x']]
# Function to parse integer from string expressed in different formats
h2b = lambda x: human2bytes(str(x))
def parseJobStore(s):
(name, rest) = Toil.parseLocator(s)
if name == 'file':
# We need to resolve relative paths early, on the leader, because the worker process
# may have a different working directory than the leader, e.g. under Mesos.
return Toil.buildLocator(name, os.path.abspath(rest)) # depends on [control=['if'], data=['name']]
else:
return s
def parseStrList(s):
s = s.split(',')
s = [str(x) for x in s]
return s
def parseIntList(s):
s = s.split(',')
s = [int(x) for x in s]
return s
# Core options
setOption('jobStore', parsingFn=parseJobStore)
# TODO: LOG LEVEL STRING
setOption('workDir')
if self.workDir is not None:
self.workDir = os.path.abspath(self.workDir)
if not os.path.exists(self.workDir):
raise RuntimeError('The path provided to --workDir (%s) does not exist.' % self.workDir) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
setOption('stats')
setOption('cleanWorkDir')
setOption('clean')
if self.stats:
if self.clean != 'never' and self.clean is not None:
raise RuntimeError("Contradicting options passed: Clean flag is set to %s despite the stats flag requiring the jobStore to be intact at the end of the run. Set clean to 'never'" % self.clean) # depends on [control=['if'], data=[]]
self.clean = 'never' # depends on [control=['if'], data=[]]
elif self.clean is None:
self.clean = 'onSuccess' # depends on [control=['if'], data=[]]
setOption('clusterStats')
setOption('restart')
# Batch system options
setOption('batchSystem')
setBatchOptions(self, setOption)
setOption('disableAutoDeployment')
setOption('scale', float, fC(0.0))
setOption('mesosMasterAddress')
setOption('parasolCommand')
setOption('parasolMaxBatches', int, iC(1))
setOption('linkImports')
setOption('environment', parseSetEnv)
# Autoscaling options
setOption('provisioner')
setOption('nodeTypes', parseStrList)
setOption('nodeOptions')
setOption('minNodes', parseIntList)
setOption('maxNodes', parseIntList)
setOption('targetTime', int)
if self.targetTime <= 0:
raise RuntimeError('targetTime (%s) must be a positive integer!' % self.targetTime) # depends on [control=['if'], data=[]]
setOption('betaInertia', float)
if not 0.0 <= self.betaInertia <= 0.9:
raise RuntimeError('betaInertia (%f) must be between 0.0 and 0.9!' % self.betaInertia) # depends on [control=['if'], data=[]]
setOption('scaleInterval', float)
setOption('metrics')
setOption('preemptableCompensation', float)
if not 0.0 <= self.preemptableCompensation <= 1.0:
raise RuntimeError('preemptableCompensation (%f) must be between 0.0 and 1.0!' % self.preemptableCompensation) # depends on [control=['if'], data=[]]
setOption('nodeStorage', int)
# Parameters to limit service jobs / detect deadlocks
setOption('maxServiceJobs', int)
setOption('maxPreemptableServiceJobs', int)
setOption('deadlockWait', int)
setOption('statePollingWait', int)
# Resource requirements
setOption('defaultMemory', h2b, iC(1))
setOption('defaultCores', float, fC(1.0))
setOption('defaultDisk', h2b, iC(1))
setOption('readGlobalFileMutableByDefault')
setOption('maxCores', int, iC(1))
setOption('maxMemory', h2b, iC(1))
setOption('maxDisk', h2b, iC(1))
setOption('defaultPreemptable')
# Retrying/rescuing jobs
setOption('retryCount', int, iC(1))
setOption('maxJobDuration', int, iC(1))
setOption('rescueJobsFrequency', int, iC(1))
# Misc
setOption('maxLocalJobs', int)
setOption('disableCaching')
setOption('disableChaining')
setOption('maxLogFileSize', h2b, iC(1))
setOption('writeLogs')
setOption('writeLogsGzip')
setOption('runCwlInternalJobsOnWorkers')
def checkSse(sseKey):
with open(sseKey) as f:
assert len(f.readline().rstrip()) == 32 # depends on [control=['with'], data=['f']]
setOption('sseKey', checkFn=checkSse)
setOption('cseKey', checkFn=checkSse)
setOption('servicePollingInterval', float, fC(0.0))
setOption('forceDockerAppliance')
# Debug options
setOption('debugWorker')
setOption('badWorker', float, fC(0.0, 1.0))
setOption('badWorkerFailInterval', float, fC(0.0)) |
def add_category_tag(self, tags, append_lists=True):
    """
    Attach this category to one or more category tags (groups). Category
    tags provide drop down filters in the SMC UI by category tag.

    :param list tags: category tag names to attach to
    :param bool append_lists: when True (the default), append to any
        existing tags; when False, overwrite them
    :type tags: list(str)
    :return: None
    """
    resolved = element_resolver(tags)
    self.update(category_parent_ref=resolved, append_lists=append_lists)
constant[
Add this category to a category tag (group). This provides drop down
filters in the SMC UI by category tag.
:param list tags: category tag by name
:param bool append_lists: append to existing tags or overwrite
default: append)
:type tags: list(str)
:return: None
]
variable[tags] assign[=] call[name[element_resolver], parameter[name[tags]]]
call[name[self].update, parameter[]] | keyword[def] identifier[add_category_tag] ( identifier[self] , identifier[tags] , identifier[append_lists] = keyword[True] ):
literal[string]
identifier[tags] = identifier[element_resolver] ( identifier[tags] )
identifier[self] . identifier[update] (
identifier[category_parent_ref] = identifier[tags] ,
identifier[append_lists] = identifier[append_lists] ) | def add_category_tag(self, tags, append_lists=True):
"""
Add this category to a category tag (group). This provides drop down
filters in the SMC UI by category tag.
:param list tags: category tag by name
:param bool append_lists: append to existing tags or overwrite
default: append)
:type tags: list(str)
:return: None
"""
tags = element_resolver(tags)
self.update(category_parent_ref=tags, append_lists=append_lists) |
def decree(cls, path, concrete_start='', **kwargs):
    """
    Constructor for Decree binary analysis.

    :param str path: Path to binary to analyze
    :param str concrete_start: Concrete stdin to use before symbolic input
    :param kwargs: Forwarded to the Manticore constructor
    :return: Manticore instance, initialized with a Decree State
    :rtype: Manticore
    :raises Exception: if the file at *path* is not a valid DECREE binary
    """
    try:
        return cls(_make_decree(path, concrete_start), **kwargs)
    except KeyError as e:  # FIXME(mark) magic parsing for DECREE should raise better error
        # Chain the original KeyError so the underlying parse failure is
        # preserved in the traceback instead of being silently discarded.
        raise Exception(f'Invalid binary: {path}') from e
constant[
Constructor for Decree binary analysis.
:param str path: Path to binary to analyze
:param str concrete_start: Concrete stdin to use before symbolic input
:param kwargs: Forwarded to the Manticore constructor
:return: Manticore instance, initialized with a Decree State
:rtype: Manticore
]
<ast.Try object at 0x7da204346500> | keyword[def] identifier[decree] ( identifier[cls] , identifier[path] , identifier[concrete_start] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
keyword[return] identifier[cls] ( identifier[_make_decree] ( identifier[path] , identifier[concrete_start] ),** identifier[kwargs] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[Exception] ( literal[string] ) | def decree(cls, path, concrete_start='', **kwargs):
"""
Constructor for Decree binary analysis.
:param str path: Path to binary to analyze
:param str concrete_start: Concrete stdin to use before symbolic input
:param kwargs: Forwarded to the Manticore constructor
:return: Manticore instance, initialized with a Decree State
:rtype: Manticore
"""
try:
return cls(_make_decree(path, concrete_start), **kwargs) # depends on [control=['try'], data=[]]
except KeyError: # FIXME(mark) magic parsing for DECREE should raise better error
raise Exception(f'Invalid binary: {path}') # depends on [control=['except'], data=[]] |
def apply_all_transactions(
        self,
        transactions: Tuple[BaseTransaction, ...],
        base_header: BlockHeader
) -> Tuple[BlockHeader, Tuple[Receipt, ...], Tuple[BaseComputation, ...]]:
    """
    Determine the results of applying all transactions to the base header.

    This does *not* update the current block or header of the VM.

    :param transactions: an iterable of all transactions to apply
    :param base_header: the starting header to apply transactions to
    :return: the final header, the receipts of each transaction, and the computations
    :raises ValidationError: if ``base_header`` targets a different block
        number than the one this VM instance was created for
    """
    # Refuse to work across block boundaries: the VM is pinned to one block.
    if self.header.block_number != base_header.block_number:
        raise ValidationError(
            "This VM instance must only work on block #{}, "
            "but the target header has block #{}".format(
                self.header.block_number,
                base_header.block_number,
            )
        )

    receipts = []
    computations = []
    # Fold each transaction's receipt into the header, threading the
    # updated header through to the next transaction.
    current_header = base_header
    for transaction in transactions:
        receipt, computation = self.apply_transaction(current_header, transaction)
        current_header = self.add_receipt_to_header(current_header, receipt)
        receipts.append(receipt)
        computations.append(computation)

    return current_header, tuple(receipts), tuple(computations)
constant[
Determine the results of applying all transactions to the base header.
This does *not* update the current block or header of the VM.
:param transactions: an iterable of all transactions to apply
:param base_header: the starting header to apply transactions to
:return: the final header, the receipts of each transaction, and the computations
]
if compare[name[base_header].block_number not_equal[!=] name[self].header.block_number] begin[:]
<ast.Raise object at 0x7da1b175f4c0>
variable[receipts] assign[=] list[[]]
variable[computations] assign[=] list[[]]
variable[previous_header] assign[=] name[base_header]
variable[result_header] assign[=] name[base_header]
for taget[name[transaction]] in starred[name[transactions]] begin[:]
<ast.Tuple object at 0x7da1b18e5030> assign[=] call[name[self].apply_transaction, parameter[name[previous_header], name[transaction]]]
variable[result_header] assign[=] call[name[self].add_receipt_to_header, parameter[name[previous_header], name[receipt]]]
variable[previous_header] assign[=] name[result_header]
call[name[receipts].append, parameter[name[receipt]]]
call[name[computations].append, parameter[name[computation]]]
variable[receipts_tuple] assign[=] call[name[tuple], parameter[name[receipts]]]
variable[computations_tuple] assign[=] call[name[tuple], parameter[name[computations]]]
return[tuple[[<ast.Name object at 0x7da1b18e7ca0>, <ast.Name object at 0x7da1b18e6e60>, <ast.Name object at 0x7da1b18e5e10>]]] | keyword[def] identifier[apply_all_transactions] (
identifier[self] ,
identifier[transactions] : identifier[Tuple] [ identifier[BaseTransaction] ,...],
identifier[base_header] : identifier[BlockHeader]
)-> identifier[Tuple] [ identifier[BlockHeader] , identifier[Tuple] [ identifier[Receipt] ,...], identifier[Tuple] [ identifier[BaseComputation] ,...]]:
literal[string]
keyword[if] identifier[base_header] . identifier[block_number] != identifier[self] . identifier[header] . identifier[block_number] :
keyword[raise] identifier[ValidationError] (
literal[string]
literal[string] . identifier[format] (
identifier[self] . identifier[header] . identifier[block_number] ,
identifier[base_header] . identifier[block_number] ,
)
)
identifier[receipts] =[]
identifier[computations] =[]
identifier[previous_header] = identifier[base_header]
identifier[result_header] = identifier[base_header]
keyword[for] identifier[transaction] keyword[in] identifier[transactions] :
identifier[receipt] , identifier[computation] = identifier[self] . identifier[apply_transaction] (
identifier[previous_header] ,
identifier[transaction] ,
)
identifier[result_header] = identifier[self] . identifier[add_receipt_to_header] ( identifier[previous_header] , identifier[receipt] )
identifier[previous_header] = identifier[result_header]
identifier[receipts] . identifier[append] ( identifier[receipt] )
identifier[computations] . identifier[append] ( identifier[computation] )
identifier[receipts_tuple] = identifier[tuple] ( identifier[receipts] )
identifier[computations_tuple] = identifier[tuple] ( identifier[computations] )
keyword[return] identifier[result_header] , identifier[receipts_tuple] , identifier[computations_tuple] | def apply_all_transactions(self, transactions: Tuple[BaseTransaction, ...], base_header: BlockHeader) -> Tuple[BlockHeader, Tuple[Receipt, ...], Tuple[BaseComputation, ...]]:
"""
Determine the results of applying all transactions to the base header.
This does *not* update the current block or header of the VM.
:param transactions: an iterable of all transactions to apply
:param base_header: the starting header to apply transactions to
:return: the final header, the receipts of each transaction, and the computations
"""
if base_header.block_number != self.header.block_number:
raise ValidationError('This VM instance must only work on block #{}, but the target header has block #{}'.format(self.header.block_number, base_header.block_number)) # depends on [control=['if'], data=[]]
receipts = []
computations = []
previous_header = base_header
result_header = base_header
for transaction in transactions:
(receipt, computation) = self.apply_transaction(previous_header, transaction)
result_header = self.add_receipt_to_header(previous_header, receipt)
previous_header = result_header
receipts.append(receipt)
computations.append(computation) # depends on [control=['for'], data=['transaction']]
receipts_tuple = tuple(receipts)
computations_tuple = tuple(computations)
return (result_header, receipts_tuple, computations_tuple) |
def configure_discover(self, ns, definition):
    """Register a discovery endpoint for a set of operations.

    Wires a handler for the Discover operation at "/" which lists the
    searchable endpoints matching the namespace, wrapped in HAL-style
    ``_links`` with pagination metadata.
    """
    schema = OffsetLimitPageSchema()

    @self.add_route("/", Operation.Discover, ns)
    def discover():
        # Honor the pagination limit from the query string, but always
        # list from the beginning of the collection.
        page = OffsetLimitPage.from_query_string(schema)
        page.offset = 0
        self_link = Link.for_(Operation.Discover, ns, qs=page.to_items())
        search_links = list(iter_links(self.find_matching_endpoints(ns), page))
        links = Links({
            "self": self_link,
            "search": search_links,
        })
        return make_response(dict(_links=links.to_dict()))
constant[
Register a discovery endpoint for a set of operations.
]
variable[page_schema] assign[=] call[name[OffsetLimitPageSchema], parameter[]]
def function[discover, parameter[]]:
variable[page] assign[=] call[name[OffsetLimitPage].from_query_string, parameter[name[page_schema]]]
name[page].offset assign[=] constant[0]
variable[response_data] assign[=] call[name[dict], parameter[]]
return[call[name[make_response], parameter[name[response_data]]]] | keyword[def] identifier[configure_discover] ( identifier[self] , identifier[ns] , identifier[definition] ):
literal[string]
identifier[page_schema] = identifier[OffsetLimitPageSchema] ()
@ identifier[self] . identifier[add_route] ( literal[string] , identifier[Operation] . identifier[Discover] , identifier[ns] )
keyword[def] identifier[discover] ():
identifier[page] = identifier[OffsetLimitPage] . identifier[from_query_string] ( identifier[page_schema] )
identifier[page] . identifier[offset] = literal[int]
identifier[response_data] = identifier[dict] (
identifier[_links] = identifier[Links] ({
literal[string] : identifier[Link] . identifier[for_] ( identifier[Operation] . identifier[Discover] , identifier[ns] , identifier[qs] = identifier[page] . identifier[to_items] ()),
literal[string] :[
identifier[link] keyword[for] identifier[link] keyword[in] identifier[iter_links] ( identifier[self] . identifier[find_matching_endpoints] ( identifier[ns] ), identifier[page] )
],
}). identifier[to_dict] ()
)
keyword[return] identifier[make_response] ( identifier[response_data] ) | def configure_discover(self, ns, definition):
"""
Register a discovery endpoint for a set of operations.
"""
page_schema = OffsetLimitPageSchema()
@self.add_route('/', Operation.Discover, ns)
def discover():
# accept pagination limit from request
page = OffsetLimitPage.from_query_string(page_schema)
page.offset = 0
response_data = dict(_links=Links({'self': Link.for_(Operation.Discover, ns, qs=page.to_items()), 'search': [link for link in iter_links(self.find_matching_endpoints(ns), page)]}).to_dict())
return make_response(response_data) |
def output_sub_generic(gandi, data, output_keys, justify=10):
    """Generic helper to output info from a data dict.

    Emits one line per key of ``output_keys`` that is present in ``data``,
    preserving the order given by ``output_keys``; absent keys are skipped.
    """
    for key in output_keys:
        if key not in data:
            continue
        output_sub_line(gandi, key, data[key], justify)
constant[ Generic helper to output info from a data dict.]
for taget[name[key]] in starred[name[output_keys]] begin[:]
if compare[name[key] in name[data]] begin[:]
call[name[output_sub_line], parameter[name[gandi], name[key], call[name[data]][name[key]], name[justify]]] | keyword[def] identifier[output_sub_generic] ( identifier[gandi] , identifier[data] , identifier[output_keys] , identifier[justify] = literal[int] ):
literal[string]
keyword[for] identifier[key] keyword[in] identifier[output_keys] :
keyword[if] identifier[key] keyword[in] identifier[data] :
identifier[output_sub_line] ( identifier[gandi] , identifier[key] , identifier[data] [ identifier[key] ], identifier[justify] ) | def output_sub_generic(gandi, data, output_keys, justify=10):
""" Generic helper to output info from a data dict."""
for key in output_keys:
if key in data:
output_sub_line(gandi, key, data[key], justify) # depends on [control=['if'], data=['key', 'data']] # depends on [control=['for'], data=['key']] |
def find_sparse_mode(self, core, additional, scaling, weights=None):
    """Find a sparse mode containing reactions of the core subset.

    Return an iterator of the support of a sparse mode that contains as
    many reactions from core as possible, and as few reactions from
    additional as possible (approximately). A dictionary of weights can be
    supplied which gives further penalties for including specific
    additional reactions.

    Args:
        core: collection of core reaction IDs the mode should cover.
        additional: reaction IDs that may be used, but are penalized.
        scaling: divisor applied to the epsilon flux threshold in the
            final support test.
        weights: optional mapping of reaction ID to extra penalty used by
            the LP10 step. Defaults to no extra penalties.

    Yields:
        Reaction IDs whose absolute flux is at least ``epsilon / scaling``
        in the solved sparse mode.
    """
    # Fix for the mutable-default-argument pitfall: the previous
    # ``weights={}`` default shared one dict object across all calls.
    if weights is None:
        weights = {}
    if len(core) == 0:
        return
    # LP7 maximizes the number of core reactions that carry flux.
    self.lp7(core)
    k = set()
    for reaction_id in core:
        flux = self.get_flux(reaction_id)
        # Flipped reactions run in the reverse direction; normalize sign
        # before comparing against the (positive) epsilon threshold.
        if self.is_flipped(reaction_id):
            flux *= -1
        if flux >= self._epsilon:
            k.add(reaction_id)
    if len(k) == 0:
        return
    # LP10 minimizes (weighted) use of additional reactions while keeping
    # the supported core set k active.
    self.lp10(k, additional, weights)
    for reaction_id in self._model.reactions:
        flux = self.get_flux(reaction_id)
        if abs(flux) >= self._epsilon / scaling:
            yield reaction_id
constant[Find a sparse mode containing reactions of the core subset.
Return an iterator of the support of a sparse mode that contains as
many reactions from core as possible, and as few reactions from
additional as possible (approximately). A dictionary of weights can be
supplied which gives further penalties for including specific
additional reactions.
]
if compare[call[name[len], parameter[name[core]]] equal[==] constant[0]] begin[:]
return[None]
call[name[self].lp7, parameter[name[core]]]
variable[k] assign[=] call[name[set], parameter[]]
for taget[name[reaction_id]] in starred[name[core]] begin[:]
variable[flux] assign[=] call[name[self].get_flux, parameter[name[reaction_id]]]
if call[name[self].is_flipped, parameter[name[reaction_id]]] begin[:]
<ast.AugAssign object at 0x7da18f58e950>
if compare[name[flux] greater_or_equal[>=] name[self]._epsilon] begin[:]
call[name[k].add, parameter[name[reaction_id]]]
if compare[call[name[len], parameter[name[k]]] equal[==] constant[0]] begin[:]
return[None]
call[name[self].lp10, parameter[name[k], name[additional], name[weights]]]
for taget[name[reaction_id]] in starred[name[self]._model.reactions] begin[:]
variable[flux] assign[=] call[name[self].get_flux, parameter[name[reaction_id]]]
if compare[call[name[abs], parameter[name[flux]]] greater_or_equal[>=] binary_operation[name[self]._epsilon / name[scaling]]] begin[:]
<ast.Yield object at 0x7da18ede7820> | keyword[def] identifier[find_sparse_mode] ( identifier[self] , identifier[core] , identifier[additional] , identifier[scaling] , identifier[weights] ={}):
literal[string]
keyword[if] identifier[len] ( identifier[core] )== literal[int] :
keyword[return]
identifier[self] . identifier[lp7] ( identifier[core] )
identifier[k] = identifier[set] ()
keyword[for] identifier[reaction_id] keyword[in] identifier[core] :
identifier[flux] = identifier[self] . identifier[get_flux] ( identifier[reaction_id] )
keyword[if] identifier[self] . identifier[is_flipped] ( identifier[reaction_id] ):
identifier[flux] *=- literal[int]
keyword[if] identifier[flux] >= identifier[self] . identifier[_epsilon] :
identifier[k] . identifier[add] ( identifier[reaction_id] )
keyword[if] identifier[len] ( identifier[k] )== literal[int] :
keyword[return]
identifier[self] . identifier[lp10] ( identifier[k] , identifier[additional] , identifier[weights] )
keyword[for] identifier[reaction_id] keyword[in] identifier[self] . identifier[_model] . identifier[reactions] :
identifier[flux] = identifier[self] . identifier[get_flux] ( identifier[reaction_id] )
keyword[if] identifier[abs] ( identifier[flux] )>= identifier[self] . identifier[_epsilon] / identifier[scaling] :
keyword[yield] identifier[reaction_id] | def find_sparse_mode(self, core, additional, scaling, weights={}):
"""Find a sparse mode containing reactions of the core subset.
Return an iterator of the support of a sparse mode that contains as
many reactions from core as possible, and as few reactions from
additional as possible (approximately). A dictionary of weights can be
supplied which gives further penalties for including specific
additional reactions.
"""
if len(core) == 0:
return # depends on [control=['if'], data=[]]
self.lp7(core)
k = set()
for reaction_id in core:
flux = self.get_flux(reaction_id)
if self.is_flipped(reaction_id):
flux *= -1 # depends on [control=['if'], data=[]]
if flux >= self._epsilon:
k.add(reaction_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['reaction_id']]
if len(k) == 0:
return # depends on [control=['if'], data=[]]
self.lp10(k, additional, weights)
for reaction_id in self._model.reactions:
flux = self.get_flux(reaction_id)
if abs(flux) >= self._epsilon / scaling:
yield reaction_id # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['reaction_id']] |
def _update_indexes_for_mutated_object(collection, obj):
    """Refresh every index on ``collection`` for a mutated object.

    The object is removed from each index and immediately re-added, so
    stale index entries are replaced by ones reflecting its new state.
    """
    index_map = _db[collection].indexes
    for idx in index_map.values():
        _remove_from_index(idx, obj)
        _add_to_index(idx, obj)
constant[If an object is updated, this will simply remove
it and re-add it to the indexes defined on the
collection.]
for taget[name[index]] in starred[call[call[name[_db]][name[collection]].indexes.values, parameter[]]] begin[:]
call[name[_remove_from_index], parameter[name[index], name[obj]]]
call[name[_add_to_index], parameter[name[index], name[obj]]] | keyword[def] identifier[_update_indexes_for_mutated_object] ( identifier[collection] , identifier[obj] ):
literal[string]
keyword[for] identifier[index] keyword[in] identifier[_db] [ identifier[collection] ]. identifier[indexes] . identifier[values] ():
identifier[_remove_from_index] ( identifier[index] , identifier[obj] )
identifier[_add_to_index] ( identifier[index] , identifier[obj] ) | def _update_indexes_for_mutated_object(collection, obj):
"""If an object is updated, this will simply remove
it and re-add it to the indexes defined on the
collection."""
for index in _db[collection].indexes.values():
_remove_from_index(index, obj)
_add_to_index(index, obj) # depends on [control=['for'], data=['index']] |
def compose(*functions):
    """Define functions composition like f ∘ g ∘ h

    :return: callable object that will perform
        function composition of callables given in argument.
    """
    def composed(value):
        # Apply right-to-left, matching mathematical composition:
        # compose(f, g, h)(x) == f(g(h(x))).
        # With no functions given, this is the identity function.
        for fn in reversed(functions):
            value = fn(value)
        return value
    return composed
constant[Define functions composition like f ∘ g ∘ h
:return: callable object that will perform
function composition of callables given in argument.
]
def function[_compose2, parameter[f, g]]:
return[<ast.Lambda object at 0x7da20e9b1810>]
return[call[name[functools].reduce, parameter[name[_compose2], name[functions], <ast.Lambda object at 0x7da20e9b3b20>]]] | keyword[def] identifier[compose] (* identifier[functions] ):
literal[string]
keyword[def] identifier[_compose2] ( identifier[f] , identifier[g] ):
keyword[return] keyword[lambda] identifier[x] : identifier[f] ( identifier[g] ( identifier[x] ))
keyword[return] identifier[functools] . identifier[reduce] ( identifier[_compose2] , identifier[functions] , keyword[lambda] identifier[x] : identifier[x] ) | def compose(*functions):
"""Define functions composition like f ∘ g ∘ h
:return: callable object that will perform
function composition of callables given in argument.
"""
def _compose2(f, g): # pylint: disable=invalid-name
return lambda x: f(g(x))
return functools.reduce(_compose2, functions, lambda x: x) |
def entry_links(self):
    """Given a parsed feed, return the set of links to its entries.

    Includes links to entries which disappeared (as a quick-and-dirty
    way to support deletions).
    """
    links = set()
    for entry in self.feed.entries:
        # Skip missing entries and entries without a usable link value.
        if entry and entry.get('link'):
            links.add(entry['link'])
    return links
constant[ Given a parsed feed, return the links to its entries, including ones
which disappeared (as a quick-and-dirty way to support deletions)
]
return[<ast.SetComp object at 0x7da18f00fa60>] | keyword[def] identifier[entry_links] ( identifier[self] ):
literal[string]
keyword[return] { identifier[entry] [ literal[string] ] keyword[for] identifier[entry] keyword[in] identifier[self] . identifier[feed] . identifier[entries] keyword[if] identifier[entry] keyword[and] identifier[entry] . identifier[get] ( literal[string] )} | def entry_links(self):
""" Given a parsed feed, return the links to its entries, including ones
which disappeared (as a quick-and-dirty way to support deletions)
"""
return {entry['link'] for entry in self.feed.entries if entry and entry.get('link')} |
def exchange_pin(self, pin):
    """Exchange a one-use pin for an access token and refresh token.

    Posts the pin grant to the exchange endpoint, stores the resulting
    tokens on the client, and returns them as a tuple.
    """
    payload = {
        'client_id': self.client_id,
        'client_secret': self.client_secret,
        'grant_type': 'pin',
        'pin': pin,
    }
    result = self._send_request(EXCHANGE_URL.format(self._base_url),
                                params=payload, method='POST',
                                data_field=None)
    # Persist both tokens for subsequent authenticated requests.
    self.access_token = result['access_token']
    self.refresh_token = result['refresh_token']
    return self.access_token, self.refresh_token
constant[Exchange one-use pin for an access_token and request_token.]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18f58dc00>, <ast.Constant object at 0x7da18f58ee90>, <ast.Constant object at 0x7da18f58f820>, <ast.Constant object at 0x7da18f58de40>], [<ast.Attribute object at 0x7da18f58f460>, <ast.Attribute object at 0x7da18f58e9b0>, <ast.Constant object at 0x7da18f58d9f0>, <ast.Name object at 0x7da18f58fc40>]]
variable[result] assign[=] call[name[self]._send_request, parameter[call[name[EXCHANGE_URL].format, parameter[name[self]._base_url]]]]
name[self].access_token assign[=] call[name[result]][constant[access_token]]
name[self].refresh_token assign[=] call[name[result]][constant[refresh_token]]
return[tuple[[<ast.Attribute object at 0x7da207f98760>, <ast.Attribute object at 0x7da207f9b7c0>]]] | keyword[def] identifier[exchange_pin] ( identifier[self] , identifier[pin] ):
literal[string]
identifier[params] ={ literal[string] : identifier[self] . identifier[client_id] ,
literal[string] : identifier[self] . identifier[client_secret] ,
literal[string] : literal[string] ,
literal[string] : identifier[pin] }
identifier[result] = identifier[self] . identifier[_send_request] ( identifier[EXCHANGE_URL] . identifier[format] ( identifier[self] . identifier[_base_url] ),
identifier[params] = identifier[params] , identifier[method] = literal[string] ,
identifier[data_field] = keyword[None] )
identifier[self] . identifier[access_token] = identifier[result] [ literal[string] ]
identifier[self] . identifier[refresh_token] = identifier[result] [ literal[string] ]
keyword[return] identifier[self] . identifier[access_token] , identifier[self] . identifier[refresh_token] | def exchange_pin(self, pin):
"""Exchange one-use pin for an access_token and request_token."""
params = {'client_id': self.client_id, 'client_secret': self.client_secret, 'grant_type': 'pin', 'pin': pin}
result = self._send_request(EXCHANGE_URL.format(self._base_url), params=params, method='POST', data_field=None)
self.access_token = result['access_token']
self.refresh_token = result['refresh_token']
return (self.access_token, self.refresh_token) |
def drop_retention_policy(self, name, database=None):
    """Drop an existing retention policy for a database.

    :param name: the name of the retention policy to drop
    :type name: str
    :param database: the database for which the retention policy is
        dropped. Defaults to current client's database
    :type database: str
    """
    target = quote_ident(database or self._database)
    query_string = "DROP RETENTION POLICY {0} ON {1}".format(
        quote_ident(name), target)
    self.query(query_string, method="POST")
constant[Drop an existing retention policy for a database.
:param name: the name of the retention policy to drop
:type name: str
:param database: the database for which the retention policy is
dropped. Defaults to current client's database
:type database: str
]
variable[query_string] assign[=] call[constant[DROP RETENTION POLICY {0} ON {1}].format, parameter[call[name[quote_ident], parameter[name[name]]], call[name[quote_ident], parameter[<ast.BoolOp object at 0x7da1b18be710>]]]]
call[name[self].query, parameter[name[query_string]]] | keyword[def] identifier[drop_retention_policy] ( identifier[self] , identifier[name] , identifier[database] = keyword[None] ):
literal[string]
identifier[query_string] =(
literal[string]
). identifier[format] ( identifier[quote_ident] ( identifier[name] ), identifier[quote_ident] ( identifier[database] keyword[or] identifier[self] . identifier[_database] ))
identifier[self] . identifier[query] ( identifier[query_string] , identifier[method] = literal[string] ) | def drop_retention_policy(self, name, database=None):
"""Drop an existing retention policy for a database.
:param name: the name of the retention policy to drop
:type name: str
:param database: the database for which the retention policy is
dropped. Defaults to current client's database
:type database: str
"""
query_string = 'DROP RETENTION POLICY {0} ON {1}'.format(quote_ident(name), quote_ident(database or self._database))
self.query(query_string, method='POST') |
def get_expr_summ_id(self, experiment_id, time_slide_id, veto_def_name, datatype, sim_proc_id=None):
    """
    Return the expr_summ_id for the row in the table whose experiment_id,
    time_slide_id, veto_def_name, and datatype match the given. If sim_proc_id,
    will retrieve the injection run matching that sim_proc_id.
    If a matching row is not found, returns None.
    """
    wanted = (experiment_id, time_slide_id, veto_def_name, datatype,
              sim_proc_id)
    for row in self:
        candidate = (row.experiment_id, row.time_slide_id,
                     row.veto_def_name, row.datatype, row.sim_proc_id)
        if candidate == wanted:
            # First match wins.
            return row.experiment_summ_id
    # No row matched the requested combination.
    return None
constant[
Return the expr_summ_id for the row in the table whose experiment_id,
time_slide_id, veto_def_name, and datatype match the given. If sim_proc_id,
will retrieve the injection run matching that sim_proc_id.
If a matching row is not found, returns None.
]
for taget[name[row]] in starred[name[self]] begin[:]
if compare[tuple[[<ast.Attribute object at 0x7da1b0b80dc0>, <ast.Attribute object at 0x7da1b0b81a80>, <ast.Attribute object at 0x7da1b0b807c0>, <ast.Attribute object at 0x7da1b0b71cc0>, <ast.Attribute object at 0x7da1b0b701c0>]] equal[==] tuple[[<ast.Name object at 0x7da1b0b72dd0>, <ast.Name object at 0x7da1b0b724a0>, <ast.Name object at 0x7da1b0b73af0>, <ast.Name object at 0x7da1b0b71ab0>, <ast.Name object at 0x7da1b0b72e00>]]] begin[:]
return[name[row].experiment_summ_id]
return[constant[None]] | keyword[def] identifier[get_expr_summ_id] ( identifier[self] , identifier[experiment_id] , identifier[time_slide_id] , identifier[veto_def_name] , identifier[datatype] , identifier[sim_proc_id] = keyword[None] ):
literal[string]
keyword[for] identifier[row] keyword[in] identifier[self] :
keyword[if] ( identifier[row] . identifier[experiment_id] , identifier[row] . identifier[time_slide_id] , identifier[row] . identifier[veto_def_name] , identifier[row] . identifier[datatype] , identifier[row] . identifier[sim_proc_id] )==( identifier[experiment_id] , identifier[time_slide_id] , identifier[veto_def_name] , identifier[datatype] , identifier[sim_proc_id] ):
keyword[return] identifier[row] . identifier[experiment_summ_id]
keyword[return] keyword[None] | def get_expr_summ_id(self, experiment_id, time_slide_id, veto_def_name, datatype, sim_proc_id=None):
"""
Return the expr_summ_id for the row in the table whose experiment_id,
time_slide_id, veto_def_name, and datatype match the given. If sim_proc_id,
will retrieve the injection run matching that sim_proc_id.
If a matching row is not found, returns None.
""" # look for the ID
for row in self:
if (row.experiment_id, row.time_slide_id, row.veto_def_name, row.datatype, row.sim_proc_id) == (experiment_id, time_slide_id, veto_def_name, datatype, sim_proc_id): # found it
return row.experiment_summ_id # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] # if get to here, experiment not found in table
return None |
def response(schema):
    """
    Decorate a function with a response schema.

    The returned decorator attaches ``schema`` to the target function
    under the RESPONSE attribute name and returns the function unchanged.
    """
    def decorator(func):
        # Tag the function; the schema is read back later via RESPONSE.
        setattr(func, RESPONSE, schema)
        return func
    return decorator
constant[
Decorate a function with a response schema.
]
def function[wrapper, parameter[func]]:
call[name[setattr], parameter[name[func], name[RESPONSE], name[schema]]]
return[name[func]]
return[name[wrapper]] | keyword[def] identifier[response] ( identifier[schema] ):
literal[string]
keyword[def] identifier[wrapper] ( identifier[func] ):
identifier[setattr] ( identifier[func] , identifier[RESPONSE] , identifier[schema] )
keyword[return] identifier[func]
keyword[return] identifier[wrapper] | def response(schema):
"""
Decorate a function with a response schema.
"""
def wrapper(func):
setattr(func, RESPONSE, schema)
return func
return wrapper |
def params(self):
    """Return information about the params that the given http option takes.

    Maps the first decorator argument (the primary param name) to a dict
    holding its ``required`` flag, any alias names, and the raw keyword
    options supplied to the decorator.
    """
    ret = {}
    for rd in self.decorators:
        args = rd.args
        kwargs = rd.kwargs
        # NOTE(review): ``param`` is not defined in this function or its
        # signature, so this membership test raises NameError when reached --
        # confirm the intended name (possibly a module-level decorator marker
        # that should be imported or referenced here).
        if param in rd:
            # A param is required unless the decorator supplied a 'default',
            # or unless 'required' was given explicitly.
            is_required = kwargs.get('required', 'default' not in kwargs)
            ret[args[0]] = {'required': is_required, 'other_names': args[1:], 'options': kwargs}
    return ret
constant[return information about the params that the given http option takes]
variable[ret] assign[=] dictionary[[], []]
for taget[name[rd]] in starred[name[self].decorators] begin[:]
variable[args] assign[=] name[rd].args
variable[kwargs] assign[=] name[rd].kwargs
if compare[name[param] in name[rd]] begin[:]
variable[is_required] assign[=] call[name[kwargs].get, parameter[constant[required], compare[constant[default] <ast.NotIn object at 0x7da2590d7190> name[kwargs]]]]
call[name[ret]][call[name[args]][constant[0]]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0471360>, <ast.Constant object at 0x7da1b0470c40>, <ast.Constant object at 0x7da1b0473040>], [<ast.Name object at 0x7da1b0473490>, <ast.Subscript object at 0x7da1b0470130>, <ast.Name object at 0x7da1b0470a30>]]
return[name[ret]] | keyword[def] identifier[params] ( identifier[self] ):
literal[string]
identifier[ret] ={}
keyword[for] identifier[rd] keyword[in] identifier[self] . identifier[decorators] :
identifier[args] = identifier[rd] . identifier[args]
identifier[kwargs] = identifier[rd] . identifier[kwargs]
keyword[if] identifier[param] keyword[in] identifier[rd] :
identifier[is_required] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] keyword[not] keyword[in] identifier[kwargs] )
identifier[ret] [ identifier[args] [ literal[int] ]]={ literal[string] : identifier[is_required] , literal[string] : identifier[args] [ literal[int] :], literal[string] : identifier[kwargs] }
keyword[return] identifier[ret] | def params(self):
"""return information about the params that the given http option takes"""
ret = {}
for rd in self.decorators:
args = rd.args
kwargs = rd.kwargs
if param in rd:
is_required = kwargs.get('required', 'default' not in kwargs)
ret[args[0]] = {'required': is_required, 'other_names': args[1:], 'options': kwargs} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rd']]
return ret |
def replace_random_tokens(self,
                          n_samples,  # type: int
                          replacement='',  # type: str
                          random_state=None,
                          min_replace=1,  # type: Union[int, float]
                          max_replace=1.0,  # type: Union[int, float]
                          group_size=1  # type: int
                          ):
    # type: (...) -> List[Tuple[str, int, np.ndarray]]
    """
    Return a list of ``(text, replaced_count, mask)``
    tuples with n_samples versions of text with some words replaced.
    By default words are replaced with '', i.e. removed.

    ``min_replace``/``max_replace`` may be absolute counts (int) or
    fractions of the token count (float); they are resolved by
    ``_get_min_max``. ``group_size`` replaces runs of consecutive tokens
    starting at each sampled position.
    """
    n_tokens = len(self.tokens)
    indices = np.arange(n_tokens)
    if not n_tokens:
        # Degenerate case: no tokens -- every sample is the empty text.
        nomask = np.array([], dtype=int)
        return [('', 0, nomask)] * n_samples
    # Resolve fractional/absolute bounds into absolute token counts.
    min_replace, max_replace = self._get_min_max(min_replace, max_replace,
                                                 n_tokens)
    rng = check_random_state(random_state)
    # Draw, per sample, how many (group start) positions to replace.
    replace_sizes = rng.randint(low=min_replace, high=max_replace + 1,
                                size=n_samples)
    res = []
    for num_to_replace in replace_sizes:
        # Sample distinct start positions without replacement.
        idx_to_replace = rng.choice(indices, num_to_replace, replace=False)
        # Expand each start index into group_size consecutive indices
        # (shifted copies stacked then flattened).
        idx_to_replace = np.array([idx_to_replace] + [
            idx_to_replace + shift for shift in range(1, group_size)
        ]).ravel()
        # Shifted indices can run past the last token; build the mask over
        # a padded range, then trim back to the real token count.
        padded_size = n_tokens + group_size - 1
        mask = indices_to_bool_mask(idx_to_replace, padded_size)[:n_tokens]
        s = self.split.masked(mask, replacement)
        # num_to_replace counts sampled start positions, not total tokens
        # masked (groups may overlap or be clipped at the end).
        res.append((s.text, num_to_replace, mask))
    return res
return res | def function[replace_random_tokens, parameter[self, n_samples, replacement, random_state, min_replace, max_replace, group_size]]:
constant[
Return a list of ``(text, replaced_count, mask)``
tuples with n_samples versions of text with some words replaced.
By default words are replaced with '', i.e. removed.
]
variable[n_tokens] assign[=] call[name[len], parameter[name[self].tokens]]
variable[indices] assign[=] call[name[np].arange, parameter[name[n_tokens]]]
if <ast.UnaryOp object at 0x7da1b1f482e0> begin[:]
variable[nomask] assign[=] call[name[np].array, parameter[list[[]]]]
return[binary_operation[list[[<ast.Tuple object at 0x7da1b1f48100>]] * name[n_samples]]]
<ast.Tuple object at 0x7da1b1f48a60> assign[=] call[name[self]._get_min_max, parameter[name[min_replace], name[max_replace], name[n_tokens]]]
variable[rng] assign[=] call[name[check_random_state], parameter[name[random_state]]]
variable[replace_sizes] assign[=] call[name[rng].randint, parameter[]]
variable[res] assign[=] list[[]]
for taget[name[num_to_replace]] in starred[name[replace_sizes]] begin[:]
variable[idx_to_replace] assign[=] call[name[rng].choice, parameter[name[indices], name[num_to_replace]]]
variable[idx_to_replace] assign[=] call[call[name[np].array, parameter[binary_operation[list[[<ast.Name object at 0x7da1b1f4a4a0>]] + <ast.ListComp object at 0x7da1b1f49000>]]].ravel, parameter[]]
variable[padded_size] assign[=] binary_operation[binary_operation[name[n_tokens] + name[group_size]] - constant[1]]
variable[mask] assign[=] call[call[name[indices_to_bool_mask], parameter[name[idx_to_replace], name[padded_size]]]][<ast.Slice object at 0x7da1b1f49f30>]
variable[s] assign[=] call[name[self].split.masked, parameter[name[mask], name[replacement]]]
call[name[res].append, parameter[tuple[[<ast.Attribute object at 0x7da1b1f49e70>, <ast.Name object at 0x7da1b1f4b5e0>, <ast.Name object at 0x7da1b1f4a710>]]]]
return[name[res]] | keyword[def] identifier[replace_random_tokens] ( identifier[self] ,
identifier[n_samples] ,
identifier[replacement] = literal[string] ,
identifier[random_state] = keyword[None] ,
identifier[min_replace] = literal[int] ,
identifier[max_replace] = literal[int] ,
identifier[group_size] = literal[int]
):
literal[string]
identifier[n_tokens] = identifier[len] ( identifier[self] . identifier[tokens] )
identifier[indices] = identifier[np] . identifier[arange] ( identifier[n_tokens] )
keyword[if] keyword[not] identifier[n_tokens] :
identifier[nomask] = identifier[np] . identifier[array] ([], identifier[dtype] = identifier[int] )
keyword[return] [( literal[string] , literal[int] , identifier[nomask] )]* identifier[n_samples]
identifier[min_replace] , identifier[max_replace] = identifier[self] . identifier[_get_min_max] ( identifier[min_replace] , identifier[max_replace] ,
identifier[n_tokens] )
identifier[rng] = identifier[check_random_state] ( identifier[random_state] )
identifier[replace_sizes] = identifier[rng] . identifier[randint] ( identifier[low] = identifier[min_replace] , identifier[high] = identifier[max_replace] + literal[int] ,
identifier[size] = identifier[n_samples] )
identifier[res] =[]
keyword[for] identifier[num_to_replace] keyword[in] identifier[replace_sizes] :
identifier[idx_to_replace] = identifier[rng] . identifier[choice] ( identifier[indices] , identifier[num_to_replace] , identifier[replace] = keyword[False] )
identifier[idx_to_replace] = identifier[np] . identifier[array] ([ identifier[idx_to_replace] ]+[
identifier[idx_to_replace] + identifier[shift] keyword[for] identifier[shift] keyword[in] identifier[range] ( literal[int] , identifier[group_size] )
]). identifier[ravel] ()
identifier[padded_size] = identifier[n_tokens] + identifier[group_size] - literal[int]
identifier[mask] = identifier[indices_to_bool_mask] ( identifier[idx_to_replace] , identifier[padded_size] )[: identifier[n_tokens] ]
identifier[s] = identifier[self] . identifier[split] . identifier[masked] ( identifier[mask] , identifier[replacement] )
identifier[res] . identifier[append] (( identifier[s] . identifier[text] , identifier[num_to_replace] , identifier[mask] ))
keyword[return] identifier[res] | def replace_random_tokens(self, n_samples, replacement='', random_state=None, min_replace=1, max_replace=1.0, group_size=1): # type: int
# type: str
# type: Union[int, float]
# type: Union[int, float]
# type: int
# type: (...) -> List[Tuple[str, int, np.ndarray]]
" \n Return a list of ``(text, replaced_count, mask)``\n tuples with n_samples versions of text with some words replaced.\n By default words are replaced with '', i.e. removed.\n "
n_tokens = len(self.tokens)
indices = np.arange(n_tokens)
if not n_tokens:
nomask = np.array([], dtype=int)
return [('', 0, nomask)] * n_samples # depends on [control=['if'], data=[]]
(min_replace, max_replace) = self._get_min_max(min_replace, max_replace, n_tokens)
rng = check_random_state(random_state)
replace_sizes = rng.randint(low=min_replace, high=max_replace + 1, size=n_samples)
res = []
for num_to_replace in replace_sizes:
idx_to_replace = rng.choice(indices, num_to_replace, replace=False)
idx_to_replace = np.array([idx_to_replace] + [idx_to_replace + shift for shift in range(1, group_size)]).ravel()
padded_size = n_tokens + group_size - 1
mask = indices_to_bool_mask(idx_to_replace, padded_size)[:n_tokens]
s = self.split.masked(mask, replacement)
res.append((s.text, num_to_replace, mask)) # depends on [control=['for'], data=['num_to_replace']]
return res |
def run(self, head_rows, tail_rows=None, n_rows=None):
    """
    Run the intuition process
    :param head_rows: A list of rows from the start of the file. Should have at least 30 rows
    :param tail_rows: A list of rows from the end of the file. Optional, but should have at least 30 rows
    :param n_rows: Total number of rows, if a subset was provided in head_rows
    :return: self, with comment_lines, header_lines, start_line, headers
        and (possibly) end_line populated
    """
    from .exceptions import RowIntuitError
    header_rows = []
    found_header = False
    # Skip the first rows when deriving the data pattern, since headers
    # and comments cluster at the top of the file.
    MIN_SKIP_ROWS = 30
    try:
        data_pattern_skip_rows = min(MIN_SKIP_ROWS, len(head_rows) - 8)
    except TypeError:
        # Hopefully b/c head_rows is a generator, not a sequence
        raise RowIntuitError("Head_rows must be a sequence, not a generator or iterator")
    try:
        data_pattern, self.data_pattern_source, n_cols = self.data_pattern(head_rows[data_pattern_skip_rows:])
    except Exception as e:
        # NOTE(review): `e` is unused; the failure is logged generically
        # and the original exception re-raised.
        logger.debug("Failed to find data pattern")
        raise
    # Label patterns are tried in order; 'D' (data) wins over the generic
    # header heuristic, which wins over the instance-level patterns.
    patterns = ([('D', data_pattern),
                 # More than 25% strings in row is header, if it isn't matched as data
                 # NOTE(review): under Python 3, n_cols/8 is a float, producing a
                 # malformed repetition count like X{3.0,4.5} -- confirm this code
                 # targets Python 2 or that integer division is intended.
                 ('H', re.compile(r'X{{{},{}}}'.format(max(3, n_cols/8),max(3,n_cols/4)))),
                 ] +
                list(self.patterns))
    if self.debug:
        logger.debug("--- Patterns")
        for e in patterns:
            logger.debug("    {} {}".format(e[0], e[1].pattern))
    # Walk the head rows, classifying each as Comment / Header / Data etc.
    for i, row in enumerate(head_rows):
        picture = self.picture(row)
        label = self.match_picture(picture, patterns)
        try:
            # If a header or data has more than half of the line is a continuous nulls,
            # it's probably a comment.
            if label != 'B' and len(re.search('_+', picture).group(0)) > len(row)/2:
                label = 'C'
        except AttributeError:
            pass # re not matched
        if not found_header and label == 'H':
            found_header = True
        if label is False:
            if found_header:
                label = 'D'
            else:
                # Could be a really wacky header
                found_header = True
                label = 'H'
        if self.debug:
            logger.debug("HEAD: {:<5} {} {} {}".format(i, label, picture, row))
        if label == 'C':
            self.comment_lines.append(i)
        elif label == 'H':
            self.header_lines.append(i)
            header_rows.append(row)
        elif label == 'D':
            # First data row ends the header region; merge collected
            # header rows and stop scanning.
            self.start_line = i
            self.headers = self.coalesce_headers(header_rows)
            break
    if tail_rows:
        from itertools import takewhile, islice
        # Debug-only inspection of the last few rows.
        # NOTE(review): unlike the HEAD loop, this logging is not gated on
        # self.debug -- confirm whether that is intentional.
        for i, row in enumerate(islice(reversed(tail_rows), 0, 10)):
            picture = self.picture(row)
            label = self.match_picture(picture, patterns)
            logger.debug("TAIL: {:<5} {} {} {}".format(i, label, picture, row))
        # Compute the data label for the end line, then reverse them.
        labels = reversed(list(self.match_picture(self.picture(row), patterns) for row in tail_rows))
        # Count the number of lines, from the end, that are either comment or blank
        end_line = len(list(takewhile(lambda x: x == 'C' or x == 'B' or x == 'H', labels)))
        if end_line:
            # NOTE(review): raises TypeError when n_rows is None (its default)
            # and trailing comment/blank lines exist -- confirm callers always
            # pass n_rows together with tail_rows.
            self.end_line = n_rows-end_line-1
    return self
constant[
Run the intuition process
:param head_rows: A list of rows from the start of the file. Should have at least 30 rows
:param tail_rows: A list of rows from the end of the file. Optional, but should have at least 30 rows
:param n_rows: Total number of rows, if a subset was provided in head_rows
:return:
]
from relative_module[exceptions] import module[RowIntuitError]
variable[header_rows] assign[=] list[[]]
variable[found_header] assign[=] constant[False]
variable[MIN_SKIP_ROWS] assign[=] constant[30]
<ast.Try object at 0x7da1b143d180>
<ast.Try object at 0x7da1b143cca0>
variable[patterns] assign[=] binary_operation[list[[<ast.Tuple object at 0x7da1b143da80>, <ast.Tuple object at 0x7da1b143d870>]] + call[name[list], parameter[name[self].patterns]]]
if name[self].debug begin[:]
call[name[logger].debug, parameter[constant[--- Patterns]]]
for taget[name[e]] in starred[name[patterns]] begin[:]
call[name[logger].debug, parameter[call[constant[ {} {}].format, parameter[call[name[e]][constant[0]], call[name[e]][constant[1]].pattern]]]]
for taget[tuple[[<ast.Name object at 0x7da1b143f4f0>, <ast.Name object at 0x7da1b143cbb0>]]] in starred[call[name[enumerate], parameter[name[head_rows]]]] begin[:]
variable[picture] assign[=] call[name[self].picture, parameter[name[row]]]
variable[label] assign[=] call[name[self].match_picture, parameter[name[picture], name[patterns]]]
<ast.Try object at 0x7da1b143da20>
if <ast.BoolOp object at 0x7da1b143f490> begin[:]
variable[found_header] assign[=] constant[True]
if compare[name[label] is constant[False]] begin[:]
if name[found_header] begin[:]
variable[label] assign[=] constant[D]
if name[self].debug begin[:]
call[name[logger].debug, parameter[call[constant[HEAD: {:<5} {} {} {}].format, parameter[name[i], name[label], name[picture], name[row]]]]]
if compare[name[label] equal[==] constant[C]] begin[:]
call[name[self].comment_lines.append, parameter[name[i]]]
if name[tail_rows] begin[:]
from relative_module[itertools] import module[takewhile], module[islice]
for taget[tuple[[<ast.Name object at 0x7da20c6a9e70>, <ast.Name object at 0x7da20c6ab220>]]] in starred[call[name[enumerate], parameter[call[name[islice], parameter[call[name[reversed], parameter[name[tail_rows]]], constant[0], constant[10]]]]]] begin[:]
variable[picture] assign[=] call[name[self].picture, parameter[name[row]]]
variable[label] assign[=] call[name[self].match_picture, parameter[name[picture], name[patterns]]]
call[name[logger].debug, parameter[call[constant[TAIL: {:<5} {} {} {}].format, parameter[name[i], name[label], name[picture], name[row]]]]]
variable[labels] assign[=] call[name[reversed], parameter[call[name[list], parameter[<ast.GeneratorExp object at 0x7da20c6aa560>]]]]
variable[end_line] assign[=] call[name[len], parameter[call[name[list], parameter[call[name[takewhile], parameter[<ast.Lambda object at 0x7da20c6a86d0>, name[labels]]]]]]]
if name[end_line] begin[:]
name[self].end_line assign[=] binary_operation[binary_operation[name[n_rows] - name[end_line]] - constant[1]]
return[name[self]] | keyword[def] identifier[run] ( identifier[self] , identifier[head_rows] , identifier[tail_rows] = keyword[None] , identifier[n_rows] = keyword[None] ):
literal[string]
keyword[from] . identifier[exceptions] keyword[import] identifier[RowIntuitError]
identifier[header_rows] =[]
identifier[found_header] = keyword[False]
identifier[MIN_SKIP_ROWS] = literal[int]
keyword[try] :
identifier[data_pattern_skip_rows] = identifier[min] ( identifier[MIN_SKIP_ROWS] , identifier[len] ( identifier[head_rows] )- literal[int] )
keyword[except] identifier[TypeError] :
keyword[raise] identifier[RowIntuitError] ( literal[string] )
keyword[try] :
identifier[data_pattern] , identifier[self] . identifier[data_pattern_source] , identifier[n_cols] = identifier[self] . identifier[data_pattern] ( identifier[head_rows] [ identifier[data_pattern_skip_rows] :])
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[raise]
identifier[patterns] =([( literal[string] , identifier[data_pattern] ),
( literal[string] , identifier[re] . identifier[compile] ( literal[string] . identifier[format] ( identifier[max] ( literal[int] , identifier[n_cols] / literal[int] ), identifier[max] ( literal[int] , identifier[n_cols] / literal[int] )))),
]+
identifier[list] ( identifier[self] . identifier[patterns] ))
keyword[if] identifier[self] . identifier[debug] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[for] identifier[e] keyword[in] identifier[patterns] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[e] [ literal[int] ], identifier[e] [ literal[int] ]. identifier[pattern] ))
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[head_rows] ):
identifier[picture] = identifier[self] . identifier[picture] ( identifier[row] )
identifier[label] = identifier[self] . identifier[match_picture] ( identifier[picture] , identifier[patterns] )
keyword[try] :
keyword[if] identifier[label] != literal[string] keyword[and] identifier[len] ( identifier[re] . identifier[search] ( literal[string] , identifier[picture] ). identifier[group] ( literal[int] ))> identifier[len] ( identifier[row] )/ literal[int] :
identifier[label] = literal[string]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[if] keyword[not] identifier[found_header] keyword[and] identifier[label] == literal[string] :
identifier[found_header] = keyword[True]
keyword[if] identifier[label] keyword[is] keyword[False] :
keyword[if] identifier[found_header] :
identifier[label] = literal[string]
keyword[else] :
identifier[found_header] = keyword[True]
identifier[label] = literal[string]
keyword[if] identifier[self] . identifier[debug] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[i] , identifier[label] , identifier[picture] , identifier[row] ))
keyword[if] identifier[label] == literal[string] :
identifier[self] . identifier[comment_lines] . identifier[append] ( identifier[i] )
keyword[elif] identifier[label] == literal[string] :
identifier[self] . identifier[header_lines] . identifier[append] ( identifier[i] )
identifier[header_rows] . identifier[append] ( identifier[row] )
keyword[elif] identifier[label] == literal[string] :
identifier[self] . identifier[start_line] = identifier[i]
identifier[self] . identifier[headers] = identifier[self] . identifier[coalesce_headers] ( identifier[header_rows] )
keyword[break]
keyword[if] identifier[tail_rows] :
keyword[from] identifier[itertools] keyword[import] identifier[takewhile] , identifier[islice]
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[islice] ( identifier[reversed] ( identifier[tail_rows] ), literal[int] , literal[int] )):
identifier[picture] = identifier[self] . identifier[picture] ( identifier[row] )
identifier[label] = identifier[self] . identifier[match_picture] ( identifier[picture] , identifier[patterns] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[i] , identifier[label] , identifier[picture] , identifier[row] ))
identifier[labels] = identifier[reversed] ( identifier[list] ( identifier[self] . identifier[match_picture] ( identifier[self] . identifier[picture] ( identifier[row] ), identifier[patterns] ) keyword[for] identifier[row] keyword[in] identifier[tail_rows] ))
identifier[end_line] = identifier[len] ( identifier[list] ( identifier[takewhile] ( keyword[lambda] identifier[x] : identifier[x] == literal[string] keyword[or] identifier[x] == literal[string] keyword[or] identifier[x] == literal[string] , identifier[labels] )))
keyword[if] identifier[end_line] :
identifier[self] . identifier[end_line] = identifier[n_rows] - identifier[end_line] - literal[int]
keyword[return] identifier[self] | def run(self, head_rows, tail_rows=None, n_rows=None):
"""
Run the intuition process
:param head_rows: A list of rows from the start of the file. Should have at least 30 rows
:param tail_rows: A list of rows from the end of the file. Optional, but should have at least 30 rows
:param n_rows: Total number of rows, if a subset was provided in head_rows
:return:
"""
from .exceptions import RowIntuitError
header_rows = []
found_header = False
MIN_SKIP_ROWS = 30
try:
data_pattern_skip_rows = min(MIN_SKIP_ROWS, len(head_rows) - 8) # depends on [control=['try'], data=[]]
except TypeError:
# Hopefully b/c head_rows is a generator, not a sequence
raise RowIntuitError('Head_rows must be a sequence, not a generator or iterator') # depends on [control=['except'], data=[]]
try:
(data_pattern, self.data_pattern_source, n_cols) = self.data_pattern(head_rows[data_pattern_skip_rows:]) # depends on [control=['try'], data=[]]
except Exception as e:
logger.debug('Failed to find data pattern')
raise # depends on [control=['except'], data=[]]
# More than 25% strings in row is header, if it isn't matched as data
patterns = [('D', data_pattern), ('H', re.compile('X{{{},{}}}'.format(max(3, n_cols / 8), max(3, n_cols / 4))))] + list(self.patterns)
if self.debug:
logger.debug('--- Patterns')
for e in patterns:
logger.debug(' {} {}'.format(e[0], e[1].pattern)) # depends on [control=['for'], data=['e']] # depends on [control=['if'], data=[]]
for (i, row) in enumerate(head_rows):
picture = self.picture(row)
label = self.match_picture(picture, patterns)
try:
# If a header or data has more than half of the line is a continuous nulls,
# it's probably a comment.
if label != 'B' and len(re.search('_+', picture).group(0)) > len(row) / 2:
label = 'C' # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
pass # re not matched # depends on [control=['except'], data=[]]
if not found_header and label == 'H':
found_header = True # depends on [control=['if'], data=[]]
if label is False:
if found_header:
label = 'D' # depends on [control=['if'], data=[]]
else:
# Could be a really wacky header
found_header = True
label = 'H' # depends on [control=['if'], data=['label']]
if self.debug:
logger.debug('HEAD: {:<5} {} {} {}'.format(i, label, picture, row)) # depends on [control=['if'], data=[]]
if label == 'C':
self.comment_lines.append(i) # depends on [control=['if'], data=[]]
elif label == 'H':
self.header_lines.append(i)
header_rows.append(row) # depends on [control=['if'], data=[]]
elif label == 'D':
self.start_line = i
self.headers = self.coalesce_headers(header_rows)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if tail_rows:
from itertools import takewhile, islice
for (i, row) in enumerate(islice(reversed(tail_rows), 0, 10)):
picture = self.picture(row)
label = self.match_picture(picture, patterns)
logger.debug('TAIL: {:<5} {} {} {}'.format(i, label, picture, row)) # depends on [control=['for'], data=[]]
# Compute the data label for the end line, then reverse them.
labels = reversed(list((self.match_picture(self.picture(row), patterns) for row in tail_rows)))
# Count the number of lines, from the end, that are either comment or blank
end_line = len(list(takewhile(lambda x: x == 'C' or x == 'B' or x == 'H', labels)))
if end_line:
self.end_line = n_rows - end_line - 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self |
def _generate_address(self, key_iterator):
# type: (KeyIterator) -> Address
"""
Generates a new address.
Used in the event of a cache miss.
"""
if self.checksum:
return (
self.address_from_digest(
digest=self._get_digest(key_iterator),
).with_valid_checksum()
)
else:
return self.address_from_digest(self._get_digest(key_iterator)) | def function[_generate_address, parameter[self, key_iterator]]:
constant[
Generates a new address.
Used in the event of a cache miss.
]
if name[self].checksum begin[:]
return[call[call[name[self].address_from_digest, parameter[]].with_valid_checksum, parameter[]]] | keyword[def] identifier[_generate_address] ( identifier[self] , identifier[key_iterator] ):
literal[string]
keyword[if] identifier[self] . identifier[checksum] :
keyword[return] (
identifier[self] . identifier[address_from_digest] (
identifier[digest] = identifier[self] . identifier[_get_digest] ( identifier[key_iterator] ),
). identifier[with_valid_checksum] ()
)
keyword[else] :
keyword[return] identifier[self] . identifier[address_from_digest] ( identifier[self] . identifier[_get_digest] ( identifier[key_iterator] )) | def _generate_address(self, key_iterator):
# type: (KeyIterator) -> Address
'\n Generates a new address.\n\n Used in the event of a cache miss.\n '
if self.checksum:
return self.address_from_digest(digest=self._get_digest(key_iterator)).with_valid_checksum() # depends on [control=['if'], data=[]]
else:
return self.address_from_digest(self._get_digest(key_iterator)) |
def add_account_certificate(self, account_id, body, **kwargs):  # noqa: E501
    """Upload new trusted certificate.  # noqa: E501

    An endpoint for uploading new trusted certificates. **Example usage:** `curl -X POST https://api.us-east-1.mbedcloud.com/v3/accounts/{accountID}/trusted-certificates -d {\"name\": \"myCert1\", \"description\": \"very important cert\", \"certificate\": \"certificate_data\", \"service\": \"lwm2m\"} -H 'content-type: application/json' -H 'Authorization: Bearer API_KEY'`  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True

    >>> thread = api.add_account_certificate(account_id, body, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str account_id: Account ID. (required)
    :param TrustedCertificateRootReq body: A trusted certificate object with attributes, signature is optional. (required)
    :return: TrustedCertificateResp, or the request thread when called
        asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Both the synchronous and asynchronous paths delegate to the same
    # *_with_http_info helper, which returns either the response payload
    # or, when 'asynchronous' is truthy, the request thread.
    return self.add_account_certificate_with_http_info(account_id, body, **kwargs)  # noqa: E501
constant[Upload new trusted certificate. # noqa: E501
An endpoint for uploading new trusted certificates. **Example usage:** `curl -X POST https://api.us-east-1.mbedcloud.com/v3/accounts/{accountID}/trusted-certificates -d {"name": "myCert1", "description": "very important cert", "certificate": "certificate_data", "service": "lwm2m"} -H 'content-type: application/json' -H 'Authorization: Bearer API_KEY'` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.add_account_certificate(account_id, body, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param str account_id: Account ID. (required)
:param TrustedCertificateRootReq body: A trusted certificate object with attributes, signature is optional. (required)
:return: TrustedCertificateResp
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[asynchronous]]] begin[:]
return[call[name[self].add_account_certificate_with_http_info, parameter[name[account_id], name[body]]]] | keyword[def] identifier[add_account_certificate] ( identifier[self] , identifier[account_id] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[add_account_certificate_with_http_info] ( identifier[account_id] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[add_account_certificate_with_http_info] ( identifier[account_id] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data] | def add_account_certificate(self, account_id, body, **kwargs): # noqa: E501
'Upload new trusted certificate. # noqa: E501\n\n An endpoint for uploading new trusted certificates. **Example usage:** `curl -X POST https://api.us-east-1.mbedcloud.com/v3/accounts/{accountID}/trusted-certificates -d {"name": "myCert1", "description": "very important cert", "certificate": "certificate_data", "service": "lwm2m"} -H \'content-type: application/json\' -H \'Authorization: Bearer API_KEY\'` # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass asynchronous=True\n >>> thread = api.add_account_certificate(account_id, body, asynchronous=True)\n >>> result = thread.get()\n\n :param asynchronous bool\n :param str account_id: Account ID. (required)\n :param TrustedCertificateRootReq body: A trusted certificate object with attributes, signature is optional. (required)\n :return: TrustedCertificateResp\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.add_account_certificate_with_http_info(account_id, body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.add_account_certificate_with_http_info(account_id, body, **kwargs) # noqa: E501
return data |
def decode_nullable(self, data_type, obj):
    """
    Decode a possibly-null value.

    The data_type argument must be a Nullable.
    See json_compat_obj_decode() for argument descriptions.
    """
    # A null JSON value decodes straight to None; anything else is
    # decoded against the Nullable's wrapped validator.
    if obj is None:
        return None
    return self.json_compat_obj_decode_helper(data_type.validator, obj)
return None | def function[decode_nullable, parameter[self, data_type, obj]]:
constant[
The data_type argument must be a Nullable.
See json_compat_obj_decode() for argument descriptions.
]
if compare[name[obj] is_not constant[None]] begin[:]
return[call[name[self].json_compat_obj_decode_helper, parameter[name[data_type].validator, name[obj]]]] | keyword[def] identifier[decode_nullable] ( identifier[self] , identifier[data_type] , identifier[obj] ):
literal[string]
keyword[if] identifier[obj] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[json_compat_obj_decode_helper] ( identifier[data_type] . identifier[validator] , identifier[obj] )
keyword[else] :
keyword[return] keyword[None] | def decode_nullable(self, data_type, obj):
"""
The data_type argument must be a Nullable.
See json_compat_obj_decode() for argument descriptions.
"""
if obj is not None:
return self.json_compat_obj_decode_helper(data_type.validator, obj) # depends on [control=['if'], data=['obj']]
else:
return None |
def process_acl(auth_list, opts=None):
    '''
    Query LDAP, retrieve list of minion_ids from an OU or other search.
    For each minion_id returned from the LDAP search, copy the perms
    matchers into the auth dictionary
    :param auth_list:
    :param opts: __opts__ for when __opts__ is not injected
    :return: Modified auth list.
    '''
    # Plain-string entries are ordinary targets; only dict entries can
    # carry 'ldap(...)' search expressions as keys.
    has_ldap_target = any(
        target.startswith('ldap(')
        for entry in auth_list
        if not isinstance(entry, six.string_types)
        for target in entry.keys()
    )
    if has_ldap_target:
        return __expand_ldap_entries(auth_list, opts)
    return auth_list
return auth_list | def function[process_acl, parameter[auth_list, opts]]:
constant[
Query LDAP, retrieve list of minion_ids from an OU or other search.
For each minion_id returned from the LDAP search, copy the perms
matchers into the auth dictionary
:param auth_list:
:param opts: __opts__ for when __opts__ is not injected
:return: Modified auth list.
]
variable[ou_names] assign[=] list[[]]
for taget[name[item]] in starred[name[auth_list]] begin[:]
if call[name[isinstance], parameter[name[item], name[six].string_types]] begin[:]
continue
call[name[ou_names].extend, parameter[<ast.ListComp object at 0x7da1b1f75180>]]
if name[ou_names] begin[:]
variable[auth_list] assign[=] call[name[__expand_ldap_entries], parameter[name[auth_list], name[opts]]]
return[name[auth_list]] | keyword[def] identifier[process_acl] ( identifier[auth_list] , identifier[opts] = keyword[None] ):
literal[string]
identifier[ou_names] =[]
keyword[for] identifier[item] keyword[in] identifier[auth_list] :
keyword[if] identifier[isinstance] ( identifier[item] , identifier[six] . identifier[string_types] ):
keyword[continue]
identifier[ou_names] . identifier[extend] ([ identifier[potential_ou] keyword[for] identifier[potential_ou] keyword[in] identifier[item] . identifier[keys] () keyword[if] identifier[potential_ou] . identifier[startswith] ( literal[string] )])
keyword[if] identifier[ou_names] :
identifier[auth_list] = identifier[__expand_ldap_entries] ( identifier[auth_list] , identifier[opts] )
keyword[return] identifier[auth_list] | def process_acl(auth_list, opts=None):
"""
Query LDAP, retrieve list of minion_ids from an OU or other search.
For each minion_id returned from the LDAP search, copy the perms
matchers into the auth dictionary
:param auth_list:
:param opts: __opts__ for when __opts__ is not injected
:return: Modified auth list.
"""
ou_names = []
for item in auth_list:
if isinstance(item, six.string_types):
continue # depends on [control=['if'], data=[]]
ou_names.extend([potential_ou for potential_ou in item.keys() if potential_ou.startswith('ldap(')]) # depends on [control=['for'], data=['item']]
if ou_names:
auth_list = __expand_ldap_entries(auth_list, opts) # depends on [control=['if'], data=[]]
return auth_list |
def cli(env):
    """List placement groups."""
    groups = PlacementManager(env.client).list()

    table = formatting.Table(
        ["Id", "Name", "Backend Router", "Rule", "Guests", "Created"],
        title="Placement Groups"
    )

    # One table row per placement group returned by the API.
    for entry in groups:
        row = [
            entry['id'],
            entry['name'],
            entry['backendRouter']['hostname'],
            entry['rule']['name'],
            entry['guestCount'],
            entry['createDate'],
        ]
        table.add_row(row)

    env.fout(table)
constant[List placement groups.]
variable[manager] assign[=] call[name[PlacementManager], parameter[name[env].client]]
variable[result] assign[=] call[name[manager].list, parameter[]]
variable[table] assign[=] call[name[formatting].Table, parameter[list[[<ast.Constant object at 0x7da18fe91ab0>, <ast.Constant object at 0x7da18fe93e50>, <ast.Constant object at 0x7da18fe92950>, <ast.Constant object at 0x7da18fe91cc0>, <ast.Constant object at 0x7da18fe91ff0>, <ast.Constant object at 0x7da18fe91c00>]]]]
for taget[name[group]] in starred[name[result]] begin[:]
call[name[table].add_row, parameter[list[[<ast.Subscript object at 0x7da18fe93250>, <ast.Subscript object at 0x7da18fe90ee0>, <ast.Subscript object at 0x7da18fe924d0>, <ast.Subscript object at 0x7da18fe90160>, <ast.Subscript object at 0x7da18fe93b20>, <ast.Subscript object at 0x7da18fe90e20>]]]]
call[name[env].fout, parameter[name[table]]] | keyword[def] identifier[cli] ( identifier[env] ):
literal[string]
identifier[manager] = identifier[PlacementManager] ( identifier[env] . identifier[client] )
identifier[result] = identifier[manager] . identifier[list] ()
identifier[table] = identifier[formatting] . identifier[Table] (
[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ],
identifier[title] = literal[string]
)
keyword[for] identifier[group] keyword[in] identifier[result] :
identifier[table] . identifier[add_row] ([
identifier[group] [ literal[string] ],
identifier[group] [ literal[string] ],
identifier[group] [ literal[string] ][ literal[string] ],
identifier[group] [ literal[string] ][ literal[string] ],
identifier[group] [ literal[string] ],
identifier[group] [ literal[string] ]
])
identifier[env] . identifier[fout] ( identifier[table] ) | def cli(env):
"""List placement groups."""
manager = PlacementManager(env.client)
result = manager.list()
table = formatting.Table(['Id', 'Name', 'Backend Router', 'Rule', 'Guests', 'Created'], title='Placement Groups')
for group in result:
table.add_row([group['id'], group['name'], group['backendRouter']['hostname'], group['rule']['name'], group['guestCount'], group['createDate']]) # depends on [control=['for'], data=['group']]
env.fout(table) |
def maybe_start_recording(tokens, index):
    """Return a new _InlineRSTRecorder when it is time to record.

    Returns None (implicitly matching the original contract) for any
    token other than BeginInlineRST.
    """
    if tokens[index].type != TokenType.BeginInlineRST:
        return None
    return _InlineRSTRecorder(index)
literal[string]
keyword[if] identifier[tokens] [ identifier[index] ]. identifier[type] == identifier[TokenType] . identifier[BeginInlineRST] :
keyword[return] identifier[_InlineRSTRecorder] ( identifier[index] ) | def maybe_start_recording(tokens, index):
"""Return a new _InlineRSTRecorder when its time to record."""
if tokens[index].type == TokenType.BeginInlineRST:
return _InlineRSTRecorder(index) # depends on [control=['if'], data=[]] |
def run(extension=None, name=None, description=None, salt_dir=None, merge=False, temp_dir=None):
    '''
    A template factory for extending the salt ecosystem
    :param extension: The extension type, e.g. 'module', 'state', if omitted, user will be prompted
    :type extension: ``str``
    :param name: Python-friendly name for the module, if omitted, user will be prompted
    :type name: ``str``
    :param description: A description of the extension, if omitted, user will be prompted
    :type description: ``str``
    :param salt_dir: The targeted Salt source directory
    :type salt_dir: ``str``
    :param merge: Merge with salt directory, `False` to keep separate, `True` to merge trees.
    :type merge: ``bool``
    :param temp_dir: The directory for generated code, if omitted, system temp will be used
    :type temp_dir: ``str``
    :return: The path of the directory holding the generated code.
    :rtype: ``str``
    '''
    if not HAS_CLICK:
        print("click is not installed, please install using pip")
        sys.exit(1)

    if salt_dir is None:
        salt_dir = '.'

    MODULE_OPTIONS = _fetch_templates(os.path.join(salt_dir, 'templates'))

    if extension is None:
        print('Choose which type of extension you are developing for SaltStack')
        chosen_extension = _prompt_choice('Extension type', MODULE_OPTIONS)
    else:
        # Look the requested extension up by its short name. Using an
        # explicit filtered lookup avoids the IndexError that
        # list(zip(*MODULE_OPTIONS))[0] raised when the template list
        # was empty, and guarantees the friendly error path below.
        matches = [m for m in MODULE_OPTIONS if m[0] == extension]
        if not matches:
            print("Module extension option not valid")
            sys.exit(1)
        chosen_extension = matches[0]

    extension_type = chosen_extension[0]
    extension_context = chosen_extension[2]

    if name is None:
        print('Enter the short name for the module (e.g. mymodule)')
        name = _prompt_user_variable('Module name', '')

    if description is None:
        description = _prompt_user_variable('Short description of the module', '')

    template_dir = 'templates/{0}'.format(extension_type)

    # Base parameters available to every template and question default.
    param_dict = {
        "version": salt.version.SaltStackVersion.next_release().name,
        "module_name": name,
        "short_description": description,
        "release_date": date.today().strftime('%Y-%m-%d'),
        "year": date.today().strftime('%Y'),
    }

    # Ask any additional, template-defined questions. A question's default
    # value may itself be a template rendered against param_dict.
    additional_context = {}
    for key, val in extension_context.get('questions', {}).items():
        default = Template(val.get('default', '')).render(param_dict)
        prompt_var = _prompt_user_variable(val['question'], default)
        additional_context[key] = prompt_var

    # Later updates win: interactive answers override the static template
    # context, which overrides the base parameters.
    context = param_dict.copy()
    context.update(extension_context)
    context.update(additional_context)

    if temp_dir is None:
        temp_dir = tempfile.mkdtemp()

    apply_template(
        template_dir,
        temp_dir,
        context)

    if not merge:
        path = temp_dir
    else:
        # Copy the generated tree into the salt source directory.
        _mergetree(temp_dir, salt_dir)
        path = salt_dir

    log.info('New module stored in %s', path)
    return path
constant[
A template factory for extending the salt ecosystem
:param extension: The extension type, e.g. 'module', 'state', if omitted, user will be prompted
:type extension: ``str``
:param name: Python-friendly name for the module, if omitted, user will be prompted
:type name: ``str``
:param description: A description of the extension, if omitted, user will be prompted
:type description: ``str``
:param salt_dir: The targeted Salt source directory
:type salt_dir: ``str``
:param merge: Merge with salt directory, `False` to keep separate, `True` to merge trees.
:type merge: ``bool``
:param temp_dir: The directory for generated code, if omitted, system temp will be used
:type temp_dir: ``str``
]
if <ast.UnaryOp object at 0x7da2043440a0> begin[:]
call[name[print], parameter[constant[click is not installed, please install using pip]]]
call[name[sys].exit, parameter[constant[1]]]
if compare[name[salt_dir] is constant[None]] begin[:]
variable[salt_dir] assign[=] constant[.]
variable[MODULE_OPTIONS] assign[=] call[name[_fetch_templates], parameter[call[name[os].path.join, parameter[name[salt_dir], constant[templates]]]]]
if compare[name[extension] is constant[None]] begin[:]
call[name[print], parameter[constant[Choose which type of extension you are developing for SaltStack]]]
variable[extension_type] assign[=] constant[Extension type]
variable[chosen_extension] assign[=] call[name[_prompt_choice], parameter[name[extension_type], name[MODULE_OPTIONS]]]
variable[extension_type] assign[=] call[name[chosen_extension]][constant[0]]
variable[extension_context] assign[=] call[name[chosen_extension]][constant[2]]
if compare[name[name] is constant[None]] begin[:]
call[name[print], parameter[constant[Enter the short name for the module (e.g. mymodule)]]]
variable[name] assign[=] call[name[_prompt_user_variable], parameter[constant[Module name], constant[]]]
if compare[name[description] is constant[None]] begin[:]
variable[description] assign[=] call[name[_prompt_user_variable], parameter[constant[Short description of the module], constant[]]]
variable[template_dir] assign[=] call[constant[templates/{0}].format, parameter[name[extension_type]]]
variable[module_name] assign[=] name[name]
variable[param_dict] assign[=] dictionary[[<ast.Constant object at 0x7da2041db580>, <ast.Constant object at 0x7da2041d8430>, <ast.Constant object at 0x7da2041db850>, <ast.Constant object at 0x7da2041d9c00>, <ast.Constant object at 0x7da2041d9420>], [<ast.Attribute object at 0x7da2041dae60>, <ast.Name object at 0x7da2041d8310>, <ast.Name object at 0x7da2041da2c0>, <ast.Call object at 0x7da2041d9660>, <ast.Call object at 0x7da2041db3d0>]]
variable[additional_context] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2041dbc10>, <ast.Name object at 0x7da2041d8550>]]] in starred[call[call[name[extension_context].get, parameter[constant[questions], dictionary[[], []]]].items, parameter[]]] begin[:]
variable[default] assign[=] call[call[name[Template], parameter[call[name[val].get, parameter[constant[default], constant[]]]]].render, parameter[name[param_dict]]]
variable[prompt_var] assign[=] call[name[_prompt_user_variable], parameter[call[name[val]][constant[question]], name[default]]]
call[name[additional_context]][name[key]] assign[=] name[prompt_var]
variable[context] assign[=] call[name[param_dict].copy, parameter[]]
call[name[context].update, parameter[name[extension_context]]]
call[name[context].update, parameter[name[additional_context]]]
if compare[name[temp_dir] is constant[None]] begin[:]
variable[temp_dir] assign[=] call[name[tempfile].mkdtemp, parameter[]]
call[name[apply_template], parameter[name[template_dir], name[temp_dir], name[context]]]
if <ast.UnaryOp object at 0x7da2041d8940> begin[:]
variable[path] assign[=] name[temp_dir]
call[name[log].info, parameter[constant[New module stored in %s], name[path]]]
return[name[path]] | keyword[def] identifier[run] ( identifier[extension] = keyword[None] , identifier[name] = keyword[None] , identifier[description] = keyword[None] , identifier[salt_dir] = keyword[None] , identifier[merge] = keyword[False] , identifier[temp_dir] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[HAS_CLICK] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] identifier[salt_dir] keyword[is] keyword[None] :
identifier[salt_dir] = literal[string]
identifier[MODULE_OPTIONS] = identifier[_fetch_templates] ( identifier[os] . identifier[path] . identifier[join] ( identifier[salt_dir] , literal[string] ))
keyword[if] identifier[extension] keyword[is] keyword[None] :
identifier[print] ( literal[string] )
identifier[extension_type] = literal[string]
identifier[chosen_extension] = identifier[_prompt_choice] ( identifier[extension_type] , identifier[MODULE_OPTIONS] )
keyword[else] :
keyword[if] identifier[extension] keyword[not] keyword[in] identifier[list] ( identifier[zip] (* identifier[MODULE_OPTIONS] ))[ literal[int] ]:
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
identifier[chosen_extension] =[ identifier[m] keyword[for] identifier[m] keyword[in] identifier[MODULE_OPTIONS] keyword[if] identifier[m] [ literal[int] ]== identifier[extension] ][ literal[int] ]
identifier[extension_type] = identifier[chosen_extension] [ literal[int] ]
identifier[extension_context] = identifier[chosen_extension] [ literal[int] ]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[print] ( literal[string] )
identifier[name] = identifier[_prompt_user_variable] ( literal[string] , literal[string] )
keyword[if] identifier[description] keyword[is] keyword[None] :
identifier[description] = identifier[_prompt_user_variable] ( literal[string] , literal[string] )
identifier[template_dir] = literal[string] . identifier[format] ( identifier[extension_type] )
identifier[module_name] = identifier[name]
identifier[param_dict] ={
literal[string] : identifier[salt] . identifier[version] . identifier[SaltStackVersion] . identifier[next_release] (). identifier[name] ,
literal[string] : identifier[module_name] ,
literal[string] : identifier[description] ,
literal[string] : identifier[date] . identifier[today] (). identifier[strftime] ( literal[string] ),
literal[string] : identifier[date] . identifier[today] (). identifier[strftime] ( literal[string] ),
}
identifier[additional_context] ={}
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[extension_context] . identifier[get] ( literal[string] ,{}). identifier[items] ():
identifier[default] = identifier[Template] ( identifier[val] . identifier[get] ( literal[string] , literal[string] )). identifier[render] ( identifier[param_dict] )
identifier[prompt_var] = identifier[_prompt_user_variable] ( identifier[val] [ literal[string] ], identifier[default] )
identifier[additional_context] [ identifier[key] ]= identifier[prompt_var]
identifier[context] = identifier[param_dict] . identifier[copy] ()
identifier[context] . identifier[update] ( identifier[extension_context] )
identifier[context] . identifier[update] ( identifier[additional_context] )
keyword[if] identifier[temp_dir] keyword[is] keyword[None] :
identifier[temp_dir] = identifier[tempfile] . identifier[mkdtemp] ()
identifier[apply_template] (
identifier[template_dir] ,
identifier[temp_dir] ,
identifier[context] )
keyword[if] keyword[not] identifier[merge] :
identifier[path] = identifier[temp_dir]
keyword[else] :
identifier[_mergetree] ( identifier[temp_dir] , identifier[salt_dir] )
identifier[path] = identifier[salt_dir]
identifier[log] . identifier[info] ( literal[string] , identifier[path] )
keyword[return] identifier[path] | def run(extension=None, name=None, description=None, salt_dir=None, merge=False, temp_dir=None):
"""
A template factory for extending the salt ecosystem
:param extension: The extension type, e.g. 'module', 'state', if omitted, user will be prompted
:type extension: ``str``
:param name: Python-friendly name for the module, if omitted, user will be prompted
:type name: ``str``
:param description: A description of the extension, if omitted, user will be prompted
:type description: ``str``
:param salt_dir: The targeted Salt source directory
:type salt_dir: ``str``
:param merge: Merge with salt directory, `False` to keep separate, `True` to merge trees.
:type merge: ``bool``
:param temp_dir: The directory for generated code, if omitted, system temp will be used
:type temp_dir: ``str``
"""
if not HAS_CLICK:
print('click is not installed, please install using pip')
sys.exit(1) # depends on [control=['if'], data=[]]
if salt_dir is None:
salt_dir = '.' # depends on [control=['if'], data=['salt_dir']]
MODULE_OPTIONS = _fetch_templates(os.path.join(salt_dir, 'templates'))
if extension is None:
print('Choose which type of extension you are developing for SaltStack')
extension_type = 'Extension type'
chosen_extension = _prompt_choice(extension_type, MODULE_OPTIONS) # depends on [control=['if'], data=[]]
else:
if extension not in list(zip(*MODULE_OPTIONS))[0]:
print('Module extension option not valid')
sys.exit(1) # depends on [control=['if'], data=[]]
chosen_extension = [m for m in MODULE_OPTIONS if m[0] == extension][0]
extension_type = chosen_extension[0]
extension_context = chosen_extension[2]
if name is None:
print('Enter the short name for the module (e.g. mymodule)')
name = _prompt_user_variable('Module name', '') # depends on [control=['if'], data=['name']]
if description is None:
description = _prompt_user_variable('Short description of the module', '') # depends on [control=['if'], data=['description']]
template_dir = 'templates/{0}'.format(extension_type)
module_name = name
param_dict = {'version': salt.version.SaltStackVersion.next_release().name, 'module_name': module_name, 'short_description': description, 'release_date': date.today().strftime('%Y-%m-%d'), 'year': date.today().strftime('%Y')}
# get additional questions from template
additional_context = {}
for (key, val) in extension_context.get('questions', {}).items():
# allow templates to be used in default values.
default = Template(val.get('default', '')).render(param_dict)
prompt_var = _prompt_user_variable(val['question'], default)
additional_context[key] = prompt_var # depends on [control=['for'], data=[]]
context = param_dict.copy()
context.update(extension_context)
context.update(additional_context)
if temp_dir is None:
temp_dir = tempfile.mkdtemp() # depends on [control=['if'], data=['temp_dir']]
apply_template(template_dir, temp_dir, context)
if not merge:
path = temp_dir # depends on [control=['if'], data=[]]
else:
_mergetree(temp_dir, salt_dir)
path = salt_dir
log.info('New module stored in %s', path)
return path |
def _my_partial(func, *args, **kwargs):
    """Build a ``functools.partial`` that keeps ``from_string``.

    A ``partial`` object does not inherit class methods defined on
    FloatWithUnit, so after constructing the partial we attach
    ``FloatWithUnit.from_string`` to it by hand before returning it.
    """
    wrapped = partial(func, *args, **kwargs)
    # Monkey-patch the classmethod the partial object would otherwise lose.
    wrapped.from_string = FloatWithUnit.from_string
    return wrapped
constant[
Partial returns a partial object and therefore we cannot inherit class
methods defined in FloatWithUnit. This function calls partial and patches
the new class before returning.
]
variable[newobj] assign[=] call[name[partial], parameter[name[func], <ast.Starred object at 0x7da1b21a1a50>]]
name[newobj].from_string assign[=] name[FloatWithUnit].from_string
return[name[newobj]] | keyword[def] identifier[_my_partial] ( identifier[func] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[newobj] = identifier[partial] ( identifier[func] ,* identifier[args] ,** identifier[kwargs] )
identifier[newobj] . identifier[from_string] = identifier[FloatWithUnit] . identifier[from_string]
keyword[return] identifier[newobj] | def _my_partial(func, *args, **kwargs):
"""
Partial returns a partial object and therefore we cannot inherit class
methods defined in FloatWithUnit. This function calls partial and patches
the new class before returning.
"""
newobj = partial(func, *args, **kwargs)
# monkey patch
newobj.from_string = FloatWithUnit.from_string
return newobj |
def _time_delta_from_info(info):
"""Format the elapsed time for the given TensorBoardInfo.
Args:
info: A TensorBoardInfo value.
Returns:
A human-readable string describing the time since the server
described by `info` started: e.g., "2 days, 0:48:58".
"""
delta_seconds = int(time.time()) - info.start_time
return str(datetime.timedelta(seconds=delta_seconds)) | def function[_time_delta_from_info, parameter[info]]:
constant[Format the elapsed time for the given TensorBoardInfo.
Args:
info: A TensorBoardInfo value.
Returns:
A human-readable string describing the time since the server
described by `info` started: e.g., "2 days, 0:48:58".
]
variable[delta_seconds] assign[=] binary_operation[call[name[int], parameter[call[name[time].time, parameter[]]]] - name[info].start_time]
return[call[name[str], parameter[call[name[datetime].timedelta, parameter[]]]]] | keyword[def] identifier[_time_delta_from_info] ( identifier[info] ):
literal[string]
identifier[delta_seconds] = identifier[int] ( identifier[time] . identifier[time] ())- identifier[info] . identifier[start_time]
keyword[return] identifier[str] ( identifier[datetime] . identifier[timedelta] ( identifier[seconds] = identifier[delta_seconds] )) | def _time_delta_from_info(info):
"""Format the elapsed time for the given TensorBoardInfo.
Args:
info: A TensorBoardInfo value.
Returns:
A human-readable string describing the time since the server
described by `info` started: e.g., "2 days, 0:48:58".
"""
delta_seconds = int(time.time()) - info.start_time
return str(datetime.timedelta(seconds=delta_seconds)) |
def _to_json_like(self, include_defaults):
    ''' Returns a dictionary of the attributes of this object, in
    a layout corresponding to what BokehJS expects at unmarshalling time.
    This method does not convert "Bokeh types" into "plain JSON types,"
    for example each child Model will still be a Model, rather
    than turning into a reference, numpy isn't handled, etc.
    That's what "json like" means.
    This method should be considered "private" or "protected",
    for use internal to Bokeh; use ``to_json()`` instead because
    it gives you only plain JSON-compatible types.
    Args:
        include_defaults (bool) : whether to include attributes
            that haven't been changed from the default.
    '''
    all_attrs = self.properties_with_values(include_defaults=include_defaults)
    # If __subtype__ is defined, then this model may introduce properties
    # that don't exist on __view_model__ in bokehjs. Don't serialize such
    # properties.
    subtype = getattr(self.__class__, "__subtype__", None)
    if subtype is not None and subtype != self.__class__.__view_model__:
        attrs = {}
        for attr, value in all_attrs.items():
            # ``__dict__`` holds only attributes declared on this subclass
            # itself (not inherited ones), i.e. exactly the properties the
            # subtype introduced on top of __view_model__.
            if attr in self.__class__.__dict__:
                continue
            else:
                attrs[attr] = value
    else:
        attrs = all_attrs
    for (k, v) in attrs.items():
        # we can't serialize Infinity, we send it as None and
        # the other side has to fix it up. This transformation
        # can't be in our json_encoder because the json
        # module checks for inf before it calls the custom
        # encoder.
        # NOTE(review): only positive infinity is mapped to None here;
        # -inf and NaN fall through unchanged — confirm that is intended.
        if isinstance(v, float) and v == float('inf'):
            attrs[k] = None
    return attrs
constant[ Returns a dictionary of the attributes of this object, in
a layout corresponding to what BokehJS expects at unmarshalling time.
This method does not convert "Bokeh types" into "plain JSON types,"
for example each child Model will still be a Model, rather
than turning into a reference, numpy isn't handled, etc.
That's what "json like" means.
This method should be considered "private" or "protected",
for use internal to Bokeh; use ``to_json()`` instead because
it gives you only plain JSON-compatible types.
Args:
include_defaults (bool) : whether to include attributes
that haven't been changed from the default.
]
variable[all_attrs] assign[=] call[name[self].properties_with_values, parameter[]]
variable[subtype] assign[=] call[name[getattr], parameter[name[self].__class__, constant[__subtype__], constant[None]]]
if <ast.BoolOp object at 0x7da207f9acb0> begin[:]
variable[attrs] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da207f995a0>, <ast.Name object at 0x7da207f9a140>]]] in starred[call[name[all_attrs].items, parameter[]]] begin[:]
if compare[name[attr] in name[self].__class__.__dict__] begin[:]
continue
for taget[tuple[[<ast.Name object at 0x7da207f98d90>, <ast.Name object at 0x7da207f9afe0>]]] in starred[call[name[attrs].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da207f98730> begin[:]
call[name[attrs]][name[k]] assign[=] constant[None]
return[name[attrs]] | keyword[def] identifier[_to_json_like] ( identifier[self] , identifier[include_defaults] ):
literal[string]
identifier[all_attrs] = identifier[self] . identifier[properties_with_values] ( identifier[include_defaults] = identifier[include_defaults] )
identifier[subtype] = identifier[getattr] ( identifier[self] . identifier[__class__] , literal[string] , keyword[None] )
keyword[if] identifier[subtype] keyword[is] keyword[not] keyword[None] keyword[and] identifier[subtype] != identifier[self] . identifier[__class__] . identifier[__view_model__] :
identifier[attrs] ={}
keyword[for] identifier[attr] , identifier[value] keyword[in] identifier[all_attrs] . identifier[items] ():
keyword[if] identifier[attr] keyword[in] identifier[self] . identifier[__class__] . identifier[__dict__] :
keyword[continue]
keyword[else] :
identifier[attrs] [ identifier[attr] ]= identifier[value]
keyword[else] :
identifier[attrs] = identifier[all_attrs]
keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[attrs] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[v] , identifier[float] ) keyword[and] identifier[v] == identifier[float] ( literal[string] ):
identifier[attrs] [ identifier[k] ]= keyword[None]
keyword[return] identifier[attrs] | def _to_json_like(self, include_defaults):
""" Returns a dictionary of the attributes of this object, in
a layout corresponding to what BokehJS expects at unmarshalling time.
This method does not convert "Bokeh types" into "plain JSON types,"
for example each child Model will still be a Model, rather
than turning into a reference, numpy isn't handled, etc.
That's what "json like" means.
This method should be considered "private" or "protected",
for use internal to Bokeh; use ``to_json()`` instead because
it gives you only plain JSON-compatible types.
Args:
include_defaults (bool) : whether to include attributes
that haven't been changed from the default.
"""
all_attrs = self.properties_with_values(include_defaults=include_defaults)
# If __subtype__ is defined, then this model may introduce properties
# that don't exist on __view_model__ in bokehjs. Don't serialize such
# properties.
subtype = getattr(self.__class__, '__subtype__', None)
if subtype is not None and subtype != self.__class__.__view_model__:
attrs = {}
for (attr, value) in all_attrs.items():
if attr in self.__class__.__dict__:
continue # depends on [control=['if'], data=[]]
else:
attrs[attr] = value # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
attrs = all_attrs
for (k, v) in attrs.items():
# we can't serialize Infinity, we send it as None and
# the other side has to fix it up. This transformation
# can't be in our json_encoder because the json
# module checks for inf before it calls the custom
# encoder.
if isinstance(v, float) and v == float('inf'):
attrs[k] = None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return attrs |
def _log_posterior(theta, counts, alpha, beta, n):
    """Log of the posterior probability and gradient.

    Parameters
    ----------
    theta : ndarray, shape=(n_params,)
        The free parameters of the reversible rate matrix
    counts : ndarray, shape=(n, n)
        The count matrix (sufficient statistics for the likielihood)
    alpha : ndarray, shape=(n,)
        Dirichlet concentration parameters
    beta : ndarray, shape=(n_params-n,)
        Scale parameter for the exponential prior on the symmetric rate
        matrix.
    """
    # Data likelihood; ``grad`` is filled in and then accumulated in place
    # by the two prior terms below (they receive slices of it).
    log_like, grad = loglikelihood(theta, counts)
    # Exponential prior on the symmetric rates s_{ij} (first n_params-n entries).
    log_prior_s = lexponential(theta[:-n], beta, grad=grad[:-n])
    # Dirichlet prior on the stationary distribution \pi (last n entries).
    log_prior_pi = ldirichlet_softmax(theta[-n:], alpha=alpha, grad=grad[-n:])
    return log_like + log_prior_s + log_prior_pi, grad
constant[Log of the posterior probability and gradient
Parameters
----------
theta : ndarray, shape=(n_params,)
The free parameters of the reversible rate matrix
counts : ndarray, shape=(n, n)
The count matrix (sufficient statistics for the likielihood)
alpha : ndarray, shape=(n,)
Dirichlet concentration parameters
beta : ndarray, shape=(n_params-n,)
Scale parameter for the exponential prior on the symmetric rate
matrix.
]
<ast.Tuple object at 0x7da1b07845b0> assign[=] call[name[loglikelihood], parameter[name[theta], name[counts]]]
variable[logp2] assign[=] call[name[lexponential], parameter[call[name[theta]][<ast.Slice object at 0x7da1b0784250>], name[beta]]]
variable[logp3] assign[=] call[name[ldirichlet_softmax], parameter[call[name[theta]][<ast.Slice object at 0x7da1b07853c0>]]]
variable[logp] assign[=] binary_operation[binary_operation[name[logp1] + name[logp2]] + name[logp3]]
return[tuple[[<ast.Name object at 0x7da1b0786680>, <ast.Name object at 0x7da1b0784d90>]]] | keyword[def] identifier[_log_posterior] ( identifier[theta] , identifier[counts] , identifier[alpha] , identifier[beta] , identifier[n] ):
literal[string]
identifier[logp1] , identifier[grad] = identifier[loglikelihood] ( identifier[theta] , identifier[counts] )
identifier[logp2] = identifier[lexponential] ( identifier[theta] [:- identifier[n] ], identifier[beta] , identifier[grad] = identifier[grad] [:- identifier[n] ])
identifier[logp3] = identifier[ldirichlet_softmax] ( identifier[theta] [- identifier[n] :], identifier[alpha] = identifier[alpha] , identifier[grad] = identifier[grad] [- identifier[n] :])
identifier[logp] = identifier[logp1] + identifier[logp2] + identifier[logp3]
keyword[return] identifier[logp] , identifier[grad] | def _log_posterior(theta, counts, alpha, beta, n):
"""Log of the posterior probability and gradient
Parameters
----------
theta : ndarray, shape=(n_params,)
The free parameters of the reversible rate matrix
counts : ndarray, shape=(n, n)
The count matrix (sufficient statistics for the likielihood)
alpha : ndarray, shape=(n,)
Dirichlet concentration parameters
beta : ndarray, shape=(n_params-n,)
Scale parameter for the exponential prior on the symmetric rate
matrix.
"""
# likelihood + grad
(logp1, grad) = loglikelihood(theta, counts)
# exponential prior on s_{ij}
logp2 = lexponential(theta[:-n], beta, grad=grad[:-n])
# dirichlet prior on \pi
logp3 = ldirichlet_softmax(theta[-n:], alpha=alpha, grad=grad[-n:])
logp = logp1 + logp2 + logp3
return (logp, grad) |
def createEditor(self, delegate, parent, option):
    """ Creates a StringCtiEditor.
        For the parameters see the AbstractCti constructor documentation.
    """
    # ``option`` is accepted for interface compatibility but is not
    # forwarded; the editor only needs the item, delegate and parent.
    return StringCtiEditor(self, delegate, parent=parent)
constant[ Creates a StringCtiEditor.
For the parameters see the AbstractCti constructor documentation.
]
return[call[name[StringCtiEditor], parameter[name[self], name[delegate]]]] | keyword[def] identifier[createEditor] ( identifier[self] , identifier[delegate] , identifier[parent] , identifier[option] ):
literal[string]
keyword[return] identifier[StringCtiEditor] ( identifier[self] , identifier[delegate] , identifier[parent] = identifier[parent] ) | def createEditor(self, delegate, parent, option):
""" Creates a StringCtiEditor.
For the parameters see the AbstractCti constructor documentation.
"""
return StringCtiEditor(self, delegate, parent=parent) |
def ls(ctx, available):
"List installed datasets on path"
path = ctx.obj['path']
global_ = ctx.obj['global_']
_ls(available=available, **ctx.obj) | def function[ls, parameter[ctx, available]]:
constant[List installed datasets on path]
variable[path] assign[=] call[name[ctx].obj][constant[path]]
variable[global_] assign[=] call[name[ctx].obj][constant[global_]]
call[name[_ls], parameter[]] | keyword[def] identifier[ls] ( identifier[ctx] , identifier[available] ):
literal[string]
identifier[path] = identifier[ctx] . identifier[obj] [ literal[string] ]
identifier[global_] = identifier[ctx] . identifier[obj] [ literal[string] ]
identifier[_ls] ( identifier[available] = identifier[available] ,** identifier[ctx] . identifier[obj] ) | def ls(ctx, available):
"""List installed datasets on path"""
path = ctx.obj['path']
global_ = ctx.obj['global_']
_ls(available=available, **ctx.obj) |
def business(values, precision=3, prefix=True, prefixes=SI, statistic=median, default=''):
    """
    Convert a list of numbers to the engineering notation appropriate to a
    reference point like the minimum, the median or the mean --
    think of it as "business notation".
    Any number will have at most the amount of significant digits of the
    reference point, that is, the function will round beyond the
    decimal point.
    For example, if the reference is `233K`, this function will turn
    1,175,125 into `1180K` and 11,234 into `11K` (instead of 1175K and
    11.2K respectively.) This can help enormously with readability.
    If the reference point is equal to or larger than E15 or
    equal to or smaller than E-15, E12 and E-12 become the
    reference point instead. (Petas and femtos are too
    unfamiliar to people to be easily comprehended.)

    NOTE(review): the boolean ``prefix`` parameter is shadowed below by
    ``prefix = prefixes[e]`` and therefore has no effect, and ``default``
    is never read — confirm whether these are vestigial.
    """
    # Reference value (e.g. the median) that fixes the shared exponent.
    reference = statistic(values)
    if not reference:
        # Zero/empty reference: nothing sensible to scale by; emit blanks.
        return upcast([''] * len(values), values)
    exponent = order(reference)
    # Snap to a multiple of 3 (engineering notation), clamped to [E-12, E12].
    e = bound(exponent - exponent % 3, -12, 12)
    # the amount of decimals is the precision minus the amount of digits
    # before the decimal point, which is one more than the relative order
    # of magnitude (for example, 10^5 can be represented as 100K, with
    # those three digits representing place values of 10^3, 10^4 and 10^5)
    d = precision - (1 + exponent - e)
    prefix = prefixes[e]
    strings = []
    for value in values:
        if isnan(value):
            # NaNs are rendered as empty strings.
            strings.append('')
        else:
            normalized = value / 10.0 ** e
            # use `round` for rounding (beyond the decimal point if necessary)
            # use string formatting for padding to the right amount of decimals
            # and to hide decimals when necessary (by default, floats are always
            # displayed with a single decimal place, to distinguish them from
            # integers)
            relative_order = order(value) - exponent
            # Larger values (relative to the reference) get fewer decimals so
            # every entry carries at most the reference's significant digits.
            places = min(d - relative_order, d)
            normalized = round(normalized, places)
            strings.append('{0:,.{1}f}'.format(normalized, d) + prefix)
    return upcast(strings, values)
constant[
Convert a list of numbers to the engineering notation appropriate to a
reference point like the minimum, the median or the mean --
think of it as "business notation".
Any number will have at most the amount of significant digits of the
reference point, that is, the function will round beyond the
decimal point.
For example, if the reference is `233K`, this function will turn
1,175,125 into `1180K` and 11,234 into `11K` (instead of 1175K and
11.2K respectively.) This can help enormously with readability.
If the reference point is equal to or larger than E15 or
equal to or smaller than E-15, E12 and E-12 become the
reference point instead. (Petas and femtos are too
unfamiliar to people to be easily comprehended.)
]
variable[reference] assign[=] call[name[statistic], parameter[name[values]]]
if <ast.UnaryOp object at 0x7da20c6e5b10> begin[:]
return[call[name[upcast], parameter[binary_operation[list[[<ast.Constant object at 0x7da20c6e53f0>]] * call[name[len], parameter[name[values]]]], name[values]]]]
variable[exponent] assign[=] call[name[order], parameter[name[reference]]]
variable[e] assign[=] call[name[bound], parameter[binary_operation[name[exponent] - binary_operation[name[exponent] <ast.Mod object at 0x7da2590d6920> constant[3]]], <ast.UnaryOp object at 0x7da20c6e5510>, constant[12]]]
variable[d] assign[=] binary_operation[name[precision] - binary_operation[binary_operation[constant[1] + name[exponent]] - name[e]]]
variable[prefix] assign[=] call[name[prefixes]][name[e]]
variable[strings] assign[=] list[[]]
for taget[name[value]] in starred[name[values]] begin[:]
if call[name[isnan], parameter[name[value]]] begin[:]
call[name[strings].append, parameter[constant[]]]
return[call[name[upcast], parameter[name[strings], name[values]]]] | keyword[def] identifier[business] ( identifier[values] , identifier[precision] = literal[int] , identifier[prefix] = keyword[True] , identifier[prefixes] = identifier[SI] , identifier[statistic] = identifier[median] , identifier[default] = literal[string] ):
literal[string]
identifier[reference] = identifier[statistic] ( identifier[values] )
keyword[if] keyword[not] identifier[reference] :
keyword[return] identifier[upcast] ([ literal[string] ]* identifier[len] ( identifier[values] ), identifier[values] )
identifier[exponent] = identifier[order] ( identifier[reference] )
identifier[e] = identifier[bound] ( identifier[exponent] - identifier[exponent] % literal[int] ,- literal[int] , literal[int] )
identifier[d] = identifier[precision] -( literal[int] + identifier[exponent] - identifier[e] )
identifier[prefix] = identifier[prefixes] [ identifier[e] ]
identifier[strings] =[]
keyword[for] identifier[value] keyword[in] identifier[values] :
keyword[if] identifier[isnan] ( identifier[value] ):
identifier[strings] . identifier[append] ( literal[string] )
keyword[else] :
identifier[normalized] = identifier[value] / literal[int] ** identifier[e]
identifier[relative_order] = identifier[order] ( identifier[value] )- identifier[exponent]
identifier[places] = identifier[min] ( identifier[d] - identifier[relative_order] , identifier[d] )
identifier[normalized] = identifier[round] ( identifier[normalized] , identifier[places] )
identifier[strings] . identifier[append] ( literal[string] . identifier[format] ( identifier[normalized] , identifier[d] )+ identifier[prefix] )
keyword[return] identifier[upcast] ( identifier[strings] , identifier[values] ) | def business(values, precision=3, prefix=True, prefixes=SI, statistic=median, default=''):
"""
Convert a list of numbers to the engineering notation appropriate to a
reference point like the minimum, the median or the mean --
think of it as "business notation".
Any number will have at most the amount of significant digits of the
reference point, that is, the function will round beyond the
decimal point.
For example, if the reference is `233K`, this function will turn
1,175,125 into `1180K` and 11,234 into `11K` (instead of 1175K and
11.2K respectively.) This can help enormously with readability.
If the reference point is equal to or larger than E15 or
equal to or smaller than E-15, E12 and E-12 become the
reference point instead. (Petas and femtos are too
unfamiliar to people to be easily comprehended.)
"""
reference = statistic(values)
if not reference:
return upcast([''] * len(values), values) # depends on [control=['if'], data=[]]
exponent = order(reference)
e = bound(exponent - exponent % 3, -12, 12)
# the amount of decimals is the precision minus the amount of digits
# before the decimal point, which is one more than the relative order
# of magnitude (for example, 10^5 can be represented as 100K, with
# those three digits representing place values of 10^3, 10^4 and 10^5)
d = precision - (1 + exponent - e)
prefix = prefixes[e]
strings = []
for value in values:
if isnan(value):
strings.append('') # depends on [control=['if'], data=[]]
else:
normalized = value / 10.0 ** e
# use `round` for rounding (beyond the decimal point if necessary)
# use string formatting for padding to the right amount of decimals
# and to hide decimals when necessary (by default, floats are always
# displayed with a single decimal place, to distinguish them from
# integers)
relative_order = order(value) - exponent
places = min(d - relative_order, d)
normalized = round(normalized, places)
strings.append('{0:,.{1}f}'.format(normalized, d) + prefix) # depends on [control=['for'], data=['value']]
return upcast(strings, values) |
def power_under_cph(n_exp, n_con, p_exp, p_con, postulated_hazard_ratio, alpha=0.05):
"""
This computes the power of the hypothesis test that the two groups, experiment and control,
have different hazards (that is, the relative hazard ratio is different from 1.)
Parameters
----------
n_exp : integer
size of the experiment group.
n_con : integer
size of the control group.
p_exp : float
probability of failure in experimental group over period of study.
p_con : float
probability of failure in control group over period of study
postulated_hazard_ratio : float
the postulated hazard ratio
alpha : float, optional (default=0.05)
type I error rate
Returns
-------
float:
power to detect the magnitude of the hazard ratio as small as that specified by postulated_hazard_ratio.
Notes
-----
`Reference <https://cran.r-project.org/web/packages/powerSurvEpi/powerSurvEpi.pdf>`_.
See Also
--------
sample_size_necessary_under_cph
"""
def z(p):
return stats.norm.ppf(p)
m = n_exp * p_exp + n_con * p_con
k = float(n_exp) / float(n_con)
return stats.norm.cdf(
np.sqrt(k * m) * abs(postulated_hazard_ratio - 1) / (k * postulated_hazard_ratio + 1) - z(1 - alpha / 2.0)
) | def function[power_under_cph, parameter[n_exp, n_con, p_exp, p_con, postulated_hazard_ratio, alpha]]:
constant[
This computes the power of the hypothesis test that the two groups, experiment and control,
have different hazards (that is, the relative hazard ratio is different from 1.)
Parameters
----------
n_exp : integer
size of the experiment group.
n_con : integer
size of the control group.
p_exp : float
probability of failure in experimental group over period of study.
p_con : float
probability of failure in control group over period of study
postulated_hazard_ratio : float
the postulated hazard ratio
alpha : float, optional (default=0.05)
type I error rate
Returns
-------
float:
power to detect the magnitude of the hazard ratio as small as that specified by postulated_hazard_ratio.
Notes
-----
`Reference <https://cran.r-project.org/web/packages/powerSurvEpi/powerSurvEpi.pdf>`_.
See Also
--------
sample_size_necessary_under_cph
]
def function[z, parameter[p]]:
return[call[name[stats].norm.ppf, parameter[name[p]]]]
variable[m] assign[=] binary_operation[binary_operation[name[n_exp] * name[p_exp]] + binary_operation[name[n_con] * name[p_con]]]
variable[k] assign[=] binary_operation[call[name[float], parameter[name[n_exp]]] / call[name[float], parameter[name[n_con]]]]
return[call[name[stats].norm.cdf, parameter[binary_operation[binary_operation[binary_operation[call[name[np].sqrt, parameter[binary_operation[name[k] * name[m]]]] * call[name[abs], parameter[binary_operation[name[postulated_hazard_ratio] - constant[1]]]]] / binary_operation[binary_operation[name[k] * name[postulated_hazard_ratio]] + constant[1]]] - call[name[z], parameter[binary_operation[constant[1] - binary_operation[name[alpha] / constant[2.0]]]]]]]]] | keyword[def] identifier[power_under_cph] ( identifier[n_exp] , identifier[n_con] , identifier[p_exp] , identifier[p_con] , identifier[postulated_hazard_ratio] , identifier[alpha] = literal[int] ):
literal[string]
keyword[def] identifier[z] ( identifier[p] ):
keyword[return] identifier[stats] . identifier[norm] . identifier[ppf] ( identifier[p] )
identifier[m] = identifier[n_exp] * identifier[p_exp] + identifier[n_con] * identifier[p_con]
identifier[k] = identifier[float] ( identifier[n_exp] )/ identifier[float] ( identifier[n_con] )
keyword[return] identifier[stats] . identifier[norm] . identifier[cdf] (
identifier[np] . identifier[sqrt] ( identifier[k] * identifier[m] )* identifier[abs] ( identifier[postulated_hazard_ratio] - literal[int] )/( identifier[k] * identifier[postulated_hazard_ratio] + literal[int] )- identifier[z] ( literal[int] - identifier[alpha] / literal[int] )
) | def power_under_cph(n_exp, n_con, p_exp, p_con, postulated_hazard_ratio, alpha=0.05):
"""
This computes the power of the hypothesis test that the two groups, experiment and control,
have different hazards (that is, the relative hazard ratio is different from 1.)
Parameters
----------
n_exp : integer
size of the experiment group.
n_con : integer
size of the control group.
p_exp : float
probability of failure in experimental group over period of study.
p_con : float
probability of failure in control group over period of study
postulated_hazard_ratio : float
the postulated hazard ratio
alpha : float, optional (default=0.05)
type I error rate
Returns
-------
float:
power to detect the magnitude of the hazard ratio as small as that specified by postulated_hazard_ratio.
Notes
-----
`Reference <https://cran.r-project.org/web/packages/powerSurvEpi/powerSurvEpi.pdf>`_.
See Also
--------
sample_size_necessary_under_cph
"""
def z(p):
return stats.norm.ppf(p)
m = n_exp * p_exp + n_con * p_con
k = float(n_exp) / float(n_con)
return stats.norm.cdf(np.sqrt(k * m) * abs(postulated_hazard_ratio - 1) / (k * postulated_hazard_ratio + 1) - z(1 - alpha / 2.0)) |
def write_loom(self, filename: PathLike, write_obsm_varm: bool = False):
"""Write ``.loom``-formatted hdf5 file.
Parameters
----------
filename
The filename.
"""
from .readwrite.write import write_loom
write_loom(filename, self, write_obsm_varm = write_obsm_varm) | def function[write_loom, parameter[self, filename, write_obsm_varm]]:
constant[Write ``.loom``-formatted hdf5 file.
Parameters
----------
filename
The filename.
]
from relative_module[readwrite.write] import module[write_loom]
call[name[write_loom], parameter[name[filename], name[self]]] | keyword[def] identifier[write_loom] ( identifier[self] , identifier[filename] : identifier[PathLike] , identifier[write_obsm_varm] : identifier[bool] = keyword[False] ):
literal[string]
keyword[from] . identifier[readwrite] . identifier[write] keyword[import] identifier[write_loom]
identifier[write_loom] ( identifier[filename] , identifier[self] , identifier[write_obsm_varm] = identifier[write_obsm_varm] ) | def write_loom(self, filename: PathLike, write_obsm_varm: bool=False):
"""Write ``.loom``-formatted hdf5 file.
Parameters
----------
filename
The filename.
"""
from .readwrite.write import write_loom
write_loom(filename, self, write_obsm_varm=write_obsm_varm) |
def timeline_by_match(self, region, match_id):
"""
Get match timeline by match ID.
Not all matches have timeline data.
:param string region: The region to execute this request on
:param long match_id: The match ID.
:returns: MatchTimelineDto
"""
url, query = MatchApiV4Urls.timeline_by_match(region=region, match_id=match_id)
return self._raw_request(self.timeline_by_match.__name__, region, url, query) | def function[timeline_by_match, parameter[self, region, match_id]]:
constant[
Get match timeline by match ID.
Not all matches have timeline data.
:param string region: The region to execute this request on
:param long match_id: The match ID.
:returns: MatchTimelineDto
]
<ast.Tuple object at 0x7da1b23efee0> assign[=] call[name[MatchApiV4Urls].timeline_by_match, parameter[]]
return[call[name[self]._raw_request, parameter[name[self].timeline_by_match.__name__, name[region], name[url], name[query]]]] | keyword[def] identifier[timeline_by_match] ( identifier[self] , identifier[region] , identifier[match_id] ):
literal[string]
identifier[url] , identifier[query] = identifier[MatchApiV4Urls] . identifier[timeline_by_match] ( identifier[region] = identifier[region] , identifier[match_id] = identifier[match_id] )
keyword[return] identifier[self] . identifier[_raw_request] ( identifier[self] . identifier[timeline_by_match] . identifier[__name__] , identifier[region] , identifier[url] , identifier[query] ) | def timeline_by_match(self, region, match_id):
"""
Get match timeline by match ID.
Not all matches have timeline data.
:param string region: The region to execute this request on
:param long match_id: The match ID.
:returns: MatchTimelineDto
"""
(url, query) = MatchApiV4Urls.timeline_by_match(region=region, match_id=match_id)
return self._raw_request(self.timeline_by_match.__name__, region, url, query) |
def setCodeVersion(self, newVersion, callback = None):
"""Switch to a new code version on all cluster nodes. You
should ensure that cluster nodes are updated, otherwise they
won't be able to apply commands.
:param newVersion: new code version
:type int
:param callback: will be called on cussess or fail
:type callback: function(`FAIL_REASON <#pysyncobj.FAIL_REASON>`_, None)
"""
assert isinstance(newVersion, int)
if newVersion > self.__selfCodeVersion:
raise Exception('wrong version, current version is %d, requested version is %d' % (self.__selfCodeVersion, newVersion))
if newVersion < self.__enabledCodeVersion:
raise Exception('wrong version, enabled version is %d, requested version is %d' % (self.__enabledCodeVersion, newVersion))
self._applyCommand(pickle.dumps(newVersion), callback, _COMMAND_TYPE.VERSION) | def function[setCodeVersion, parameter[self, newVersion, callback]]:
constant[Switch to a new code version on all cluster nodes. You
should ensure that cluster nodes are updated, otherwise they
won't be able to apply commands.
:param newVersion: new code version
:type int
:param callback: will be called on cussess or fail
:type callback: function(`FAIL_REASON <#pysyncobj.FAIL_REASON>`_, None)
]
assert[call[name[isinstance], parameter[name[newVersion], name[int]]]]
if compare[name[newVersion] greater[>] name[self].__selfCodeVersion] begin[:]
<ast.Raise object at 0x7da18f09d3f0>
if compare[name[newVersion] less[<] name[self].__enabledCodeVersion] begin[:]
<ast.Raise object at 0x7da18f09efb0>
call[name[self]._applyCommand, parameter[call[name[pickle].dumps, parameter[name[newVersion]]], name[callback], name[_COMMAND_TYPE].VERSION]] | keyword[def] identifier[setCodeVersion] ( identifier[self] , identifier[newVersion] , identifier[callback] = keyword[None] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[newVersion] , identifier[int] )
keyword[if] identifier[newVersion] > identifier[self] . identifier[__selfCodeVersion] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[self] . identifier[__selfCodeVersion] , identifier[newVersion] ))
keyword[if] identifier[newVersion] < identifier[self] . identifier[__enabledCodeVersion] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[self] . identifier[__enabledCodeVersion] , identifier[newVersion] ))
identifier[self] . identifier[_applyCommand] ( identifier[pickle] . identifier[dumps] ( identifier[newVersion] ), identifier[callback] , identifier[_COMMAND_TYPE] . identifier[VERSION] ) | def setCodeVersion(self, newVersion, callback=None):
"""Switch to a new code version on all cluster nodes. You
should ensure that cluster nodes are updated, otherwise they
won't be able to apply commands.
:param newVersion: new code version
:type int
:param callback: will be called on cussess or fail
:type callback: function(`FAIL_REASON <#pysyncobj.FAIL_REASON>`_, None)
"""
assert isinstance(newVersion, int)
if newVersion > self.__selfCodeVersion:
raise Exception('wrong version, current version is %d, requested version is %d' % (self.__selfCodeVersion, newVersion)) # depends on [control=['if'], data=['newVersion']]
if newVersion < self.__enabledCodeVersion:
raise Exception('wrong version, enabled version is %d, requested version is %d' % (self.__enabledCodeVersion, newVersion)) # depends on [control=['if'], data=['newVersion']]
self._applyCommand(pickle.dumps(newVersion), callback, _COMMAND_TYPE.VERSION) |
def _update_info(self):
"""
Call parent method and make sure this is in fact a
image HDU. Set dims in C order
"""
super(ImageHDU, self)._update_info()
if self._info['hdutype'] != IMAGE_HDU:
mess = "Extension %s is not a Image HDU" % self.ext
raise ValueError(mess)
# convert to c order
if 'dims' in self._info:
self._info['dims'] = list(reversed(self._info['dims'])) | def function[_update_info, parameter[self]]:
constant[
Call parent method and make sure this is in fact a
image HDU. Set dims in C order
]
call[call[name[super], parameter[name[ImageHDU], name[self]]]._update_info, parameter[]]
if compare[call[name[self]._info][constant[hdutype]] not_equal[!=] name[IMAGE_HDU]] begin[:]
variable[mess] assign[=] binary_operation[constant[Extension %s is not a Image HDU] <ast.Mod object at 0x7da2590d6920> name[self].ext]
<ast.Raise object at 0x7da18f58d870>
if compare[constant[dims] in name[self]._info] begin[:]
call[name[self]._info][constant[dims]] assign[=] call[name[list], parameter[call[name[reversed], parameter[call[name[self]._info][constant[dims]]]]]] | keyword[def] identifier[_update_info] ( identifier[self] ):
literal[string]
identifier[super] ( identifier[ImageHDU] , identifier[self] ). identifier[_update_info] ()
keyword[if] identifier[self] . identifier[_info] [ literal[string] ]!= identifier[IMAGE_HDU] :
identifier[mess] = literal[string] % identifier[self] . identifier[ext]
keyword[raise] identifier[ValueError] ( identifier[mess] )
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_info] :
identifier[self] . identifier[_info] [ literal[string] ]= identifier[list] ( identifier[reversed] ( identifier[self] . identifier[_info] [ literal[string] ])) | def _update_info(self):
"""
Call parent method and make sure this is in fact a
image HDU. Set dims in C order
"""
super(ImageHDU, self)._update_info()
if self._info['hdutype'] != IMAGE_HDU:
mess = 'Extension %s is not a Image HDU' % self.ext
raise ValueError(mess) # depends on [control=['if'], data=[]]
# convert to c order
if 'dims' in self._info:
self._info['dims'] = list(reversed(self._info['dims'])) # depends on [control=['if'], data=[]] |
def attached_socket(self, *args, **kwargs):
"""Opens a raw socket in a ``with`` block to write data to Splunk.
The arguments are identical to those for :meth:`attach`. The socket is
automatically closed at the end of the ``with`` block, even if an
exception is raised in the block.
:param host: The host value for events written to the stream.
:type host: ``string``
:param source: The source value for events written to the stream.
:type source: ``string``
:param sourcetype: The sourcetype value for events written to the
stream.
:type sourcetype: ``string``
:returns: Nothing.
**Example**::
import splunklib.client as client
s = client.connect(...)
index = s.indexes['some_index']
with index.attached_socket(sourcetype='test') as sock:
sock.send('Test event\\r\\n')
"""
try:
sock = self.attach(*args, **kwargs)
yield sock
finally:
sock.shutdown(socket.SHUT_RDWR)
sock.close() | def function[attached_socket, parameter[self]]:
constant[Opens a raw socket in a ``with`` block to write data to Splunk.
The arguments are identical to those for :meth:`attach`. The socket is
automatically closed at the end of the ``with`` block, even if an
exception is raised in the block.
:param host: The host value for events written to the stream.
:type host: ``string``
:param source: The source value for events written to the stream.
:type source: ``string``
:param sourcetype: The sourcetype value for events written to the
stream.
:type sourcetype: ``string``
:returns: Nothing.
**Example**::
import splunklib.client as client
s = client.connect(...)
index = s.indexes['some_index']
with index.attached_socket(sourcetype='test') as sock:
sock.send('Test event\r\n')
]
<ast.Try object at 0x7da1b1952710> | keyword[def] identifier[attached_socket] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[sock] = identifier[self] . identifier[attach] (* identifier[args] ,** identifier[kwargs] )
keyword[yield] identifier[sock]
keyword[finally] :
identifier[sock] . identifier[shutdown] ( identifier[socket] . identifier[SHUT_RDWR] )
identifier[sock] . identifier[close] () | def attached_socket(self, *args, **kwargs):
"""Opens a raw socket in a ``with`` block to write data to Splunk.
The arguments are identical to those for :meth:`attach`. The socket is
automatically closed at the end of the ``with`` block, even if an
exception is raised in the block.
:param host: The host value for events written to the stream.
:type host: ``string``
:param source: The source value for events written to the stream.
:type source: ``string``
:param sourcetype: The sourcetype value for events written to the
stream.
:type sourcetype: ``string``
:returns: Nothing.
**Example**::
import splunklib.client as client
s = client.connect(...)
index = s.indexes['some_index']
with index.attached_socket(sourcetype='test') as sock:
sock.send('Test event\\r\\n')
"""
try:
sock = self.attach(*args, **kwargs)
yield sock # depends on [control=['try'], data=[]]
finally:
sock.shutdown(socket.SHUT_RDWR)
sock.close() |
def ConsoleLogHandler(loggerRef='', handler=None, level=logging.DEBUG, color=None):
"""Add a handler to stderr with our custom formatter to a logger."""
if isinstance(loggerRef, logging.Logger):
pass
elif isinstance(loggerRef, str):
# check for root
if not loggerRef:
loggerRef = _log
# check for a valid logger name
elif loggerRef not in logging.Logger.manager.loggerDict:
raise RuntimeError("not a valid logger name: %r" % (loggerRef,))
# get the logger
loggerRef = logging.getLogger(loggerRef)
else:
raise RuntimeError("not a valid logger reference: %r" % (loggerRef,))
# see if this (or its parent) is a module level logger
if hasattr(loggerRef, 'globs'):
loggerRef.globs['_debug'] += 1
elif hasattr(loggerRef.parent, 'globs'):
loggerRef.parent.globs['_debug'] += 1
# make a handler if one wasn't provided
if not handler:
handler = logging.StreamHandler()
handler.setLevel(level)
# use our formatter
handler.setFormatter(LoggingFormatter(color))
# add it to the logger
loggerRef.addHandler(handler)
# make sure the logger has at least this level
loggerRef.setLevel(level) | def function[ConsoleLogHandler, parameter[loggerRef, handler, level, color]]:
constant[Add a handler to stderr with our custom formatter to a logger.]
if call[name[isinstance], parameter[name[loggerRef], name[logging].Logger]] begin[:]
pass
if call[name[hasattr], parameter[name[loggerRef], constant[globs]]] begin[:]
<ast.AugAssign object at 0x7da1b08e61a0>
if <ast.UnaryOp object at 0x7da1b08e4df0> begin[:]
variable[handler] assign[=] call[name[logging].StreamHandler, parameter[]]
call[name[handler].setLevel, parameter[name[level]]]
call[name[handler].setFormatter, parameter[call[name[LoggingFormatter], parameter[name[color]]]]]
call[name[loggerRef].addHandler, parameter[name[handler]]]
call[name[loggerRef].setLevel, parameter[name[level]]] | keyword[def] identifier[ConsoleLogHandler] ( identifier[loggerRef] = literal[string] , identifier[handler] = keyword[None] , identifier[level] = identifier[logging] . identifier[DEBUG] , identifier[color] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[loggerRef] , identifier[logging] . identifier[Logger] ):
keyword[pass]
keyword[elif] identifier[isinstance] ( identifier[loggerRef] , identifier[str] ):
keyword[if] keyword[not] identifier[loggerRef] :
identifier[loggerRef] = identifier[_log]
keyword[elif] identifier[loggerRef] keyword[not] keyword[in] identifier[logging] . identifier[Logger] . identifier[manager] . identifier[loggerDict] :
keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[loggerRef] ,))
identifier[loggerRef] = identifier[logging] . identifier[getLogger] ( identifier[loggerRef] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[loggerRef] ,))
keyword[if] identifier[hasattr] ( identifier[loggerRef] , literal[string] ):
identifier[loggerRef] . identifier[globs] [ literal[string] ]+= literal[int]
keyword[elif] identifier[hasattr] ( identifier[loggerRef] . identifier[parent] , literal[string] ):
identifier[loggerRef] . identifier[parent] . identifier[globs] [ literal[string] ]+= literal[int]
keyword[if] keyword[not] identifier[handler] :
identifier[handler] = identifier[logging] . identifier[StreamHandler] ()
identifier[handler] . identifier[setLevel] ( identifier[level] )
identifier[handler] . identifier[setFormatter] ( identifier[LoggingFormatter] ( identifier[color] ))
identifier[loggerRef] . identifier[addHandler] ( identifier[handler] )
identifier[loggerRef] . identifier[setLevel] ( identifier[level] ) | def ConsoleLogHandler(loggerRef='', handler=None, level=logging.DEBUG, color=None):
"""Add a handler to stderr with our custom formatter to a logger."""
if isinstance(loggerRef, logging.Logger):
pass # depends on [control=['if'], data=[]]
elif isinstance(loggerRef, str):
# check for root
if not loggerRef:
loggerRef = _log # depends on [control=['if'], data=[]]
# check for a valid logger name
elif loggerRef not in logging.Logger.manager.loggerDict:
raise RuntimeError('not a valid logger name: %r' % (loggerRef,)) # depends on [control=['if'], data=['loggerRef']]
# get the logger
loggerRef = logging.getLogger(loggerRef) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('not a valid logger reference: %r' % (loggerRef,))
# see if this (or its parent) is a module level logger
if hasattr(loggerRef, 'globs'):
loggerRef.globs['_debug'] += 1 # depends on [control=['if'], data=[]]
elif hasattr(loggerRef.parent, 'globs'):
loggerRef.parent.globs['_debug'] += 1 # depends on [control=['if'], data=[]]
# make a handler if one wasn't provided
if not handler:
handler = logging.StreamHandler()
handler.setLevel(level) # depends on [control=['if'], data=[]]
# use our formatter
handler.setFormatter(LoggingFormatter(color))
# add it to the logger
loggerRef.addHandler(handler)
# make sure the logger has at least this level
loggerRef.setLevel(level) |
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure that a pagerduty escalation policy exists. Will create or update as needed.
This method accepts as args everything defined in
https://developer.pagerduty.com/documentation/rest/escalation_policies/create.
In addition, user and schedule id's will be translated from name (or email address)
into PagerDuty unique ids. For example:
.. code-block:: yaml
pagerduty_escalation_policy.present:
- name: bruce test escalation policy
- escalation_rules:
- targets:
- type: schedule
id: 'bruce test schedule level1'
- type: user
id: 'Bruce Sherrod'
In this example, 'Bruce Sherrod' will be looked up and replaced with the
PagerDuty id (usually a 7 digit all-caps string, e.g. PX6GQL7)
'''
# for convenience, we accept id, name, or email for users
# and we accept the id or name for schedules
for escalation_rule in kwargs['escalation_rules']:
for target in escalation_rule['targets']:
target_id = None
if target['type'] == 'user':
user = __salt__['pagerduty_util.get_resource']('users',
target['id'],
['email', 'name', 'id'],
profile=profile,
subdomain=subdomain,
api_key=api_key)
if user:
target_id = user['id']
elif target['type'] == 'schedule':
schedule = __salt__['pagerduty_util.get_resource']('schedules',
target['id'],
['name', 'id'],
profile=profile,
subdomain=subdomain,
api_key=api_key)
if schedule:
target_id = schedule['schedule']['id']
if target_id is None:
raise Exception('unidentified target: {0}'.format(target))
target['id'] = target_id
r = __salt__['pagerduty_util.resource_present']('escalation_policies',
['name', 'id'],
_diff,
profile,
subdomain,
api_key,
**kwargs)
return r | def function[present, parameter[profile, subdomain, api_key]]:
constant[
Ensure that a pagerduty escalation policy exists. Will create or update as needed.
This method accepts as args everything defined in
https://developer.pagerduty.com/documentation/rest/escalation_policies/create.
In addition, user and schedule id's will be translated from name (or email address)
into PagerDuty unique ids. For example:
.. code-block:: yaml
pagerduty_escalation_policy.present:
- name: bruce test escalation policy
- escalation_rules:
- targets:
- type: schedule
id: 'bruce test schedule level1'
- type: user
id: 'Bruce Sherrod'
In this example, 'Bruce Sherrod' will be looked up and replaced with the
PagerDuty id (usually a 7 digit all-caps string, e.g. PX6GQL7)
]
for taget[name[escalation_rule]] in starred[call[name[kwargs]][constant[escalation_rules]]] begin[:]
for taget[name[target]] in starred[call[name[escalation_rule]][constant[targets]]] begin[:]
variable[target_id] assign[=] constant[None]
if compare[call[name[target]][constant[type]] equal[==] constant[user]] begin[:]
variable[user] assign[=] call[call[name[__salt__]][constant[pagerduty_util.get_resource]], parameter[constant[users], call[name[target]][constant[id]], list[[<ast.Constant object at 0x7da204344820>, <ast.Constant object at 0x7da204344760>, <ast.Constant object at 0x7da204347ee0>]]]]
if name[user] begin[:]
variable[target_id] assign[=] call[name[user]][constant[id]]
if compare[name[target_id] is constant[None]] begin[:]
<ast.Raise object at 0x7da20c76d990>
call[name[target]][constant[id]] assign[=] name[target_id]
variable[r] assign[=] call[call[name[__salt__]][constant[pagerduty_util.resource_present]], parameter[constant[escalation_policies], list[[<ast.Constant object at 0x7da20c76c3d0>, <ast.Constant object at 0x7da20c76fee0>]], name[_diff], name[profile], name[subdomain], name[api_key]]]
return[name[r]] | keyword[def] identifier[present] ( identifier[profile] = literal[string] , identifier[subdomain] = keyword[None] , identifier[api_key] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[escalation_rule] keyword[in] identifier[kwargs] [ literal[string] ]:
keyword[for] identifier[target] keyword[in] identifier[escalation_rule] [ literal[string] ]:
identifier[target_id] = keyword[None]
keyword[if] identifier[target] [ literal[string] ]== literal[string] :
identifier[user] = identifier[__salt__] [ literal[string] ]( literal[string] ,
identifier[target] [ literal[string] ],
[ literal[string] , literal[string] , literal[string] ],
identifier[profile] = identifier[profile] ,
identifier[subdomain] = identifier[subdomain] ,
identifier[api_key] = identifier[api_key] )
keyword[if] identifier[user] :
identifier[target_id] = identifier[user] [ literal[string] ]
keyword[elif] identifier[target] [ literal[string] ]== literal[string] :
identifier[schedule] = identifier[__salt__] [ literal[string] ]( literal[string] ,
identifier[target] [ literal[string] ],
[ literal[string] , literal[string] ],
identifier[profile] = identifier[profile] ,
identifier[subdomain] = identifier[subdomain] ,
identifier[api_key] = identifier[api_key] )
keyword[if] identifier[schedule] :
identifier[target_id] = identifier[schedule] [ literal[string] ][ literal[string] ]
keyword[if] identifier[target_id] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[target] ))
identifier[target] [ literal[string] ]= identifier[target_id]
identifier[r] = identifier[__salt__] [ literal[string] ]( literal[string] ,
[ literal[string] , literal[string] ],
identifier[_diff] ,
identifier[profile] ,
identifier[subdomain] ,
identifier[api_key] ,
** identifier[kwargs] )
keyword[return] identifier[r] | def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
"""
Ensure that a pagerduty escalation policy exists. Will create or update as needed.
This method accepts as args everything defined in
https://developer.pagerduty.com/documentation/rest/escalation_policies/create.
In addition, user and schedule id's will be translated from name (or email address)
into PagerDuty unique ids. For example:
.. code-block:: yaml
pagerduty_escalation_policy.present:
- name: bruce test escalation policy
- escalation_rules:
- targets:
- type: schedule
id: 'bruce test schedule level1'
- type: user
id: 'Bruce Sherrod'
In this example, 'Bruce Sherrod' will be looked up and replaced with the
PagerDuty id (usually a 7 digit all-caps string, e.g. PX6GQL7)
"""
# for convenience, we accept id, name, or email for users
# and we accept the id or name for schedules
for escalation_rule in kwargs['escalation_rules']:
for target in escalation_rule['targets']:
target_id = None
if target['type'] == 'user':
user = __salt__['pagerduty_util.get_resource']('users', target['id'], ['email', 'name', 'id'], profile=profile, subdomain=subdomain, api_key=api_key)
if user:
target_id = user['id'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif target['type'] == 'schedule':
schedule = __salt__['pagerduty_util.get_resource']('schedules', target['id'], ['name', 'id'], profile=profile, subdomain=subdomain, api_key=api_key)
if schedule:
target_id = schedule['schedule']['id'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if target_id is None:
raise Exception('unidentified target: {0}'.format(target)) # depends on [control=['if'], data=[]]
target['id'] = target_id # depends on [control=['for'], data=['target']] # depends on [control=['for'], data=['escalation_rule']]
r = __salt__['pagerduty_util.resource_present']('escalation_policies', ['name', 'id'], _diff, profile, subdomain, api_key, **kwargs)
return r |
def copyCurrentLayout(self, sourceViewSUID, targetViewSUID, body, verbose=None):
"""
Copy one network view layout onto another, setting the node location and view scale to match. This makes visually comparing networks simple.
:param sourceViewSUID: Source network view SUID (or "current")
:param targetViewSUID: Target network view SUID (or "current")
:param body: Clone the specified network view layout onto another network view -- Not required, can be None
:param verbose: print more
:returns: 200: successful operation; 404: Network View does not exist
"""
response=api(url=self.___url+'apply/layouts/copycat/'+str(sourceViewSUID)+'/'+str(targetViewSUID)+'', method="PUT", body=body, verbose=verbose)
return response | def function[copyCurrentLayout, parameter[self, sourceViewSUID, targetViewSUID, body, verbose]]:
constant[
Copy one network view layout onto another, setting the node location and view scale to match. This makes visually comparing networks simple.
:param sourceViewSUID: Source network view SUID (or "current")
:param targetViewSUID: Target network view SUID (or "current")
:param body: Clone the specified network view layout onto another network view -- Not required, can be None
:param verbose: print more
:returns: 200: successful operation; 404: Network View does not exist
]
variable[response] assign[=] call[name[api], parameter[]]
return[name[response]] | keyword[def] identifier[copyCurrentLayout] ( identifier[self] , identifier[sourceViewSUID] , identifier[targetViewSUID] , identifier[body] , identifier[verbose] = keyword[None] ):
literal[string]
identifier[response] = identifier[api] ( identifier[url] = identifier[self] . identifier[___url] + literal[string] + identifier[str] ( identifier[sourceViewSUID] )+ literal[string] + identifier[str] ( identifier[targetViewSUID] )+ literal[string] , identifier[method] = literal[string] , identifier[body] = identifier[body] , identifier[verbose] = identifier[verbose] )
keyword[return] identifier[response] | def copyCurrentLayout(self, sourceViewSUID, targetViewSUID, body, verbose=None):
"""
Copy one network view layout onto another, setting the node location and view scale to match. This makes visually comparing networks simple.
:param sourceViewSUID: Source network view SUID (or "current")
:param targetViewSUID: Target network view SUID (or "current")
:param body: Clone the specified network view layout onto another network view -- Not required, can be None
:param verbose: print more
:returns: 200: successful operation; 404: Network View does not exist
"""
response = api(url=self.___url + 'apply/layouts/copycat/' + str(sourceViewSUID) + '/' + str(targetViewSUID) + '', method='PUT', body=body, verbose=verbose)
return response |
def find_keyboard_row(words):
    """
    Return the words that can be typed using letters of exactly one
    keyboard row (case-insensitive).

    :type words: List[str]
    :rtype: List[str]
    """
    keyboard = [
        set('qwertyuiop'),
        set('asdfghjkl'),
        set('zxcvbnm'),
    ]
    result = []
    for word in words:
        letters = set(word.lower())
        for key in keyboard:
            if letters.issubset(key):
                result.append(word)
                # The rows are disjoint, so a non-empty word can match at
                # most one of them; stopping here also keeps the empty
                # string (subset of every row) from being appended once
                # per row, which the original loop did.
                break
    return result
constant[
:type words: List[str]
:rtype: List[str]
]
variable[keyboard] assign[=] list[[<ast.Call object at 0x7da1b2033e20>, <ast.Call object at 0x7da1b2031d20>, <ast.Call object at 0x7da1b2030dc0>]]
variable[result] assign[=] list[[]]
for taget[name[word]] in starred[name[words]] begin[:]
for taget[name[key]] in starred[name[keyboard]] begin[:]
if call[call[name[set], parameter[call[name[word].lower, parameter[]]]].issubset, parameter[name[key]]] begin[:]
call[name[result].append, parameter[name[word]]]
return[name[result]] | keyword[def] identifier[find_keyboard_row] ( identifier[words] ):
literal[string]
identifier[keyboard] =[
identifier[set] ( literal[string] ),
identifier[set] ( literal[string] ),
identifier[set] ( literal[string] ),
]
identifier[result] =[]
keyword[for] identifier[word] keyword[in] identifier[words] :
keyword[for] identifier[key] keyword[in] identifier[keyboard] :
keyword[if] identifier[set] ( identifier[word] . identifier[lower] ()). identifier[issubset] ( identifier[key] ):
identifier[result] . identifier[append] ( identifier[word] )
keyword[return] identifier[result] | def find_keyboard_row(words):
"""
:type words: List[str]
:rtype: List[str]
"""
keyboard = [set('qwertyuiop'), set('asdfghjkl'), set('zxcvbnm')]
result = []
for word in words:
for key in keyboard:
if set(word.lower()).issubset(key):
result.append(word) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['word']]
return result |
def get_hessian(self):
    """Return the Cartesian Hessian as a symmetric (3N, 3N) array, or
    None when no force constants were parsed for this record."""
    packed = self.fields.get("Cartesian Force Constants")
    if packed is None:
        return None
    # The field stores the packed lower triangle, row by row; unpack it
    # into both triangles of a dense square matrix.
    size = 3 * len(self.molecule.numbers)
    hessian = np.zeros((size, size), float)
    offset = 0
    for i in range(size):
        row_vals = packed[offset:offset + i + 1]
        hessian[i, :i + 1] = row_vals
        hessian[:i + 1, i] = row_vals
        offset += i + 1
    return hessian
constant[Return the hessian]
variable[force_const] assign[=] call[name[self].fields.get, parameter[constant[Cartesian Force Constants]]]
if compare[name[force_const] is constant[None]] begin[:]
return[constant[None]]
variable[N] assign[=] call[name[len], parameter[name[self].molecule.numbers]]
variable[result] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da2041db130>, <ast.BinOp object at 0x7da2041d8a60>]], name[float]]]
variable[counter] assign[=] constant[0]
for taget[name[row]] in starred[call[name[range], parameter[binary_operation[constant[3] * name[N]]]]] begin[:]
call[name[result]][tuple[[<ast.Name object at 0x7da2041d88b0>, <ast.Slice object at 0x7da2041d8160>]]] assign[=] call[name[force_const]][<ast.Slice object at 0x7da2041d8d00>]
call[name[result]][tuple[[<ast.Slice object at 0x7da2041dbcd0>, <ast.Name object at 0x7da2041da4a0>]]] assign[=] call[name[force_const]][<ast.Slice object at 0x7da2041d92d0>]
<ast.AugAssign object at 0x7da2041d9cc0>
return[name[result]] | keyword[def] identifier[get_hessian] ( identifier[self] ):
literal[string]
identifier[force_const] = identifier[self] . identifier[fields] . identifier[get] ( literal[string] )
keyword[if] identifier[force_const] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[N] = identifier[len] ( identifier[self] . identifier[molecule] . identifier[numbers] )
identifier[result] = identifier[np] . identifier[zeros] (( literal[int] * identifier[N] , literal[int] * identifier[N] ), identifier[float] )
identifier[counter] = literal[int]
keyword[for] identifier[row] keyword[in] identifier[range] ( literal[int] * identifier[N] ):
identifier[result] [ identifier[row] ,: identifier[row] + literal[int] ]= identifier[force_const] [ identifier[counter] : identifier[counter] + identifier[row] + literal[int] ]
identifier[result] [: identifier[row] + literal[int] , identifier[row] ]= identifier[force_const] [ identifier[counter] : identifier[counter] + identifier[row] + literal[int] ]
identifier[counter] += identifier[row] + literal[int]
keyword[return] identifier[result] | def get_hessian(self):
"""Return the hessian"""
force_const = self.fields.get('Cartesian Force Constants')
if force_const is None:
return None # depends on [control=['if'], data=[]]
N = len(self.molecule.numbers)
result = np.zeros((3 * N, 3 * N), float)
counter = 0
for row in range(3 * N):
result[row, :row + 1] = force_const[counter:counter + row + 1]
result[:row + 1, row] = force_const[counter:counter + row + 1]
counter += row + 1 # depends on [control=['for'], data=['row']]
return result |
def _thumbnail_local(self, original_filename, thumb_filename,
                     thumb_size, thumb_url, crop=None, bg=None,
                     quality=85):
    """Finds or creates a thumbnail for the specified image on the local filesystem."""
    # Ensure the directory hierarchy for the thumbnail exists.
    self._get_path(thumb_filename)
    full_url = url_for('static', filename=thumb_url)
    # Reuse a previously generated thumbnail when one is on disk.
    if os.path.exists(thumb_filename):
        return full_url
    try:
        source = Image.open(original_filename)
    except IOError:
        # Missing or unreadable source image: signal failure via empty URL.
        return ''
    resized = self._thumbnail_resize(source, thumb_size, crop=crop, bg=bg)
    resized.save(thumb_filename, source.format, quality=quality)
    return full_url
constant[Finds or creates a thumbnail for the specified image on the local filesystem.]
call[name[self]._get_path, parameter[name[thumb_filename]]]
variable[thumb_url_full] assign[=] call[name[url_for], parameter[constant[static]]]
if call[name[os].path.exists, parameter[name[thumb_filename]]] begin[:]
return[name[thumb_url_full]]
<ast.Try object at 0x7da2047e8340>
variable[img] assign[=] call[name[self]._thumbnail_resize, parameter[name[image], name[thumb_size]]]
call[name[img].save, parameter[name[thumb_filename], name[image].format]]
return[name[thumb_url_full]] | keyword[def] identifier[_thumbnail_local] ( identifier[self] , identifier[original_filename] , identifier[thumb_filename] ,
identifier[thumb_size] , identifier[thumb_url] , identifier[crop] = keyword[None] , identifier[bg] = keyword[None] ,
identifier[quality] = literal[int] ):
literal[string]
identifier[self] . identifier[_get_path] ( identifier[thumb_filename] )
identifier[thumb_url_full] = identifier[url_for] ( literal[string] , identifier[filename] = identifier[thumb_url] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[thumb_filename] ):
keyword[return] identifier[thumb_url_full]
keyword[try] :
identifier[image] = identifier[Image] . identifier[open] ( identifier[original_filename] )
keyword[except] identifier[IOError] :
keyword[return] literal[string]
identifier[img] = identifier[self] . identifier[_thumbnail_resize] ( identifier[image] , identifier[thumb_size] , identifier[crop] = identifier[crop] , identifier[bg] = identifier[bg] )
identifier[img] . identifier[save] ( identifier[thumb_filename] , identifier[image] . identifier[format] , identifier[quality] = identifier[quality] )
keyword[return] identifier[thumb_url_full] | def _thumbnail_local(self, original_filename, thumb_filename, thumb_size, thumb_url, crop=None, bg=None, quality=85):
"""Finds or creates a thumbnail for the specified image on the local filesystem."""
# create folders
self._get_path(thumb_filename)
thumb_url_full = url_for('static', filename=thumb_url)
# Return the thumbnail URL now if it already exists locally
if os.path.exists(thumb_filename):
return thumb_url_full # depends on [control=['if'], data=[]]
try:
image = Image.open(original_filename) # depends on [control=['try'], data=[]]
except IOError:
return '' # depends on [control=['except'], data=[]]
img = self._thumbnail_resize(image, thumb_size, crop=crop, bg=bg)
img.save(thumb_filename, image.format, quality=quality)
return thumb_url_full |
def confirm_register_form_factory(Form):
    """Factory for creating a confirm register form."""

    class CsrfDisabledProfileForm(ProfileForm):
        """Inner profile form with CSRF protection forced off.

        Instances are always embedded as a subform of the surrounding
        ``Form``, which is the one responsible for adding/removing the
        CSRF token.
        """

        def __init__(self, *args, **kwargs):
            """Hardcode the CSRF-disabling flag into the keyword args."""
            super(CsrfDisabledProfileForm, self).__init__(
                *args, **_update_with_csrf_disabled(kwargs))

    class ConfirmRegisterForm(Form):
        """RegisterForm extended with UserProfile details."""

        profile = FormField(CsrfDisabledProfileForm, separator='.')

    return ConfirmRegisterForm
constant[Factory for creating a confirm register form.]
class class[CsrfDisabledProfileForm, parameter[]] begin[:]
constant[Subclass of ProfileForm to disable CSRF token in the inner form.
This class will always be a inner form field of the parent class
`Form`. The parent will add/remove the CSRF token in the form.
]
def function[__init__, parameter[self]]:
constant[Initialize the object by hardcoding CSRF token to false.]
variable[kwargs] assign[=] call[name[_update_with_csrf_disabled], parameter[name[kwargs]]]
call[call[name[super], parameter[name[CsrfDisabledProfileForm], name[self]]].__init__, parameter[<ast.Starred object at 0x7da20c6e4730>]]
class class[ConfirmRegisterForm, parameter[]] begin[:]
constant[RegisterForm extended with UserProfile details.]
variable[profile] assign[=] call[name[FormField], parameter[name[CsrfDisabledProfileForm]]]
return[name[ConfirmRegisterForm]] | keyword[def] identifier[confirm_register_form_factory] ( identifier[Form] ):
literal[string]
keyword[class] identifier[CsrfDisabledProfileForm] ( identifier[ProfileForm] ):
literal[string]
keyword[def] identifier[__init__] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] = identifier[_update_with_csrf_disabled] ( identifier[kwargs] )
identifier[super] ( identifier[CsrfDisabledProfileForm] , identifier[self] ). identifier[__init__] (* identifier[args] ,** identifier[kwargs] )
keyword[class] identifier[ConfirmRegisterForm] ( identifier[Form] ):
literal[string]
identifier[profile] = identifier[FormField] ( identifier[CsrfDisabledProfileForm] , identifier[separator] = literal[string] )
keyword[return] identifier[ConfirmRegisterForm] | def confirm_register_form_factory(Form):
"""Factory for creating a confirm register form."""
class CsrfDisabledProfileForm(ProfileForm):
"""Subclass of ProfileForm to disable CSRF token in the inner form.
This class will always be a inner form field of the parent class
`Form`. The parent will add/remove the CSRF token in the form.
"""
def __init__(self, *args, **kwargs):
"""Initialize the object by hardcoding CSRF token to false."""
kwargs = _update_with_csrf_disabled(kwargs)
super(CsrfDisabledProfileForm, self).__init__(*args, **kwargs)
class ConfirmRegisterForm(Form):
"""RegisterForm extended with UserProfile details."""
profile = FormField(CsrfDisabledProfileForm, separator='.')
return ConfirmRegisterForm |
def log_entry_generator(log_instance):
    """
    :yield: The next LogEntry from the REST API
    :raise: StopIteration when there are no more log entries to show, please
            note that if you call this again at a later time the REST API
            could have different results and more data could be returned
    """
    page = 0
    while True:
        page_was_empty = True
        for entry in log_instance.get_page(page):
            page_was_empty = False
            yield entry
        # The first completely empty page marks the end of the log.
        if page_was_empty:
            return
        page += 1
constant[
:yield: The next LogEntry from the REST API
:raise: StopIteration when there are no more log entries to show, please
note that if you call this again at a later time the REST API
could have different results and more data could be returned
]
variable[current_page_num] assign[=] constant[0]
while constant[True] begin[:]
variable[has_results] assign[=] constant[False]
for taget[name[log_entry]] in starred[call[name[log_instance].get_page, parameter[name[current_page_num]]]] begin[:]
variable[has_results] assign[=] constant[True]
<ast.Yield object at 0x7da20c6a84f0>
if <ast.UnaryOp object at 0x7da20c6a8940> begin[:]
break
<ast.AugAssign object at 0x7da20c6aa860> | keyword[def] identifier[log_entry_generator] ( identifier[log_instance] ):
literal[string]
identifier[current_page_num] = literal[int]
keyword[while] keyword[True] :
identifier[has_results] = keyword[False]
keyword[for] identifier[log_entry] keyword[in] identifier[log_instance] . identifier[get_page] ( identifier[current_page_num] ):
identifier[has_results] = keyword[True]
keyword[yield] identifier[log_entry]
keyword[if] keyword[not] identifier[has_results] :
keyword[break]
identifier[current_page_num] += literal[int] | def log_entry_generator(log_instance):
"""
:yield: The next LogEntry from the REST API
:raise: StopIteration when there are no more log entries to show, please
note that if you call this again at a later time the REST API
could have different results and more data could be returned
"""
current_page_num = 0
while True:
has_results = False
for log_entry in log_instance.get_page(current_page_num):
has_results = True
yield log_entry # depends on [control=['for'], data=['log_entry']]
if not has_results:
break # depends on [control=['if'], data=[]]
current_page_num += 1 # depends on [control=['while'], data=[]] |
def fit(self, X, y=None):
        """
        Build a trainer and run main_loop.
        Parameters
        ----------
        X : array_like
            Training examples.
        y : array_like, optional
            Labels.
        """
        # Imported lazily so the module can be loaded without pylearn2.
        from pylearn2.config import yaml_parse
        from pylearn2.train import Train
        # build trainer: substitute this estimator's hyper-parameters into
        # the YAML template, then materialize the Train object it describes.
        params = self.get_params()
        yaml_string = Template(self.yaml_string).substitute(params)
        self.trainer = yaml_parse.load(yaml_string)
        assert isinstance(self.trainer, Train)
        # The training dataset must be injected here, not declared in YAML.
        if self.trainer.dataset is not None:
            raise ValueError('Train YAML database must evaluate to None.')
        self.trainer.dataset = self._get_dataset(X, y)
        # update monitoring dataset(s) so the monitor also sees the
        # freshly injected training set
        if (hasattr(self.trainer.algorithm, 'monitoring_dataset') and
                self.trainer.algorithm.monitoring_dataset is not None):
            monitoring_dataset = self.trainer.algorithm.monitoring_dataset
            # A single unnamed ('') entry is replaced wholesale; otherwise
            # the training set is registered under the 'train' key.
            if len(monitoring_dataset) == 1 and '' in monitoring_dataset:
                monitoring_dataset[''] = self.trainer.dataset
            else:
                monitoring_dataset['train'] = self.trainer.dataset
            self.trainer.algorithm._set_monitoring_dataset(monitoring_dataset)
        else:
            # No explicit monitoring datasets: monitor the training set.
            self.trainer.algorithm._set_monitoring_dataset(
                self.trainer.dataset)
        # run main loop
        self.trainer.main_loop() |
constant[
Build a trainer and run main_loop.
Parameters
----------
X : array_like
Training examples.
y : array_like, optional
Labels.
]
from relative_module[pylearn2.config] import module[yaml_parse]
from relative_module[pylearn2.train] import module[Train]
variable[params] assign[=] call[name[self].get_params, parameter[]]
variable[yaml_string] assign[=] call[call[name[Template], parameter[name[self].yaml_string]].substitute, parameter[name[params]]]
name[self].trainer assign[=] call[name[yaml_parse].load, parameter[name[yaml_string]]]
assert[call[name[isinstance], parameter[name[self].trainer, name[Train]]]]
if compare[name[self].trainer.dataset is_not constant[None]] begin[:]
<ast.Raise object at 0x7da1aff54910>
name[self].trainer.dataset assign[=] call[name[self]._get_dataset, parameter[name[X], name[y]]]
if <ast.BoolOp object at 0x7da1aff55630> begin[:]
variable[monitoring_dataset] assign[=] name[self].trainer.algorithm.monitoring_dataset
if <ast.BoolOp object at 0x7da1aff55bd0> begin[:]
call[name[monitoring_dataset]][constant[]] assign[=] name[self].trainer.dataset
call[name[self].trainer.algorithm._set_monitoring_dataset, parameter[name[monitoring_dataset]]]
call[name[self].trainer.main_loop, parameter[]] | keyword[def] identifier[fit] ( identifier[self] , identifier[X] , identifier[y] = keyword[None] ):
literal[string]
keyword[from] identifier[pylearn2] . identifier[config] keyword[import] identifier[yaml_parse]
keyword[from] identifier[pylearn2] . identifier[train] keyword[import] identifier[Train]
identifier[params] = identifier[self] . identifier[get_params] ()
identifier[yaml_string] = identifier[Template] ( identifier[self] . identifier[yaml_string] ). identifier[substitute] ( identifier[params] )
identifier[self] . identifier[trainer] = identifier[yaml_parse] . identifier[load] ( identifier[yaml_string] )
keyword[assert] identifier[isinstance] ( identifier[self] . identifier[trainer] , identifier[Train] )
keyword[if] identifier[self] . identifier[trainer] . identifier[dataset] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[trainer] . identifier[dataset] = identifier[self] . identifier[_get_dataset] ( identifier[X] , identifier[y] )
keyword[if] ( identifier[hasattr] ( identifier[self] . identifier[trainer] . identifier[algorithm] , literal[string] ) keyword[and]
identifier[self] . identifier[trainer] . identifier[algorithm] . identifier[monitoring_dataset] keyword[is] keyword[not] keyword[None] ):
identifier[monitoring_dataset] = identifier[self] . identifier[trainer] . identifier[algorithm] . identifier[monitoring_dataset]
keyword[if] identifier[len] ( identifier[monitoring_dataset] )== literal[int] keyword[and] literal[string] keyword[in] identifier[monitoring_dataset] :
identifier[monitoring_dataset] [ literal[string] ]= identifier[self] . identifier[trainer] . identifier[dataset]
keyword[else] :
identifier[monitoring_dataset] [ literal[string] ]= identifier[self] . identifier[trainer] . identifier[dataset]
identifier[self] . identifier[trainer] . identifier[algorithm] . identifier[_set_monitoring_dataset] ( identifier[monitoring_dataset] )
keyword[else] :
identifier[self] . identifier[trainer] . identifier[algorithm] . identifier[_set_monitoring_dataset] (
identifier[self] . identifier[trainer] . identifier[dataset] )
identifier[self] . identifier[trainer] . identifier[main_loop] () | def fit(self, X, y=None):
"""
Build a trainer and run main_loop.
Parameters
----------
X : array_like
Training examples.
y : array_like, optional
Labels.
"""
from pylearn2.config import yaml_parse
from pylearn2.train import Train
# build trainer
params = self.get_params()
yaml_string = Template(self.yaml_string).substitute(params)
self.trainer = yaml_parse.load(yaml_string)
assert isinstance(self.trainer, Train)
if self.trainer.dataset is not None:
raise ValueError('Train YAML database must evaluate to None.') # depends on [control=['if'], data=[]]
self.trainer.dataset = self._get_dataset(X, y)
# update monitoring dataset(s)
if hasattr(self.trainer.algorithm, 'monitoring_dataset') and self.trainer.algorithm.monitoring_dataset is not None:
monitoring_dataset = self.trainer.algorithm.monitoring_dataset
if len(monitoring_dataset) == 1 and '' in monitoring_dataset:
monitoring_dataset[''] = self.trainer.dataset # depends on [control=['if'], data=[]]
else:
monitoring_dataset['train'] = self.trainer.dataset
self.trainer.algorithm._set_monitoring_dataset(monitoring_dataset) # depends on [control=['if'], data=[]]
else:
self.trainer.algorithm._set_monitoring_dataset(self.trainer.dataset)
# run main loop
self.trainer.main_loop() |
def extract(args):
    """
    %prog extract gffile
    --contigs: Extract particular contig(s) from the gff file. If multiple contigs are
    involved, use "," to separate, e.g. "contig_12,contig_150"; or provide a file
    with multiple contig IDs, one per line
    --names: Process particular ID(s) from the gff file. If multiple IDs are
    involved, use "," to separate; or provide a file with multiple IDs, one per line
    """
    p = OptionParser(extract.__doc__)
    p.add_option("--contigs",
                help="Extract features from certain contigs [default: %default]")
    p.add_option("--names",
                help="Extract features with certain names [default: %default]")
    p.add_option("--types", type="str", default=None,
                help="Extract features of certain feature types [default: %default]")
    p.add_option("--children", default=0, choices=["1", "2"],
                help="Specify number of iterations: `1` grabs children, " + \
                     "`2` grabs grand-children [default: %default]")
    p.add_option("--tag", default="ID",
                help="Scan the tags for the names [default: %default]")
    p.add_option("--fasta", default=False, action="store_true",
                help="Write FASTA if available [default: %default]")
    p.set_outfile()
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    gffile, = args
    contigfile = opts.contigs
    namesfile = opts.names
    typesfile = opts.types
    nametag = opts.tag

    # Each selector may be "a,b,c", a file of IDs, or absent (None).
    contigID = parse_multi_values(contigfile)
    names = parse_multi_values(namesfile)
    types = parse_multi_values(typesfile)
    outfile = opts.outfile

    if opts.children:
        assert types is not None or names is not None, "Must set --names or --types"
        if names is None:
            names = list()
        populate_children(outfile, names, gffile, iter=opts.children, types=types)
        return

    # When only --types is given, feature IDs of matching types are collected
    # into `names` as we scan; start from an empty list instead of raising
    # TypeError on the `_id not in names` membership test below.
    if types and names is None:
        names = list()

    fp = must_open(gffile)
    fw = must_open(opts.outfile, "w")
    for row in fp:
        atoms = row.split()
        if len(atoms) == 0:
            continue
        tag = atoms[0]
        if row[0] == "#":
            # Pass through comment/pragma lines, except sequence-region
            # pragmas for contigs that were filtered out.
            if row.strip() == "###":
                continue
            if not (tag == RegionTag and contigID and atoms[1] not in contigID):
                print(row.rstrip(), file=fw)
            if tag == FastaTag:
                break
            continue

        b = GffLine(row)
        attrib = b.attributes
        if contigID and tag not in contigID:
            continue
        if types and b.type in types:
            # Remember IDs of matching types so their rows pass the name
            # filter below.
            _id = b.accn
            if _id not in names:
                names.append(_id)
        if names is not None:
            if nametag not in attrib:
                continue
            if attrib[nametag][0] not in names:
                continue

        print(row.rstrip(), file=fw)

    if not opts.fasta:
        return

    # Append FASTA records for the selected contigs, when requested.
    f = Fasta(gffile)
    for s in contigID or []:
        if s in f:
            SeqIO.write([f[s]], fw, "fasta")
constant[
%prog extract gffile
--contigs: Extract particular contig(s) from the gff file. If multiple contigs are
involved, use "," to separate, e.g. "contig_12,contig_150"; or provide a file
with multiple contig IDs, one per line
--names: Process particular ID(s) from the gff file. If multiple IDs are
involved, use "," to separate; or provide a file with multiple IDs, one per line
]
variable[p] assign[=] call[name[OptionParser], parameter[name[extract].__doc__]]
call[name[p].add_option, parameter[constant[--contigs]]]
call[name[p].add_option, parameter[constant[--names]]]
call[name[p].add_option, parameter[constant[--types]]]
call[name[p].add_option, parameter[constant[--children]]]
call[name[p].add_option, parameter[constant[--tag]]]
call[name[p].add_option, parameter[constant[--fasta]]]
call[name[p].set_outfile, parameter[]]
<ast.Tuple object at 0x7da1b08aa1a0> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b08a94b0>]]
<ast.Tuple object at 0x7da1b08a93c0> assign[=] name[args]
variable[contigfile] assign[=] name[opts].contigs
variable[namesfile] assign[=] name[opts].names
variable[typesfile] assign[=] name[opts].types
variable[nametag] assign[=] name[opts].tag
variable[contigID] assign[=] call[name[parse_multi_values], parameter[name[contigfile]]]
variable[names] assign[=] call[name[parse_multi_values], parameter[name[namesfile]]]
variable[types] assign[=] call[name[parse_multi_values], parameter[name[typesfile]]]
variable[outfile] assign[=] name[opts].outfile
if name[opts].children begin[:]
assert[<ast.BoolOp object at 0x7da1b08a89a0>]
if compare[name[names] equal[==] constant[None]] begin[:]
variable[names] assign[=] call[name[list], parameter[]]
call[name[populate_children], parameter[name[outfile], name[names], name[gffile]]]
return[None]
variable[fp] assign[=] call[name[must_open], parameter[name[gffile]]]
variable[fw] assign[=] call[name[must_open], parameter[name[opts].outfile, constant[w]]]
for taget[name[row]] in starred[name[fp]] begin[:]
variable[atoms] assign[=] call[name[row].split, parameter[]]
if compare[call[name[len], parameter[name[atoms]]] equal[==] constant[0]] begin[:]
continue
variable[tag] assign[=] call[name[atoms]][constant[0]]
if compare[call[name[row]][constant[0]] equal[==] constant[#]] begin[:]
if compare[call[name[row].strip, parameter[]] equal[==] constant[###]] begin[:]
continue
if <ast.UnaryOp object at 0x7da1b08aaef0> begin[:]
call[name[print], parameter[call[name[row].rstrip, parameter[]]]]
if compare[name[tag] equal[==] name[FastaTag]] begin[:]
break
continue
variable[b] assign[=] call[name[GffLine], parameter[name[row]]]
variable[attrib] assign[=] name[b].attributes
if <ast.BoolOp object at 0x7da1b08ab580> begin[:]
continue
if <ast.BoolOp object at 0x7da1b08ab8b0> begin[:]
variable[_id] assign[=] name[b].accn
if compare[name[_id] <ast.NotIn object at 0x7da2590d7190> name[names]] begin[:]
call[name[names].append, parameter[name[_id]]]
if compare[name[names] is_not constant[None]] begin[:]
if compare[name[nametag] <ast.NotIn object at 0x7da2590d7190> name[attrib]] begin[:]
continue
if compare[call[call[name[attrib]][name[nametag]]][constant[0]] <ast.NotIn object at 0x7da2590d7190> name[names]] begin[:]
continue
call[name[print], parameter[call[name[row].rstrip, parameter[]]]]
if <ast.UnaryOp object at 0x7da1b07119f0> begin[:]
return[None]
variable[f] assign[=] call[name[Fasta], parameter[name[gffile]]]
for taget[name[s]] in starred[name[contigID]] begin[:]
if compare[name[s] in name[f]] begin[:]
call[name[SeqIO].write, parameter[list[[<ast.Subscript object at 0x7da1b0711030>]], name[fw], constant[fasta]]] | keyword[def] identifier[extract] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[extract] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[type] = literal[string] , identifier[default] = keyword[None] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[int] , identifier[choices] =[ literal[string] , literal[string] ],
identifier[help] = literal[string] + literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[set_outfile] ()
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[gffile] ,= identifier[args]
identifier[contigfile] = identifier[opts] . identifier[contigs]
identifier[namesfile] = identifier[opts] . identifier[names]
identifier[typesfile] = identifier[opts] . identifier[types]
identifier[nametag] = identifier[opts] . identifier[tag]
identifier[contigID] = identifier[parse_multi_values] ( identifier[contigfile] )
identifier[names] = identifier[parse_multi_values] ( identifier[namesfile] )
identifier[types] = identifier[parse_multi_values] ( identifier[typesfile] )
identifier[outfile] = identifier[opts] . identifier[outfile]
keyword[if] identifier[opts] . identifier[children] :
keyword[assert] identifier[types] keyword[is] keyword[not] keyword[None] keyword[or] identifier[names] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[if] identifier[names] == keyword[None] : identifier[names] = identifier[list] ()
identifier[populate_children] ( identifier[outfile] , identifier[names] , identifier[gffile] , identifier[iter] = identifier[opts] . identifier[children] , identifier[types] = identifier[types] )
keyword[return]
identifier[fp] = identifier[must_open] ( identifier[gffile] )
identifier[fw] = identifier[must_open] ( identifier[opts] . identifier[outfile] , literal[string] )
keyword[for] identifier[row] keyword[in] identifier[fp] :
identifier[atoms] = identifier[row] . identifier[split] ()
keyword[if] identifier[len] ( identifier[atoms] )== literal[int] :
keyword[continue]
identifier[tag] = identifier[atoms] [ literal[int] ]
keyword[if] identifier[row] [ literal[int] ]== literal[string] :
keyword[if] identifier[row] . identifier[strip] ()== literal[string] :
keyword[continue]
keyword[if] keyword[not] ( identifier[tag] == identifier[RegionTag] keyword[and] identifier[contigID] keyword[and] identifier[atoms] [ literal[int] ] keyword[not] keyword[in] identifier[contigID] ):
identifier[print] ( identifier[row] . identifier[rstrip] (), identifier[file] = identifier[fw] )
keyword[if] identifier[tag] == identifier[FastaTag] :
keyword[break]
keyword[continue]
identifier[b] = identifier[GffLine] ( identifier[row] )
identifier[attrib] = identifier[b] . identifier[attributes]
keyword[if] identifier[contigID] keyword[and] identifier[tag] keyword[not] keyword[in] identifier[contigID] :
keyword[continue]
keyword[if] identifier[types] keyword[and] identifier[b] . identifier[type] keyword[in] identifier[types] :
identifier[_id] = identifier[b] . identifier[accn]
keyword[if] identifier[_id] keyword[not] keyword[in] identifier[names] :
identifier[names] . identifier[append] ( identifier[_id] )
keyword[if] identifier[names] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[nametag] keyword[not] keyword[in] identifier[attrib] :
keyword[continue]
keyword[if] identifier[attrib] [ identifier[nametag] ][ literal[int] ] keyword[not] keyword[in] identifier[names] :
keyword[continue]
identifier[print] ( identifier[row] . identifier[rstrip] (), identifier[file] = identifier[fw] )
keyword[if] keyword[not] identifier[opts] . identifier[fasta] :
keyword[return]
identifier[f] = identifier[Fasta] ( identifier[gffile] )
keyword[for] identifier[s] keyword[in] identifier[contigID] :
keyword[if] identifier[s] keyword[in] identifier[f] :
identifier[SeqIO] . identifier[write] ([ identifier[f] [ identifier[s] ]], identifier[fw] , literal[string] ) | def extract(args):
"""
%prog extract gffile
--contigs: Extract particular contig(s) from the gff file. If multiple contigs are
involved, use "," to separate, e.g. "contig_12,contig_150"; or provide a file
with multiple contig IDs, one per line
--names: Process particular ID(s) from the gff file. If multiple IDs are
involved, use "," to separate; or provide a file with multiple IDs, one per line
"""
p = OptionParser(extract.__doc__)
p.add_option('--contigs', help='Extract features from certain contigs [default: %default]')
p.add_option('--names', help='Extract features with certain names [default: %default]')
p.add_option('--types', type='str', default=None, help='Extract features of certain feature types [default: %default]')
p.add_option('--children', default=0, choices=['1', '2'], help='Specify number of iterations: `1` grabs children, ' + '`2` grabs grand-children [default: %default]')
p.add_option('--tag', default='ID', help='Scan the tags for the names [default: %default]')
p.add_option('--fasta', default=False, action='store_true', help='Write FASTA if available [default: %default]')
p.set_outfile()
(opts, args) = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(gffile,) = args
contigfile = opts.contigs
namesfile = opts.names
typesfile = opts.types
nametag = opts.tag
contigID = parse_multi_values(contigfile)
names = parse_multi_values(namesfile)
types = parse_multi_values(typesfile)
outfile = opts.outfile
if opts.children:
assert types is not None or names is not None, 'Must set --names or --types'
if names == None:
names = list() # depends on [control=['if'], data=['names']]
populate_children(outfile, names, gffile, iter=opts.children, types=types)
return # depends on [control=['if'], data=[]]
fp = must_open(gffile)
fw = must_open(opts.outfile, 'w')
for row in fp:
atoms = row.split()
if len(atoms) == 0:
continue # depends on [control=['if'], data=[]]
tag = atoms[0]
if row[0] == '#':
if row.strip() == '###':
continue # depends on [control=['if'], data=[]]
if not (tag == RegionTag and contigID and (atoms[1] not in contigID)):
print(row.rstrip(), file=fw) # depends on [control=['if'], data=[]]
if tag == FastaTag:
break # depends on [control=['if'], data=[]]
continue # depends on [control=['if'], data=[]]
b = GffLine(row)
attrib = b.attributes
if contigID and tag not in contigID:
continue # depends on [control=['if'], data=[]]
if types and b.type in types:
_id = b.accn
if _id not in names:
names.append(_id) # depends on [control=['if'], data=['_id', 'names']] # depends on [control=['if'], data=[]]
if names is not None:
if nametag not in attrib:
continue # depends on [control=['if'], data=[]]
if attrib[nametag][0] not in names:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['names']]
print(row.rstrip(), file=fw) # depends on [control=['for'], data=['row']]
if not opts.fasta:
return # depends on [control=['if'], data=[]]
f = Fasta(gffile)
for s in contigID:
if s in f:
SeqIO.write([f[s]], fw, 'fasta') # depends on [control=['if'], data=['s', 'f']] # depends on [control=['for'], data=['s']] |
def create_new_database(request):
"""Create a New Mongo Database by adding a single document."""
name = "Create a New MongoDB Database"
if request.method == 'POST':
form = CreateDatabaseForm(request.POST)
if form.is_valid():
result = form.save()
if "error" in result:
messages.error(
request, "The database creation operation failed.")
messages.error(request, result["error"])
else:
messages.success(request, "Database created.")
return HttpResponseRedirect(reverse('djmongo_show_dbs'))
else:
# The form is invalid
messages.error(
request, _("Please correct the errors in the form."))
return render(request,
'djmongo/console/generic/bootstrapform.html',
{'form': form,
'name': name})
else:
# this is a GET
context = {'name': name,
'form': CreateDatabaseForm(
initial={"initial_document": '{ "foo" : "bar" }'
})
}
return render(request, 'djmongo/console/generic/bootstrapform.html',
context) | def function[create_new_database, parameter[request]]:
constant[Create a New Mongo Database by adding a single document.]
variable[name] assign[=] constant[Create a New MongoDB Database]
if compare[name[request].method equal[==] constant[POST]] begin[:]
variable[form] assign[=] call[name[CreateDatabaseForm], parameter[name[request].POST]]
if call[name[form].is_valid, parameter[]] begin[:]
variable[result] assign[=] call[name[form].save, parameter[]]
if compare[constant[error] in name[result]] begin[:]
call[name[messages].error, parameter[name[request], constant[The database creation operation failed.]]]
call[name[messages].error, parameter[name[request], call[name[result]][constant[error]]]]
return[call[name[HttpResponseRedirect], parameter[call[name[reverse], parameter[constant[djmongo_show_dbs]]]]]] | keyword[def] identifier[create_new_database] ( identifier[request] ):
literal[string]
identifier[name] = literal[string]
keyword[if] identifier[request] . identifier[method] == literal[string] :
identifier[form] = identifier[CreateDatabaseForm] ( identifier[request] . identifier[POST] )
keyword[if] identifier[form] . identifier[is_valid] ():
identifier[result] = identifier[form] . identifier[save] ()
keyword[if] literal[string] keyword[in] identifier[result] :
identifier[messages] . identifier[error] (
identifier[request] , literal[string] )
identifier[messages] . identifier[error] ( identifier[request] , identifier[result] [ literal[string] ])
keyword[else] :
identifier[messages] . identifier[success] ( identifier[request] , literal[string] )
keyword[return] identifier[HttpResponseRedirect] ( identifier[reverse] ( literal[string] ))
keyword[else] :
identifier[messages] . identifier[error] (
identifier[request] , identifier[_] ( literal[string] ))
keyword[return] identifier[render] ( identifier[request] ,
literal[string] ,
{ literal[string] : identifier[form] ,
literal[string] : identifier[name] })
keyword[else] :
identifier[context] ={ literal[string] : identifier[name] ,
literal[string] : identifier[CreateDatabaseForm] (
identifier[initial] ={ literal[string] : literal[string]
})
}
keyword[return] identifier[render] ( identifier[request] , literal[string] ,
identifier[context] ) | def create_new_database(request):
"""Create a New Mongo Database by adding a single document."""
name = 'Create a New MongoDB Database'
if request.method == 'POST':
form = CreateDatabaseForm(request.POST)
if form.is_valid():
result = form.save()
if 'error' in result:
messages.error(request, 'The database creation operation failed.')
messages.error(request, result['error']) # depends on [control=['if'], data=['result']]
else:
messages.success(request, 'Database created.')
return HttpResponseRedirect(reverse('djmongo_show_dbs')) # depends on [control=['if'], data=[]]
else:
# The form is invalid
messages.error(request, _('Please correct the errors in the form.'))
return render(request, 'djmongo/console/generic/bootstrapform.html', {'form': form, 'name': name}) # depends on [control=['if'], data=[]]
else:
# this is a GET
context = {'name': name, 'form': CreateDatabaseForm(initial={'initial_document': '{ "foo" : "bar" }'})}
return render(request, 'djmongo/console/generic/bootstrapform.html', context) |
def create(model_config, url, local_dir, sequence_length=64, batch_size=64, train_val_split=0.8):
""" Vel factory function """
if not os.path.isabs(local_dir):
local_dir = model_config.project_data_dir(local_dir)
return TextUrlSource(
url,
absolute_data_path=local_dir,
sequence_length=sequence_length,
batch_size=batch_size,
train_val_split=train_val_split,
) | def function[create, parameter[model_config, url, local_dir, sequence_length, batch_size, train_val_split]]:
constant[ Vel factory function ]
if <ast.UnaryOp object at 0x7da1b1633220> begin[:]
variable[local_dir] assign[=] call[name[model_config].project_data_dir, parameter[name[local_dir]]]
return[call[name[TextUrlSource], parameter[name[url]]]] | keyword[def] identifier[create] ( identifier[model_config] , identifier[url] , identifier[local_dir] , identifier[sequence_length] = literal[int] , identifier[batch_size] = literal[int] , identifier[train_val_split] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[local_dir] ):
identifier[local_dir] = identifier[model_config] . identifier[project_data_dir] ( identifier[local_dir] )
keyword[return] identifier[TextUrlSource] (
identifier[url] ,
identifier[absolute_data_path] = identifier[local_dir] ,
identifier[sequence_length] = identifier[sequence_length] ,
identifier[batch_size] = identifier[batch_size] ,
identifier[train_val_split] = identifier[train_val_split] ,
) | def create(model_config, url, local_dir, sequence_length=64, batch_size=64, train_val_split=0.8):
""" Vel factory function """
if not os.path.isabs(local_dir):
local_dir = model_config.project_data_dir(local_dir) # depends on [control=['if'], data=[]]
return TextUrlSource(url, absolute_data_path=local_dir, sequence_length=sequence_length, batch_size=batch_size, train_val_split=train_val_split) |
def close(self):
"""
Closes the injector and all sub-injectors. This is also called on
destruction.
close() will be called on all managed objects.
"""
if self.___closed:
return
for ref in self.___subs:
sub = ref()
if sub is not None:
sub.close()
# Destroy in reverse order as first elements created have more components depending on them
for call in self.___close_list[::-1]:
if inspect.iscoroutinefunction(call):
self.loop.run_until_complete(call())
else:
call()
self.___closed = True | def function[close, parameter[self]]:
constant[
Closes the injector and all sub-injectors. This is also called on
destruction.
close() will be called on all managed objects.
]
if name[self].___closed begin[:]
return[None]
for taget[name[ref]] in starred[name[self].___subs] begin[:]
variable[sub] assign[=] call[name[ref], parameter[]]
if compare[name[sub] is_not constant[None]] begin[:]
call[name[sub].close, parameter[]]
for taget[name[call]] in starred[call[name[self].___close_list][<ast.Slice object at 0x7da1b1ff02e0>]] begin[:]
if call[name[inspect].iscoroutinefunction, parameter[name[call]]] begin[:]
call[name[self].loop.run_until_complete, parameter[call[name[call], parameter[]]]]
name[self].___closed assign[=] constant[True] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[___closed] :
keyword[return]
keyword[for] identifier[ref] keyword[in] identifier[self] . identifier[___subs] :
identifier[sub] = identifier[ref] ()
keyword[if] identifier[sub] keyword[is] keyword[not] keyword[None] :
identifier[sub] . identifier[close] ()
keyword[for] identifier[call] keyword[in] identifier[self] . identifier[___close_list] [::- literal[int] ]:
keyword[if] identifier[inspect] . identifier[iscoroutinefunction] ( identifier[call] ):
identifier[self] . identifier[loop] . identifier[run_until_complete] ( identifier[call] ())
keyword[else] :
identifier[call] ()
identifier[self] . identifier[___closed] = keyword[True] | def close(self):
"""
Closes the injector and all sub-injectors. This is also called on
destruction.
close() will be called on all managed objects.
"""
if self.___closed:
return # depends on [control=['if'], data=[]]
for ref in self.___subs:
sub = ref()
if sub is not None:
sub.close() # depends on [control=['if'], data=['sub']] # depends on [control=['for'], data=['ref']]
# Destroy in reverse order as first elements created have more components depending on them
for call in self.___close_list[::-1]:
if inspect.iscoroutinefunction(call):
self.loop.run_until_complete(call()) # depends on [control=['if'], data=[]]
else:
call() # depends on [control=['for'], data=['call']]
self.___closed = True |
def create_output(self, key, value, variable_type=None):
"""Wrapper for Create method of CRUD operation for working with KeyValue DB.
This method will automatically check to see if provided variable was requested by
a downstream app and if so create the data in the KeyValue DB.
Args:
key (string): The variable to write to the DB.
value (any): The data to write to the DB.
variable_type (string): The variable type being written.
Returns:
(string): Result string of DB write.
"""
results = None
if key is not None:
key = key.strip()
key_type = '{}-{}'.format(key, variable_type)
if self.out_variables_type.get(key_type) is not None:
# variable key-type has been requested
v = self.out_variables_type.get(key_type)
self.tcex.log.info(
u'Variable {} was requested by downstream app.'.format(v.get('variable'))
)
if value is not None:
results = self.create(v.get('variable'), value)
else:
self.tcex.log.info(
u'Variable {} has a none value and will not be written.'.format(key)
)
elif self.out_variables.get(key) is not None and variable_type is None:
# variable key has been requested
v = self.out_variables.get(key)
self.tcex.log.info(
u'Variable {} was requested by downstream app.'.format(v.get('variable'))
)
if value is not None:
results = self.create(v.get('variable'), value)
else:
self.tcex.log.info(
u'Variable {} has a none value and will not be written.'.format(
v.get('variable')
)
)
else:
var_value = key
if variable_type is not None:
var_value = key_type
self.tcex.log.info(
u'Variable {} was NOT requested by downstream app.'.format(var_value)
)
return results | def function[create_output, parameter[self, key, value, variable_type]]:
constant[Wrapper for Create method of CRUD operation for working with KeyValue DB.
This method will automatically check to see if provided variable was requested by
a downstream app and if so create the data in the KeyValue DB.
Args:
key (string): The variable to write to the DB.
value (any): The data to write to the DB.
variable_type (string): The variable type being written.
Returns:
(string): Result string of DB write.
]
variable[results] assign[=] constant[None]
if compare[name[key] is_not constant[None]] begin[:]
variable[key] assign[=] call[name[key].strip, parameter[]]
variable[key_type] assign[=] call[constant[{}-{}].format, parameter[name[key], name[variable_type]]]
if compare[call[name[self].out_variables_type.get, parameter[name[key_type]]] is_not constant[None]] begin[:]
variable[v] assign[=] call[name[self].out_variables_type.get, parameter[name[key_type]]]
call[name[self].tcex.log.info, parameter[call[constant[Variable {} was requested by downstream app.].format, parameter[call[name[v].get, parameter[constant[variable]]]]]]]
if compare[name[value] is_not constant[None]] begin[:]
variable[results] assign[=] call[name[self].create, parameter[call[name[v].get, parameter[constant[variable]]], name[value]]]
return[name[results]] | keyword[def] identifier[create_output] ( identifier[self] , identifier[key] , identifier[value] , identifier[variable_type] = keyword[None] ):
literal[string]
identifier[results] = keyword[None]
keyword[if] identifier[key] keyword[is] keyword[not] keyword[None] :
identifier[key] = identifier[key] . identifier[strip] ()
identifier[key_type] = literal[string] . identifier[format] ( identifier[key] , identifier[variable_type] )
keyword[if] identifier[self] . identifier[out_variables_type] . identifier[get] ( identifier[key_type] ) keyword[is] keyword[not] keyword[None] :
identifier[v] = identifier[self] . identifier[out_variables_type] . identifier[get] ( identifier[key_type] )
identifier[self] . identifier[tcex] . identifier[log] . identifier[info] (
literal[string] . identifier[format] ( identifier[v] . identifier[get] ( literal[string] ))
)
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[results] = identifier[self] . identifier[create] ( identifier[v] . identifier[get] ( literal[string] ), identifier[value] )
keyword[else] :
identifier[self] . identifier[tcex] . identifier[log] . identifier[info] (
literal[string] . identifier[format] ( identifier[key] )
)
keyword[elif] identifier[self] . identifier[out_variables] . identifier[get] ( identifier[key] ) keyword[is] keyword[not] keyword[None] keyword[and] identifier[variable_type] keyword[is] keyword[None] :
identifier[v] = identifier[self] . identifier[out_variables] . identifier[get] ( identifier[key] )
identifier[self] . identifier[tcex] . identifier[log] . identifier[info] (
literal[string] . identifier[format] ( identifier[v] . identifier[get] ( literal[string] ))
)
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[results] = identifier[self] . identifier[create] ( identifier[v] . identifier[get] ( literal[string] ), identifier[value] )
keyword[else] :
identifier[self] . identifier[tcex] . identifier[log] . identifier[info] (
literal[string] . identifier[format] (
identifier[v] . identifier[get] ( literal[string] )
)
)
keyword[else] :
identifier[var_value] = identifier[key]
keyword[if] identifier[variable_type] keyword[is] keyword[not] keyword[None] :
identifier[var_value] = identifier[key_type]
identifier[self] . identifier[tcex] . identifier[log] . identifier[info] (
literal[string] . identifier[format] ( identifier[var_value] )
)
keyword[return] identifier[results] | def create_output(self, key, value, variable_type=None):
"""Wrapper for Create method of CRUD operation for working with KeyValue DB.
This method will automatically check to see if provided variable was requested by
a downstream app and if so create the data in the KeyValue DB.
Args:
key (string): The variable to write to the DB.
value (any): The data to write to the DB.
variable_type (string): The variable type being written.
Returns:
(string): Result string of DB write.
"""
results = None
if key is not None:
key = key.strip()
key_type = '{}-{}'.format(key, variable_type)
if self.out_variables_type.get(key_type) is not None:
# variable key-type has been requested
v = self.out_variables_type.get(key_type)
self.tcex.log.info(u'Variable {} was requested by downstream app.'.format(v.get('variable')))
if value is not None:
results = self.create(v.get('variable'), value) # depends on [control=['if'], data=['value']]
else:
self.tcex.log.info(u'Variable {} has a none value and will not be written.'.format(key)) # depends on [control=['if'], data=[]]
elif self.out_variables.get(key) is not None and variable_type is None:
# variable key has been requested
v = self.out_variables.get(key)
self.tcex.log.info(u'Variable {} was requested by downstream app.'.format(v.get('variable')))
if value is not None:
results = self.create(v.get('variable'), value) # depends on [control=['if'], data=['value']]
else:
self.tcex.log.info(u'Variable {} has a none value and will not be written.'.format(v.get('variable'))) # depends on [control=['if'], data=[]]
else:
var_value = key
if variable_type is not None:
var_value = key_type # depends on [control=['if'], data=[]]
self.tcex.log.info(u'Variable {} was NOT requested by downstream app.'.format(var_value)) # depends on [control=['if'], data=['key']]
return results |
def load(self, cookies, **kwargs):
"""Load session from cookies."""
value = cookies.get(self.key, None)
if value is None:
return False
value = self.decrypt(value)
if not value:
return False
data = json.loads(value)
if not isinstance(data, dict):
return False
self.store = data
self.update(self.store) | def function[load, parameter[self, cookies]]:
constant[Load session from cookies.]
variable[value] assign[=] call[name[cookies].get, parameter[name[self].key, constant[None]]]
if compare[name[value] is constant[None]] begin[:]
return[constant[False]]
variable[value] assign[=] call[name[self].decrypt, parameter[name[value]]]
if <ast.UnaryOp object at 0x7da18dc9a3b0> begin[:]
return[constant[False]]
variable[data] assign[=] call[name[json].loads, parameter[name[value]]]
if <ast.UnaryOp object at 0x7da207f03fa0> begin[:]
return[constant[False]]
name[self].store assign[=] name[data]
call[name[self].update, parameter[name[self].store]] | keyword[def] identifier[load] ( identifier[self] , identifier[cookies] ,** identifier[kwargs] ):
literal[string]
identifier[value] = identifier[cookies] . identifier[get] ( identifier[self] . identifier[key] , keyword[None] )
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] keyword[False]
identifier[value] = identifier[self] . identifier[decrypt] ( identifier[value] )
keyword[if] keyword[not] identifier[value] :
keyword[return] keyword[False]
identifier[data] = identifier[json] . identifier[loads] ( identifier[value] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[dict] ):
keyword[return] keyword[False]
identifier[self] . identifier[store] = identifier[data]
identifier[self] . identifier[update] ( identifier[self] . identifier[store] ) | def load(self, cookies, **kwargs):
"""Load session from cookies."""
value = cookies.get(self.key, None)
if value is None:
return False # depends on [control=['if'], data=[]]
value = self.decrypt(value)
if not value:
return False # depends on [control=['if'], data=[]]
data = json.loads(value)
if not isinstance(data, dict):
return False # depends on [control=['if'], data=[]]
self.store = data
self.update(self.store) |
def get_next_departures(stop, filter_line=None, num_line_groups=1, verbose=False):
"""
Get all real-time departure times for given stop and return as filtered table.
Terminate if we can assume there is no connection to the internet.
"""
# Get departures table from online service
# (great: we don't have to iterate over multiple pages).
url = BVG_URL_PAT % stop
if verbose:
print('- Fetching table for URL "%s".' % url)
try:
tables = pd.read_html(url.encode('utf-8'))
except urllib.error.URLError:
msg = 'Not connected to the internet?'
termcolor.cprint(msg, 'red', attrs=['bold'])
sys.exit(1)
except ValueError:
return []
table = tables[0]
table.columns = ['Departure', 'Line', 'Destination']
if verbose:
print('- Got table with %d entries for "%s".' % (len(table), stop))
# Cleanup
# Drop entries with '*' in column Departure...
# (causes trouble when resulting in an empty table!)
# table = table[table.Departure.apply(lambda row: " *" not in row)]
# So, instead remove '*' in column Departure...
table.is_copy = False # prevents SettingWithCopyWarning
table.Departure = table.apply(
lambda row: re.sub('\s*\*\s*', '', row.Departure), axis=1)
# Replace regex ' +' with ' ' in column Line...
table.Line = table.apply(lambda row: re.sub(' +', ' ', row.Line), axis=1)
# Filter desired number of unique combinations of Line and Destination column.
indices = []
for i in range(num_line_groups):
try:
indices += sorted([tab.index.values[i]
for line_dest, tab in table.groupby(['Line', 'Destination'])])
except IndexError:
break
table = table[table.index.map(lambda x: x in indices)]
# Insert a left-most column with minutes and seconds from now
# until the departure time.
table.insert(0, "Wait", table.Departure.apply(lambda dep: wait_time(dep)))
# Filter on desired lines only
if filter_line:
table = table[table.Line.apply(
lambda cell: filter_line.lower().encode('utf-8') in cell.lower())]
return table | def function[get_next_departures, parameter[stop, filter_line, num_line_groups, verbose]]:
constant[
Get all real-time departure times for given stop and return as filtered table.
Terminate if we can assume there is no connection to the internet.
]
variable[url] assign[=] binary_operation[name[BVG_URL_PAT] <ast.Mod object at 0x7da2590d6920> name[stop]]
if name[verbose] begin[:]
call[name[print], parameter[binary_operation[constant[- Fetching table for URL "%s".] <ast.Mod object at 0x7da2590d6920> name[url]]]]
<ast.Try object at 0x7da2049624d0>
variable[table] assign[=] call[name[tables]][constant[0]]
name[table].columns assign[=] list[[<ast.Constant object at 0x7da18bc72770>, <ast.Constant object at 0x7da18bc736d0>, <ast.Constant object at 0x7da18bc72350>]]
if name[verbose] begin[:]
call[name[print], parameter[binary_operation[constant[- Got table with %d entries for "%s".] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18bc71ed0>, <ast.Name object at 0x7da18bc733d0>]]]]]
name[table].is_copy assign[=] constant[False]
name[table].Departure assign[=] call[name[table].apply, parameter[<ast.Lambda object at 0x7da18bc725f0>]]
name[table].Line assign[=] call[name[table].apply, parameter[<ast.Lambda object at 0x7da18bc711e0>]]
variable[indices] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[num_line_groups]]]] begin[:]
<ast.Try object at 0x7da18bc728f0>
variable[table] assign[=] call[name[table]][call[name[table].index.map, parameter[<ast.Lambda object at 0x7da20c6e7cd0>]]]
call[name[table].insert, parameter[constant[0], constant[Wait], call[name[table].Departure.apply, parameter[<ast.Lambda object at 0x7da20c992650>]]]]
if name[filter_line] begin[:]
variable[table] assign[=] call[name[table]][call[name[table].Line.apply, parameter[<ast.Lambda object at 0x7da20c993e20>]]]
return[name[table]] | keyword[def] identifier[get_next_departures] ( identifier[stop] , identifier[filter_line] = keyword[None] , identifier[num_line_groups] = literal[int] , identifier[verbose] = keyword[False] ):
literal[string]
identifier[url] = identifier[BVG_URL_PAT] % identifier[stop]
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] % identifier[url] )
keyword[try] :
identifier[tables] = identifier[pd] . identifier[read_html] ( identifier[url] . identifier[encode] ( literal[string] ))
keyword[except] identifier[urllib] . identifier[error] . identifier[URLError] :
identifier[msg] = literal[string]
identifier[termcolor] . identifier[cprint] ( identifier[msg] , literal[string] , identifier[attrs] =[ literal[string] ])
identifier[sys] . identifier[exit] ( literal[int] )
keyword[except] identifier[ValueError] :
keyword[return] []
identifier[table] = identifier[tables] [ literal[int] ]
identifier[table] . identifier[columns] =[ literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] %( identifier[len] ( identifier[table] ), identifier[stop] ))
identifier[table] . identifier[is_copy] = keyword[False]
identifier[table] . identifier[Departure] = identifier[table] . identifier[apply] (
keyword[lambda] identifier[row] : identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[row] . identifier[Departure] ), identifier[axis] = literal[int] )
identifier[table] . identifier[Line] = identifier[table] . identifier[apply] ( keyword[lambda] identifier[row] : identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[row] . identifier[Line] ), identifier[axis] = literal[int] )
identifier[indices] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_line_groups] ):
keyword[try] :
identifier[indices] += identifier[sorted] ([ identifier[tab] . identifier[index] . identifier[values] [ identifier[i] ]
keyword[for] identifier[line_dest] , identifier[tab] keyword[in] identifier[table] . identifier[groupby] ([ literal[string] , literal[string] ])])
keyword[except] identifier[IndexError] :
keyword[break]
identifier[table] = identifier[table] [ identifier[table] . identifier[index] . identifier[map] ( keyword[lambda] identifier[x] : identifier[x] keyword[in] identifier[indices] )]
identifier[table] . identifier[insert] ( literal[int] , literal[string] , identifier[table] . identifier[Departure] . identifier[apply] ( keyword[lambda] identifier[dep] : identifier[wait_time] ( identifier[dep] )))
keyword[if] identifier[filter_line] :
identifier[table] = identifier[table] [ identifier[table] . identifier[Line] . identifier[apply] (
keyword[lambda] identifier[cell] : identifier[filter_line] . identifier[lower] (). identifier[encode] ( literal[string] ) keyword[in] identifier[cell] . identifier[lower] ())]
keyword[return] identifier[table] | def get_next_departures(stop, filter_line=None, num_line_groups=1, verbose=False):
"""
Get all real-time departure times for given stop and return as filtered table.
Terminate if we can assume there is no connection to the internet.
"""
# Get departures table from online service
# (great: we don't have to iterate over multiple pages).
url = BVG_URL_PAT % stop
if verbose:
print('- Fetching table for URL "%s".' % url) # depends on [control=['if'], data=[]]
try:
tables = pd.read_html(url.encode('utf-8')) # depends on [control=['try'], data=[]]
except urllib.error.URLError:
msg = 'Not connected to the internet?'
termcolor.cprint(msg, 'red', attrs=['bold'])
sys.exit(1) # depends on [control=['except'], data=[]]
except ValueError:
return [] # depends on [control=['except'], data=[]]
table = tables[0]
table.columns = ['Departure', 'Line', 'Destination']
if verbose:
print('- Got table with %d entries for "%s".' % (len(table), stop)) # depends on [control=['if'], data=[]]
# Cleanup
# Drop entries with '*' in column Departure...
# (causes trouble when resulting in an empty table!)
# table = table[table.Departure.apply(lambda row: " *" not in row)]
# So, instead remove '*' in column Departure...
table.is_copy = False # prevents SettingWithCopyWarning
table.Departure = table.apply(lambda row: re.sub('\\s*\\*\\s*', '', row.Departure), axis=1)
# Replace regex ' +' with ' ' in column Line...
table.Line = table.apply(lambda row: re.sub(' +', ' ', row.Line), axis=1)
# Filter desired number of unique combinations of Line and Destination column.
indices = []
for i in range(num_line_groups):
try:
indices += sorted([tab.index.values[i] for (line_dest, tab) in table.groupby(['Line', 'Destination'])]) # depends on [control=['try'], data=[]]
except IndexError:
break # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']]
table = table[table.index.map(lambda x: x in indices)] # Insert a left-most column with minutes and seconds from now
# until the departure time.
table.insert(0, 'Wait', table.Departure.apply(lambda dep: wait_time(dep)))
# Filter on desired lines only
if filter_line:
table = table[table.Line.apply(lambda cell: filter_line.lower().encode('utf-8') in cell.lower())] # depends on [control=['if'], data=[]]
return table |
def path(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid path-like object.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: The path represented by ``value``.
:rtype: Path-like object / :obj:`None <python:None>`
:raises EmptyValueError: if ``allow_empty`` is ``False`` and ``value`` is empty
:raises NotPathlikeError: if ``value`` is not a valid path-like object
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if hasattr(os, 'PathLike'):
if not isinstance(value, (str, bytes, int, os.PathLike)): # pylint: disable=E1101
raise errors.NotPathlikeError('value (%s) is path-like' % value)
else:
if not isinstance(value, int):
try:
os.path.exists(value)
except TypeError:
raise errors.NotPathlikeError('value (%s) is not path-like' % value)
return value | def function[path, parameter[value, allow_empty]]:
constant[Validate that ``value`` is a valid path-like object.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: The path represented by ``value``.
:rtype: Path-like object / :obj:`None <python:None>`
:raises EmptyValueError: if ``allow_empty`` is ``False`` and ``value`` is empty
:raises NotPathlikeError: if ``value`` is not a valid path-like object
]
if <ast.BoolOp object at 0x7da1b06f27d0> begin[:]
<ast.Raise object at 0x7da1b06f0100>
if call[name[hasattr], parameter[name[os], constant[PathLike]]] begin[:]
if <ast.UnaryOp object at 0x7da1b06f00a0> begin[:]
<ast.Raise object at 0x7da1b06f06d0>
return[name[value]] | keyword[def] identifier[path] ( identifier[value] ,
identifier[allow_empty] = keyword[False] ,
** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[value] keyword[and] keyword[not] identifier[allow_empty] :
keyword[raise] identifier[errors] . identifier[EmptyValueError] ( literal[string] % identifier[value] )
keyword[elif] keyword[not] identifier[value] :
keyword[return] keyword[None]
keyword[if] identifier[hasattr] ( identifier[os] , literal[string] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[str] , identifier[bytes] , identifier[int] , identifier[os] . identifier[PathLike] )):
keyword[raise] identifier[errors] . identifier[NotPathlikeError] ( literal[string] % identifier[value] )
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[int] ):
keyword[try] :
identifier[os] . identifier[path] . identifier[exists] ( identifier[value] )
keyword[except] identifier[TypeError] :
keyword[raise] identifier[errors] . identifier[NotPathlikeError] ( literal[string] % identifier[value] )
keyword[return] identifier[value] | def path(value, allow_empty=False, **kwargs):
"""Validate that ``value`` is a valid path-like object.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: The path represented by ``value``.
:rtype: Path-like object / :obj:`None <python:None>`
:raises EmptyValueError: if ``allow_empty`` is ``False`` and ``value`` is empty
:raises NotPathlikeError: if ``value`` is not a valid path-like object
"""
if not value and (not allow_empty):
raise errors.EmptyValueError('value (%s) was empty' % value) # depends on [control=['if'], data=[]]
elif not value:
return None # depends on [control=['if'], data=[]]
if hasattr(os, 'PathLike'):
if not isinstance(value, (str, bytes, int, os.PathLike)): # pylint: disable=E1101
raise errors.NotPathlikeError('value (%s) is path-like' % value) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not isinstance(value, int):
try:
os.path.exists(value) # depends on [control=['try'], data=[]]
except TypeError:
raise errors.NotPathlikeError('value (%s) is not path-like' % value) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return value |
def load_entry_point_group_templates(self, entry_point_group_templates):
    """Load actions from an entry point group."""
    result = []
    for entry_point in iter_entry_points(group=entry_point_group_templates):
        # Each entry point resolves to a callable that yields template
        # directories; registration must run inside the app context.
        with self.app.app_context():
            template_dirs = entry_point.load()()
            result.extend(self.register_templates(d) for d in template_dirs)
return result | def function[load_entry_point_group_templates, parameter[self, entry_point_group_templates]]:
constant[Load actions from an entry point group.]
variable[result] assign[=] list[[]]
for taget[name[ep]] in starred[call[name[iter_entry_points], parameter[]]] begin[:]
with call[name[self].app.app_context, parameter[]] begin[:]
for taget[name[template_dir]] in starred[call[call[name[ep].load, parameter[]], parameter[]]] begin[:]
call[name[result].append, parameter[call[name[self].register_templates, parameter[name[template_dir]]]]]
return[name[result]] | keyword[def] identifier[load_entry_point_group_templates] ( identifier[self] , identifier[entry_point_group_templates] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[ep] keyword[in] identifier[iter_entry_points] ( identifier[group] = identifier[entry_point_group_templates] ):
keyword[with] identifier[self] . identifier[app] . identifier[app_context] ():
keyword[for] identifier[template_dir] keyword[in] identifier[ep] . identifier[load] ()():
identifier[result] . identifier[append] ( identifier[self] . identifier[register_templates] ( identifier[template_dir] ))
keyword[return] identifier[result] | def load_entry_point_group_templates(self, entry_point_group_templates):
"""Load actions from an entry point group."""
result = []
for ep in iter_entry_points(group=entry_point_group_templates):
with self.app.app_context():
for template_dir in ep.load()():
result.append(self.register_templates(template_dir)) # depends on [control=['for'], data=['template_dir']] # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['ep']]
return result |
def read_extra_nodes(self, filename):
    """Read extra nodes from a text file and register them.

    Each line of the file describes one node as whitespace-separated
    coordinates: ``x y``.

    NOTE(review): nodes placed on a boundary are not inserted into the
    boundary definition itself -- confirm whether that is intended.

    :param filename: path of the file holding the node coordinates
    """
    # atleast_2d lets a single-node file (one row) be handled uniformly.
    data = np.atleast_2d(np.loadtxt(filename))
    # The row index was previously unpacked via enumerate() but never
    # used; iterate over the coordinate pairs directly.
    for pair in data:
        index = self.get_point_id(pair, self.char_lengths['extra_node'])
self.ExtraNodes.append(index) | def function[read_extra_nodes, parameter[self, filename]]:
constant[Read extra nodes in. Format: x y
What happens if we add nodes on the boundaries, which are not included
in the boundaries?
]
variable[data] assign[=] call[name[np].atleast_2d, parameter[call[name[np].loadtxt, parameter[name[filename]]]]]
for taget[tuple[[<ast.Name object at 0x7da18f58f0a0>, <ast.Name object at 0x7da18f58c370>]]] in starred[call[name[enumerate], parameter[name[data]]]] begin[:]
variable[index] assign[=] call[name[self].get_point_id, parameter[name[pair], call[name[self].char_lengths][constant[extra_node]]]]
call[name[self].ExtraNodes.append, parameter[name[index]]] | keyword[def] identifier[read_extra_nodes] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[data] = identifier[np] . identifier[atleast_2d] ( identifier[np] . identifier[loadtxt] ( identifier[filename] ))
keyword[for] identifier[nr] , identifier[pair] keyword[in] identifier[enumerate] ( identifier[data] ):
identifier[index] = identifier[self] . identifier[get_point_id] ( identifier[pair] , identifier[self] . identifier[char_lengths] [ literal[string] ])
identifier[self] . identifier[ExtraNodes] . identifier[append] ( identifier[index] ) | def read_extra_nodes(self, filename):
"""Read extra nodes in. Format: x y
What happens if we add nodes on the boundaries, which are not included
in the boundaries?
"""
data = np.atleast_2d(np.loadtxt(filename))
for (nr, pair) in enumerate(data):
index = self.get_point_id(pair, self.char_lengths['extra_node'])
self.ExtraNodes.append(index) # depends on [control=['for'], data=[]] |
def _make_attr_element(parent, attr_i):
    """
    create an attribute element from an attribute DB object
    """
    attr = etree.SubElement(parent, "attribute")
    # The simple text children mirror the DB object's fields one-to-one.
    for tag, text in (('name', attr_i.name),
                      ('description', attr_i.description)):
        etree.SubElement(attr, tag).text = text
    dimension = units.get_dimension(attr_i.dimension_id,
                                    do_accept_dimension_id_none=True)
    etree.SubElement(attr, 'dimension').text = dimension.name
return attr | def function[_make_attr_element, parameter[parent, attr_i]]:
constant[
create an attribute element from an attribute DB object
]
variable[attr] assign[=] call[name[etree].SubElement, parameter[name[parent], constant[attribute]]]
variable[attr_name] assign[=] call[name[etree].SubElement, parameter[name[attr], constant[name]]]
name[attr_name].text assign[=] name[attr_i].name
variable[attr_desc] assign[=] call[name[etree].SubElement, parameter[name[attr], constant[description]]]
name[attr_desc].text assign[=] name[attr_i].description
variable[attr_dimension] assign[=] call[name[etree].SubElement, parameter[name[attr], constant[dimension]]]
name[attr_dimension].text assign[=] call[name[units].get_dimension, parameter[name[attr_i].dimension_id]].name
return[name[attr]] | keyword[def] identifier[_make_attr_element] ( identifier[parent] , identifier[attr_i] ):
literal[string]
identifier[attr] = identifier[etree] . identifier[SubElement] ( identifier[parent] , literal[string] )
identifier[attr_name] = identifier[etree] . identifier[SubElement] ( identifier[attr] , literal[string] )
identifier[attr_name] . identifier[text] = identifier[attr_i] . identifier[name]
identifier[attr_desc] = identifier[etree] . identifier[SubElement] ( identifier[attr] , literal[string] )
identifier[attr_desc] . identifier[text] = identifier[attr_i] . identifier[description]
identifier[attr_dimension] = identifier[etree] . identifier[SubElement] ( identifier[attr] , literal[string] )
identifier[attr_dimension] . identifier[text] = identifier[units] . identifier[get_dimension] ( identifier[attr_i] . identifier[dimension_id] , identifier[do_accept_dimension_id_none] = keyword[True] ). identifier[name]
keyword[return] identifier[attr] | def _make_attr_element(parent, attr_i):
"""
create an attribute element from an attribute DB object
"""
attr = etree.SubElement(parent, 'attribute')
attr_name = etree.SubElement(attr, 'name')
attr_name.text = attr_i.name
attr_desc = etree.SubElement(attr, 'description')
attr_desc.text = attr_i.description
attr_dimension = etree.SubElement(attr, 'dimension')
attr_dimension.text = units.get_dimension(attr_i.dimension_id, do_accept_dimension_id_none=True).name
return attr |
def _serializeExclude_eval(parentUnit, obj, isDeclaration, priv):
    """
    Unconditionally decide not to serialize ``obj``.
    :param priv: private data for this function, first unit of this class
    :return: tuple (do serialize this object, next priv)
    """
    if isDeclaration:
        # register the entity so the serializer knows to leave it out
        prepareEntity(obj, parentUnit.__class__.__name__, priv)
    # the very first call for this class seeds priv with the parent unit
    priv = parentUnit if priv is None else priv
return False, priv | def function[_serializeExclude_eval, parameter[parentUnit, obj, isDeclaration, priv]]:
constant[
Always decide not to serialize obj
:param priv: private data for this function first unit of this class
:return: tuple (do serialize this object, next priv)
]
if name[isDeclaration] begin[:]
call[name[prepareEntity], parameter[name[obj], name[parentUnit].__class__.__name__, name[priv]]]
if compare[name[priv] is constant[None]] begin[:]
variable[priv] assign[=] name[parentUnit]
return[tuple[[<ast.Constant object at 0x7da1b03faec0>, <ast.Name object at 0x7da1b03f8fa0>]]] | keyword[def] identifier[_serializeExclude_eval] ( identifier[parentUnit] , identifier[obj] , identifier[isDeclaration] , identifier[priv] ):
literal[string]
keyword[if] identifier[isDeclaration] :
identifier[prepareEntity] ( identifier[obj] , identifier[parentUnit] . identifier[__class__] . identifier[__name__] , identifier[priv] )
keyword[if] identifier[priv] keyword[is] keyword[None] :
identifier[priv] = identifier[parentUnit]
keyword[return] keyword[False] , identifier[priv] | def _serializeExclude_eval(parentUnit, obj, isDeclaration, priv):
"""
Always decide not to serialize obj
:param priv: private data for this function first unit of this class
:return: tuple (do serialize this object, next priv)
"""
if isDeclaration:
# prepare entity which will not be serialized
prepareEntity(obj, parentUnit.__class__.__name__, priv) # depends on [control=['if'], data=[]]
if priv is None:
priv = parentUnit # depends on [control=['if'], data=['priv']]
return (False, priv) |
def percentile(x,
               q,
               axis=None,
               interpolation=None,
               keep_dims=False,
               validate_args=False,
               preserve_gradients=True,
               name=None):
  """Compute the `q`-th percentile(s) of `x`.
  Given a vector `x`, the `q`-th percentile of `x` is the value `q / 100` of the
  way from the minimum to the maximum in a sorted copy of `x`.
  The values and distances of the two nearest neighbors as well as the
  `interpolation` parameter will determine the percentile if the normalized
  ranking does not match the location of `q` exactly.
  This function is the same as the median if `q = 50`, the same as the minimum
  if `q = 0` and the same as the maximum if `q = 100`.
  Multiple percentiles can be computed at once by using `1-D` vector `q`.
  Dimension zero of the returned `Tensor` will index the different percentiles.
  Compare to `numpy.percentile`.
  Args:
    x: Numeric `N-D` `Tensor` with `N > 0`. If `axis` is not `None`,
      `x` must have statically known number of dimensions.
    q: Scalar or vector `Tensor` with values in `[0, 100]`. The percentile(s).
    axis: Optional `0-D` or `1-D` integer `Tensor` with constant values. The
      axis that index independent samples over which to return the desired
      percentile. If `None` (the default), treat every dimension as a sample
      dimension, returning a scalar.
    interpolation : {'nearest', 'linear', 'lower', 'higher', 'midpoint'}.
      Default value: 'nearest'. This specifies the interpolation method to
      use when the desired quantile lies between two data points `i < j`:
      * linear: i + (j - i) * fraction, where fraction is the fractional part
        of the index surrounded by i and j.
      * lower: `i`.
      * higher: `j`.
      * nearest: `i` or `j`, whichever is nearest.
      * midpoint: (i + j) / 2.
      `linear` and `midpoint` interpolation do not work with integer dtypes.
    keep_dims: Python `bool`. If `True`, the last dimension is kept with size 1
      If `False`, the last dimension is removed from the output shape.
    validate_args: Whether to add runtime checks of argument validity. If
      False, and arguments are incorrect, correct behavior is not guaranteed.
    preserve_gradients: Python `bool`. If `True`, ensure that gradient w.r.t
      the percentile `q` is preserved in the case of linear interpolation.
      If `False`, the gradient will be (incorrectly) zero when `q` corresponds
      to a point in `x`.
    name: A Python string name to give this `Op`. Default is 'percentile'
  Returns:
    A `(rank(q) + N - len(axis))` dimensional `Tensor` of same dtype as `x`, or,
    if `axis` is `None`, a `rank(q)` `Tensor`. The first `rank(q)` dimensions
    index quantiles for different values of `q`.
  Raises:
    ValueError: If argument 'interpolation' is not an allowed type.
    ValueError: If interpolation type not compatible with `dtype`.
  #### Examples
  ```python
  # Get 30th percentile with default ('nearest') interpolation.
  x = [1., 2., 3., 4.]
  tfp.stats.percentile(x, q=30.)
  ==> 2.0
  # Get 30th percentile with 'linear' interpolation.
  x = [1., 2., 3., 4.]
  tfp.stats.percentile(x, q=30., interpolation='linear')
  ==> 1.9
  # Get 30th and 70th percentiles with 'lower' interpolation
  x = [1., 2., 3., 4.]
  tfp.stats.percentile(x, q=[30., 70.], interpolation='lower')
  ==> [1., 3.]
  # Get 100th percentile (maximum). By default, this is computed over every dim
  x = [[1., 2.]
       [3., 4.]]
  tfp.stats.percentile(x, q=100.)
  ==> 4.
  # Treat the leading dim as indexing samples, and find the 100th quantile (max)
  # over all such samples.
  x = [[1., 2.]
       [3., 4.]]
  tfp.stats.percentile(x, q=100., axis=[0])
  ==> [3., 4.]
  ```
  """
  name = name or 'percentile'
  allowed_interpolations = {'linear', 'lower', 'higher', 'nearest', 'midpoint'}
  if interpolation is None:
    interpolation = 'nearest'
  else:
    if interpolation not in allowed_interpolations:
      raise ValueError('Argument `interpolation` must be in %s. Found %s' %
                       (allowed_interpolations, interpolation))
  with tf.compat.v1.name_scope(name, values=[x, q]):
    x = tf.convert_to_tensor(value=x, name='x')
    # linear/midpoint produce values between data points, which is
    # meaningless (and rejected) for integer dtypes.
    if interpolation in {'linear', 'midpoint'} and x.dtype.is_integer:
      raise TypeError('{} interpolation not allowed with dtype {}'.format(
          interpolation, x.dtype))
    # Double is needed here and below, else we get the wrong index if the array
    # is huge along axis.
    q = tf.cast(q, tf.float64)
    _get_static_ndims(q, expect_ndims_no_more_than=1)
    if validate_args:
      q = distribution_util.with_dependencies([
          tf.compat.v1.assert_rank_in(q, [0, 1]),
          tf.compat.v1.assert_greater_equal(q, tf.cast(0., tf.float64)),
          tf.compat.v1.assert_less_equal(q, tf.cast(100., tf.float64))
      ], q)
    # Move `axis` dims of `x` to the rightmost, call it `y`.
    if axis is None:
      y = tf.reshape(x, [-1])
    else:
      x_ndims = _get_static_ndims(
          x, expect_static=True, expect_ndims_at_least=1)
      axis = _make_static_axis_non_negative_list(axis, x_ndims)
      y = _move_dims_to_flat_end(x, axis, x_ndims, right_end=True)
    # Since sorted_y below is sorted highest-to-lowest (see the note in
    # _get_indices), work with the fraction of mass at or above q.
    frac_at_q_or_above = 1. - q / 100.
    # Sort everything, not just the top 'k' entries, which allows multiple calls
    # to sort only once (under the hood) and use CSE.
    sorted_y = _sort_tensor(y)
    d = tf.cast(tf.shape(input=y)[-1], tf.float64)
    def _get_indices(interp_type):
      """Get values of y at the indices implied by interp_type."""
      # Note `lower` <--> ceiling. Confusing, huh? Due to the fact that
      # _sort_tensor sorts highest to lowest, tf.ceil corresponds to the higher
      # index, but the lower value of y!
      # NOTE: interp_type is always one of the three cases below -- the
      # 'linear'/'midpoint' modes are decomposed into 'lower'/'higher'
      # before calling -- so `indices` is always bound.
      if interp_type == 'lower':
        indices = tf.math.ceil((d - 1) * frac_at_q_or_above)
      elif interp_type == 'higher':
        indices = tf.floor((d - 1) * frac_at_q_or_above)
      elif interp_type == 'nearest':
        indices = tf.round((d - 1) * frac_at_q_or_above)
      # d - 1 will be distinct from d in int32, but not necessarily double.
      # So clip to avoid out of bounds errors.
      return tf.clip_by_value(
          tf.cast(indices, tf.int32), 0,
          tf.shape(input=y)[-1] - 1)
    if interpolation in ['nearest', 'lower', 'higher']:
      gathered_y = tf.gather(sorted_y, _get_indices(interpolation), axis=-1)
    elif interpolation == 'midpoint':
      gathered_y = 0.5 * (
          tf.gather(sorted_y, _get_indices('lower'), axis=-1) +
          tf.gather(sorted_y, _get_indices('higher'), axis=-1))
    elif interpolation == 'linear':
      # Copy-paste of docstring on interpolation:
      # linear: i + (j - i) * fraction, where fraction is the fractional part
      # of the index surrounded by i and j.
      larger_y_idx = _get_indices('lower')
      exact_idx = (d - 1) * frac_at_q_or_above
      if preserve_gradients:
        # If q corresponds to a point in x, we will initially have
        # larger_y_idx == smaller_y_idx.
        # This results in the gradient w.r.t. fraction being zero (recall `q`
        # enters only through `fraction`...and see that things cancel).
        # The fix is to ensure that smaller_y_idx and larger_y_idx are always
        # separated by exactly 1.
        smaller_y_idx = tf.maximum(larger_y_idx - 1, 0)
        larger_y_idx = tf.minimum(smaller_y_idx + 1, tf.shape(input=y)[-1] - 1)
        fraction = tf.cast(larger_y_idx, tf.float64) - exact_idx
      else:
        smaller_y_idx = _get_indices('higher')
        fraction = tf.math.ceil((d - 1) * frac_at_q_or_above) - exact_idx
      fraction = tf.cast(fraction, y.dtype)
      gathered_y = (
          tf.gather(sorted_y, larger_y_idx, axis=-1) * (1 - fraction) +
          tf.gather(sorted_y, smaller_y_idx, axis=-1) * fraction)
    # Propagate NaNs
    if x.dtype in (tf.bfloat16, tf.float16, tf.float32, tf.float64):
      # Apparently tf.is_nan doesn't like other dtypes
      nan_batch_members = tf.reduce_any(
          input_tensor=tf.math.is_nan(x), axis=axis)
      # Pad the NaN-mask shape with trailing 1s so it broadcasts against
      # the result, which may carry extra trailing dims from `q`.
      right_rank_matched_shape = tf.pad(
          tensor=tf.shape(input=nan_batch_members),
          paddings=[[0, tf.rank(input=q)]],
          constant_values=1)
      nan_batch_members = tf.reshape(
          nan_batch_members, shape=right_rank_matched_shape)
      shape_gathered_y = tf.shape(input=gathered_y)
      nan = np.array(np.nan, gathered_y.dtype.as_numpy_dtype)
      gathered_y = tf.where(
          tf.broadcast_to(nan_batch_members, shape_gathered_y),
          tf.fill(shape_gathered_y, nan),
          gathered_y)
    # Expand dimensions if requested
    if keep_dims:
      if axis is None:
        # Multiplying by an all-ones tensor of full rank broadcasts the
        # result back up to rank(x) + rank(q) with size-1 dims.
        ones_vec = tf.ones(
            shape=[_get_best_effort_ndims(x) + _get_best_effort_ndims(q)],
            dtype=tf.int32)
        gathered_y *= tf.ones(ones_vec, dtype=x.dtype)
      else:
        gathered_y = _insert_back_keep_dims(gathered_y, axis)
    # If q is a scalar, then result has the right shape.
    # If q is a vector, then result has trailing dim of shape q.shape, which
    # needs to be rotated to dim 0.
return distribution_util.rotate_transpose(gathered_y, tf.rank(q)) | def function[percentile, parameter[x, q, axis, interpolation, keep_dims, validate_args, preserve_gradients, name]]:
constant[Compute the `q`-th percentile(s) of `x`.
Given a vector `x`, the `q`-th percentile of `x` is the value `q / 100` of the
way from the minimum to the maximum in a sorted copy of `x`.
The values and distances of the two nearest neighbors as well as the
`interpolation` parameter will determine the percentile if the normalized
ranking does not match the location of `q` exactly.
This function is the same as the median if `q = 50`, the same as the minimum
if `q = 0` and the same as the maximum if `q = 100`.
Multiple percentiles can be computed at once by using `1-D` vector `q`.
Dimension zero of the returned `Tensor` will index the different percentiles.
Compare to `numpy.percentile`.
Args:
x: Numeric `N-D` `Tensor` with `N > 0`. If `axis` is not `None`,
`x` must have statically known number of dimensions.
q: Scalar or vector `Tensor` with values in `[0, 100]`. The percentile(s).
axis: Optional `0-D` or `1-D` integer `Tensor` with constant values. The
axis that index independent samples over which to return the desired
percentile. If `None` (the default), treat every dimension as a sample
dimension, returning a scalar.
interpolation : {'nearest', 'linear', 'lower', 'higher', 'midpoint'}.
Default value: 'nearest'. This specifies the interpolation method to
use when the desired quantile lies between two data points `i < j`:
* linear: i + (j - i) * fraction, where fraction is the fractional part
of the index surrounded by i and j.
* lower: `i`.
* higher: `j`.
* nearest: `i` or `j`, whichever is nearest.
* midpoint: (i + j) / 2.
`linear` and `midpoint` interpolation do not work with integer dtypes.
keep_dims: Python `bool`. If `True`, the last dimension is kept with size 1
If `False`, the last dimension is removed from the output shape.
validate_args: Whether to add runtime checks of argument validity. If
False, and arguments are incorrect, correct behavior is not guaranteed.
preserve_gradients: Python `bool`. If `True`, ensure that gradient w.r.t
the percentile `q` is preserved in the case of linear interpolation.
If `False`, the gradient will be (incorrectly) zero when `q` corresponds
to a point in `x`.
name: A Python string name to give this `Op`. Default is 'percentile'
Returns:
A `(rank(q) + N - len(axis))` dimensional `Tensor` of same dtype as `x`, or,
if `axis` is `None`, a `rank(q)` `Tensor`. The first `rank(q)` dimensions
index quantiles for different values of `q`.
Raises:
ValueError: If argument 'interpolation' is not an allowed type.
ValueError: If interpolation type not compatible with `dtype`.
#### Examples
```python
# Get 30th percentile with default ('nearest') interpolation.
x = [1., 2., 3., 4.]
tfp.stats.percentile(x, q=30.)
==> 2.0
# Get 30th percentile with 'linear' interpolation.
x = [1., 2., 3., 4.]
tfp.stats.percentile(x, q=30., interpolation='linear')
==> 1.9
# Get 30th and 70th percentiles with 'lower' interpolation
x = [1., 2., 3., 4.]
tfp.stats.percentile(x, q=[30., 70.], interpolation='lower')
==> [1., 3.]
# Get 100th percentile (maximum). By default, this is computed over every dim
x = [[1., 2.]
[3., 4.]]
tfp.stats.percentile(x, q=100.)
==> 4.
# Treat the leading dim as indexing samples, and find the 100th quantile (max)
# over all such samples.
x = [[1., 2.]
[3., 4.]]
tfp.stats.percentile(x, q=100., axis=[0])
==> [3., 4.]
```
]
variable[name] assign[=] <ast.BoolOp object at 0x7da1b03566e0>
variable[allowed_interpolations] assign[=] <ast.Set object at 0x7da1b0354550>
if compare[name[interpolation] is constant[None]] begin[:]
variable[interpolation] assign[=] constant[nearest]
with call[name[tf].compat.v1.name_scope, parameter[name[name]]] begin[:]
variable[x] assign[=] call[name[tf].convert_to_tensor, parameter[]]
if <ast.BoolOp object at 0x7da1b03e2260> begin[:]
<ast.Raise object at 0x7da1b03f9750>
variable[q] assign[=] call[name[tf].cast, parameter[name[q], name[tf].float64]]
call[name[_get_static_ndims], parameter[name[q]]]
if name[validate_args] begin[:]
variable[q] assign[=] call[name[distribution_util].with_dependencies, parameter[list[[<ast.Call object at 0x7da1b03fa1a0>, <ast.Call object at 0x7da1b03f8790>, <ast.Call object at 0x7da1b03fa5c0>]], name[q]]]
if compare[name[axis] is constant[None]] begin[:]
variable[y] assign[=] call[name[tf].reshape, parameter[name[x], list[[<ast.UnaryOp object at 0x7da1b03f99c0>]]]]
variable[frac_at_q_or_above] assign[=] binary_operation[constant[1.0] - binary_operation[name[q] / constant[100.0]]]
variable[sorted_y] assign[=] call[name[_sort_tensor], parameter[name[y]]]
variable[d] assign[=] call[name[tf].cast, parameter[call[call[name[tf].shape, parameter[]]][<ast.UnaryOp object at 0x7da1b03f8400>], name[tf].float64]]
def function[_get_indices, parameter[interp_type]]:
constant[Get values of y at the indices implied by interp_type.]
if compare[name[interp_type] equal[==] constant[lower]] begin[:]
variable[indices] assign[=] call[name[tf].math.ceil, parameter[binary_operation[binary_operation[name[d] - constant[1]] * name[frac_at_q_or_above]]]]
return[call[name[tf].clip_by_value, parameter[call[name[tf].cast, parameter[name[indices], name[tf].int32]], constant[0], binary_operation[call[call[name[tf].shape, parameter[]]][<ast.UnaryOp object at 0x7da1b0213400>] - constant[1]]]]]
if compare[name[interpolation] in list[[<ast.Constant object at 0x7da1b0213670>, <ast.Constant object at 0x7da1b02135b0>, <ast.Constant object at 0x7da1b0213fa0>]]] begin[:]
variable[gathered_y] assign[=] call[name[tf].gather, parameter[name[sorted_y], call[name[_get_indices], parameter[name[interpolation]]]]]
if compare[name[x].dtype in tuple[[<ast.Attribute object at 0x7da1b0235c00>, <ast.Attribute object at 0x7da1b02355d0>, <ast.Attribute object at 0x7da1b0237f10>, <ast.Attribute object at 0x7da1b0235570>]]] begin[:]
variable[nan_batch_members] assign[=] call[name[tf].reduce_any, parameter[]]
variable[right_rank_matched_shape] assign[=] call[name[tf].pad, parameter[]]
variable[nan_batch_members] assign[=] call[name[tf].reshape, parameter[name[nan_batch_members]]]
variable[shape_gathered_y] assign[=] call[name[tf].shape, parameter[]]
variable[nan] assign[=] call[name[np].array, parameter[name[np].nan, name[gathered_y].dtype.as_numpy_dtype]]
variable[gathered_y] assign[=] call[name[tf].where, parameter[call[name[tf].broadcast_to, parameter[name[nan_batch_members], name[shape_gathered_y]]], call[name[tf].fill, parameter[name[shape_gathered_y], name[nan]]], name[gathered_y]]]
if name[keep_dims] begin[:]
if compare[name[axis] is constant[None]] begin[:]
variable[ones_vec] assign[=] call[name[tf].ones, parameter[]]
<ast.AugAssign object at 0x7da1b0236ce0>
return[call[name[distribution_util].rotate_transpose, parameter[name[gathered_y], call[name[tf].rank, parameter[name[q]]]]]] | keyword[def] identifier[percentile] ( identifier[x] ,
identifier[q] ,
identifier[axis] = keyword[None] ,
identifier[interpolation] = keyword[None] ,
identifier[keep_dims] = keyword[False] ,
identifier[validate_args] = keyword[False] ,
identifier[preserve_gradients] = keyword[True] ,
identifier[name] = keyword[None] ):
literal[string]
identifier[name] = identifier[name] keyword[or] literal[string]
identifier[allowed_interpolations] ={ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] }
keyword[if] identifier[interpolation] keyword[is] keyword[None] :
identifier[interpolation] = literal[string]
keyword[else] :
keyword[if] identifier[interpolation] keyword[not] keyword[in] identifier[allowed_interpolations] :
keyword[raise] identifier[ValueError] ( literal[string] %
( identifier[allowed_interpolations] , identifier[interpolation] ))
keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( identifier[name] , identifier[values] =[ identifier[x] , identifier[q] ]):
identifier[x] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[x] , identifier[name] = literal[string] )
keyword[if] identifier[interpolation] keyword[in] { literal[string] , literal[string] } keyword[and] identifier[x] . identifier[dtype] . identifier[is_integer] :
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] (
identifier[interpolation] , identifier[x] . identifier[dtype] ))
identifier[q] = identifier[tf] . identifier[cast] ( identifier[q] , identifier[tf] . identifier[float64] )
identifier[_get_static_ndims] ( identifier[q] , identifier[expect_ndims_no_more_than] = literal[int] )
keyword[if] identifier[validate_args] :
identifier[q] = identifier[distribution_util] . identifier[with_dependencies] ([
identifier[tf] . identifier[compat] . identifier[v1] . identifier[assert_rank_in] ( identifier[q] ,[ literal[int] , literal[int] ]),
identifier[tf] . identifier[compat] . identifier[v1] . identifier[assert_greater_equal] ( identifier[q] , identifier[tf] . identifier[cast] ( literal[int] , identifier[tf] . identifier[float64] )),
identifier[tf] . identifier[compat] . identifier[v1] . identifier[assert_less_equal] ( identifier[q] , identifier[tf] . identifier[cast] ( literal[int] , identifier[tf] . identifier[float64] ))
], identifier[q] )
keyword[if] identifier[axis] keyword[is] keyword[None] :
identifier[y] = identifier[tf] . identifier[reshape] ( identifier[x] ,[- literal[int] ])
keyword[else] :
identifier[x_ndims] = identifier[_get_static_ndims] (
identifier[x] , identifier[expect_static] = keyword[True] , identifier[expect_ndims_at_least] = literal[int] )
identifier[axis] = identifier[_make_static_axis_non_negative_list] ( identifier[axis] , identifier[x_ndims] )
identifier[y] = identifier[_move_dims_to_flat_end] ( identifier[x] , identifier[axis] , identifier[x_ndims] , identifier[right_end] = keyword[True] )
identifier[frac_at_q_or_above] = literal[int] - identifier[q] / literal[int]
identifier[sorted_y] = identifier[_sort_tensor] ( identifier[y] )
identifier[d] = identifier[tf] . identifier[cast] ( identifier[tf] . identifier[shape] ( identifier[input] = identifier[y] )[- literal[int] ], identifier[tf] . identifier[float64] )
keyword[def] identifier[_get_indices] ( identifier[interp_type] ):
literal[string]
keyword[if] identifier[interp_type] == literal[string] :
identifier[indices] = identifier[tf] . identifier[math] . identifier[ceil] (( identifier[d] - literal[int] )* identifier[frac_at_q_or_above] )
keyword[elif] identifier[interp_type] == literal[string] :
identifier[indices] = identifier[tf] . identifier[floor] (( identifier[d] - literal[int] )* identifier[frac_at_q_or_above] )
keyword[elif] identifier[interp_type] == literal[string] :
identifier[indices] = identifier[tf] . identifier[round] (( identifier[d] - literal[int] )* identifier[frac_at_q_or_above] )
keyword[return] identifier[tf] . identifier[clip_by_value] (
identifier[tf] . identifier[cast] ( identifier[indices] , identifier[tf] . identifier[int32] ), literal[int] ,
identifier[tf] . identifier[shape] ( identifier[input] = identifier[y] )[- literal[int] ]- literal[int] )
keyword[if] identifier[interpolation] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[gathered_y] = identifier[tf] . identifier[gather] ( identifier[sorted_y] , identifier[_get_indices] ( identifier[interpolation] ), identifier[axis] =- literal[int] )
keyword[elif] identifier[interpolation] == literal[string] :
identifier[gathered_y] = literal[int] *(
identifier[tf] . identifier[gather] ( identifier[sorted_y] , identifier[_get_indices] ( literal[string] ), identifier[axis] =- literal[int] )+
identifier[tf] . identifier[gather] ( identifier[sorted_y] , identifier[_get_indices] ( literal[string] ), identifier[axis] =- literal[int] ))
keyword[elif] identifier[interpolation] == literal[string] :
identifier[larger_y_idx] = identifier[_get_indices] ( literal[string] )
identifier[exact_idx] =( identifier[d] - literal[int] )* identifier[frac_at_q_or_above]
keyword[if] identifier[preserve_gradients] :
identifier[smaller_y_idx] = identifier[tf] . identifier[maximum] ( identifier[larger_y_idx] - literal[int] , literal[int] )
identifier[larger_y_idx] = identifier[tf] . identifier[minimum] ( identifier[smaller_y_idx] + literal[int] , identifier[tf] . identifier[shape] ( identifier[input] = identifier[y] )[- literal[int] ]- literal[int] )
identifier[fraction] = identifier[tf] . identifier[cast] ( identifier[larger_y_idx] , identifier[tf] . identifier[float64] )- identifier[exact_idx]
keyword[else] :
identifier[smaller_y_idx] = identifier[_get_indices] ( literal[string] )
identifier[fraction] = identifier[tf] . identifier[math] . identifier[ceil] (( identifier[d] - literal[int] )* identifier[frac_at_q_or_above] )- identifier[exact_idx]
identifier[fraction] = identifier[tf] . identifier[cast] ( identifier[fraction] , identifier[y] . identifier[dtype] )
identifier[gathered_y] =(
identifier[tf] . identifier[gather] ( identifier[sorted_y] , identifier[larger_y_idx] , identifier[axis] =- literal[int] )*( literal[int] - identifier[fraction] )+
identifier[tf] . identifier[gather] ( identifier[sorted_y] , identifier[smaller_y_idx] , identifier[axis] =- literal[int] )* identifier[fraction] )
keyword[if] identifier[x] . identifier[dtype] keyword[in] ( identifier[tf] . identifier[bfloat16] , identifier[tf] . identifier[float16] , identifier[tf] . identifier[float32] , identifier[tf] . identifier[float64] ):
identifier[nan_batch_members] = identifier[tf] . identifier[reduce_any] (
identifier[input_tensor] = identifier[tf] . identifier[math] . identifier[is_nan] ( identifier[x] ), identifier[axis] = identifier[axis] )
identifier[right_rank_matched_shape] = identifier[tf] . identifier[pad] (
identifier[tensor] = identifier[tf] . identifier[shape] ( identifier[input] = identifier[nan_batch_members] ),
identifier[paddings] =[[ literal[int] , identifier[tf] . identifier[rank] ( identifier[input] = identifier[q] )]],
identifier[constant_values] = literal[int] )
identifier[nan_batch_members] = identifier[tf] . identifier[reshape] (
identifier[nan_batch_members] , identifier[shape] = identifier[right_rank_matched_shape] )
identifier[shape_gathered_y] = identifier[tf] . identifier[shape] ( identifier[input] = identifier[gathered_y] )
identifier[nan] = identifier[np] . identifier[array] ( identifier[np] . identifier[nan] , identifier[gathered_y] . identifier[dtype] . identifier[as_numpy_dtype] )
identifier[gathered_y] = identifier[tf] . identifier[where] (
identifier[tf] . identifier[broadcast_to] ( identifier[nan_batch_members] , identifier[shape_gathered_y] ),
identifier[tf] . identifier[fill] ( identifier[shape_gathered_y] , identifier[nan] ),
identifier[gathered_y] )
keyword[if] identifier[keep_dims] :
keyword[if] identifier[axis] keyword[is] keyword[None] :
identifier[ones_vec] = identifier[tf] . identifier[ones] (
identifier[shape] =[ identifier[_get_best_effort_ndims] ( identifier[x] )+ identifier[_get_best_effort_ndims] ( identifier[q] )],
identifier[dtype] = identifier[tf] . identifier[int32] )
identifier[gathered_y] *= identifier[tf] . identifier[ones] ( identifier[ones_vec] , identifier[dtype] = identifier[x] . identifier[dtype] )
keyword[else] :
identifier[gathered_y] = identifier[_insert_back_keep_dims] ( identifier[gathered_y] , identifier[axis] )
keyword[return] identifier[distribution_util] . identifier[rotate_transpose] ( identifier[gathered_y] , identifier[tf] . identifier[rank] ( identifier[q] )) | def percentile(x, q, axis=None, interpolation=None, keep_dims=False, validate_args=False, preserve_gradients=True, name=None):
"""Compute the `q`-th percentile(s) of `x`.
Given a vector `x`, the `q`-th percentile of `x` is the value `q / 100` of the
way from the minimum to the maximum in a sorted copy of `x`.
The values and distances of the two nearest neighbors as well as the
`interpolation` parameter will determine the percentile if the normalized
ranking does not match the location of `q` exactly.
This function is the same as the median if `q = 50`, the same as the minimum
if `q = 0` and the same as the maximum if `q = 100`.
Multiple percentiles can be computed at once by using `1-D` vector `q`.
Dimension zero of the returned `Tensor` will index the different percentiles.
Compare to `numpy.percentile`.
Args:
x: Numeric `N-D` `Tensor` with `N > 0`. If `axis` is not `None`,
`x` must have statically known number of dimensions.
q: Scalar or vector `Tensor` with values in `[0, 100]`. The percentile(s).
axis: Optional `0-D` or `1-D` integer `Tensor` with constant values. The
axis that index independent samples over which to return the desired
percentile. If `None` (the default), treat every dimension as a sample
dimension, returning a scalar.
interpolation : {'nearest', 'linear', 'lower', 'higher', 'midpoint'}.
Default value: 'nearest'. This specifies the interpolation method to
use when the desired quantile lies between two data points `i < j`:
* linear: i + (j - i) * fraction, where fraction is the fractional part
of the index surrounded by i and j.
* lower: `i`.
* higher: `j`.
* nearest: `i` or `j`, whichever is nearest.
* midpoint: (i + j) / 2.
`linear` and `midpoint` interpolation do not work with integer dtypes.
keep_dims: Python `bool`. If `True`, the last dimension is kept with size 1
If `False`, the last dimension is removed from the output shape.
validate_args: Whether to add runtime checks of argument validity. If
False, and arguments are incorrect, correct behavior is not guaranteed.
preserve_gradients: Python `bool`. If `True`, ensure that gradient w.r.t
the percentile `q` is preserved in the case of linear interpolation.
If `False`, the gradient will be (incorrectly) zero when `q` corresponds
to a point in `x`.
name: A Python string name to give this `Op`. Default is 'percentile'
Returns:
A `(rank(q) + N - len(axis))` dimensional `Tensor` of same dtype as `x`, or,
if `axis` is `None`, a `rank(q)` `Tensor`. The first `rank(q)` dimensions
index quantiles for different values of `q`.
Raises:
ValueError: If argument 'interpolation' is not an allowed type.
ValueError: If interpolation type not compatible with `dtype`.
#### Examples
```python
# Get 30th percentile with default ('nearest') interpolation.
x = [1., 2., 3., 4.]
tfp.stats.percentile(x, q=30.)
==> 2.0
# Get 30th percentile with 'linear' interpolation.
x = [1., 2., 3., 4.]
tfp.stats.percentile(x, q=30., interpolation='linear')
==> 1.9
# Get 30th and 70th percentiles with 'lower' interpolation
x = [1., 2., 3., 4.]
tfp.stats.percentile(x, q=[30., 70.], interpolation='lower')
==> [1., 3.]
# Get 100th percentile (maximum). By default, this is computed over every dim
x = [[1., 2.]
[3., 4.]]
tfp.stats.percentile(x, q=100.)
==> 4.
# Treat the leading dim as indexing samples, and find the 100th quantile (max)
# over all such samples.
x = [[1., 2.]
[3., 4.]]
tfp.stats.percentile(x, q=100., axis=[0])
==> [3., 4.]
```
"""
name = name or 'percentile'
allowed_interpolations = {'linear', 'lower', 'higher', 'nearest', 'midpoint'}
if interpolation is None:
interpolation = 'nearest' # depends on [control=['if'], data=['interpolation']]
elif interpolation not in allowed_interpolations:
raise ValueError('Argument `interpolation` must be in %s. Found %s' % (allowed_interpolations, interpolation)) # depends on [control=['if'], data=['interpolation', 'allowed_interpolations']]
with tf.compat.v1.name_scope(name, values=[x, q]):
x = tf.convert_to_tensor(value=x, name='x')
if interpolation in {'linear', 'midpoint'} and x.dtype.is_integer:
raise TypeError('{} interpolation not allowed with dtype {}'.format(interpolation, x.dtype)) # depends on [control=['if'], data=[]]
# Double is needed here and below, else we get the wrong index if the array
# is huge along axis.
q = tf.cast(q, tf.float64)
_get_static_ndims(q, expect_ndims_no_more_than=1)
if validate_args:
q = distribution_util.with_dependencies([tf.compat.v1.assert_rank_in(q, [0, 1]), tf.compat.v1.assert_greater_equal(q, tf.cast(0.0, tf.float64)), tf.compat.v1.assert_less_equal(q, tf.cast(100.0, tf.float64))], q) # depends on [control=['if'], data=[]]
# Move `axis` dims of `x` to the rightmost, call it `y`.
if axis is None:
y = tf.reshape(x, [-1]) # depends on [control=['if'], data=[]]
else:
x_ndims = _get_static_ndims(x, expect_static=True, expect_ndims_at_least=1)
axis = _make_static_axis_non_negative_list(axis, x_ndims)
y = _move_dims_to_flat_end(x, axis, x_ndims, right_end=True)
frac_at_q_or_above = 1.0 - q / 100.0
# Sort everything, not just the top 'k' entries, which allows multiple calls
# to sort only once (under the hood) and use CSE.
sorted_y = _sort_tensor(y)
d = tf.cast(tf.shape(input=y)[-1], tf.float64)
def _get_indices(interp_type):
"""Get values of y at the indices implied by interp_type."""
# Note `lower` <--> ceiling. Confusing, huh? Due to the fact that
# _sort_tensor sorts highest to lowest, tf.ceil corresponds to the higher
# index, but the lower value of y!
if interp_type == 'lower':
indices = tf.math.ceil((d - 1) * frac_at_q_or_above) # depends on [control=['if'], data=[]]
elif interp_type == 'higher':
indices = tf.floor((d - 1) * frac_at_q_or_above) # depends on [control=['if'], data=[]]
elif interp_type == 'nearest':
indices = tf.round((d - 1) * frac_at_q_or_above) # depends on [control=['if'], data=[]]
# d - 1 will be distinct from d in int32, but not necessarily double.
# So clip to avoid out of bounds errors.
return tf.clip_by_value(tf.cast(indices, tf.int32), 0, tf.shape(input=y)[-1] - 1)
if interpolation in ['nearest', 'lower', 'higher']:
gathered_y = tf.gather(sorted_y, _get_indices(interpolation), axis=-1) # depends on [control=['if'], data=['interpolation']]
elif interpolation == 'midpoint':
gathered_y = 0.5 * (tf.gather(sorted_y, _get_indices('lower'), axis=-1) + tf.gather(sorted_y, _get_indices('higher'), axis=-1)) # depends on [control=['if'], data=[]]
elif interpolation == 'linear':
# Copy-paste of docstring on interpolation:
# linear: i + (j - i) * fraction, where fraction is the fractional part
# of the index surrounded by i and j.
larger_y_idx = _get_indices('lower')
exact_idx = (d - 1) * frac_at_q_or_above
if preserve_gradients:
# If q corresponds to a point in x, we will initially have
# larger_y_idx == smaller_y_idx.
# This results in the gradient w.r.t. fraction being zero (recall `q`
# enters only through `fraction`...and see that things cancel).
# The fix is to ensure that smaller_y_idx and larger_y_idx are always
# separated by exactly 1.
smaller_y_idx = tf.maximum(larger_y_idx - 1, 0)
larger_y_idx = tf.minimum(smaller_y_idx + 1, tf.shape(input=y)[-1] - 1)
fraction = tf.cast(larger_y_idx, tf.float64) - exact_idx # depends on [control=['if'], data=[]]
else:
smaller_y_idx = _get_indices('higher')
fraction = tf.math.ceil((d - 1) * frac_at_q_or_above) - exact_idx
fraction = tf.cast(fraction, y.dtype)
gathered_y = tf.gather(sorted_y, larger_y_idx, axis=-1) * (1 - fraction) + tf.gather(sorted_y, smaller_y_idx, axis=-1) * fraction # depends on [control=['if'], data=[]]
# Propagate NaNs
if x.dtype in (tf.bfloat16, tf.float16, tf.float32, tf.float64):
# Apparently tf.is_nan doesn't like other dtypes
nan_batch_members = tf.reduce_any(input_tensor=tf.math.is_nan(x), axis=axis)
right_rank_matched_shape = tf.pad(tensor=tf.shape(input=nan_batch_members), paddings=[[0, tf.rank(input=q)]], constant_values=1)
nan_batch_members = tf.reshape(nan_batch_members, shape=right_rank_matched_shape)
shape_gathered_y = tf.shape(input=gathered_y)
nan = np.array(np.nan, gathered_y.dtype.as_numpy_dtype)
gathered_y = tf.where(tf.broadcast_to(nan_batch_members, shape_gathered_y), tf.fill(shape_gathered_y, nan), gathered_y) # depends on [control=['if'], data=[]]
# Expand dimensions if requested
if keep_dims:
if axis is None:
ones_vec = tf.ones(shape=[_get_best_effort_ndims(x) + _get_best_effort_ndims(q)], dtype=tf.int32)
gathered_y *= tf.ones(ones_vec, dtype=x.dtype) # depends on [control=['if'], data=[]]
else:
gathered_y = _insert_back_keep_dims(gathered_y, axis) # depends on [control=['if'], data=[]]
# If q is a scalar, then result has the right shape.
# If q is a vector, then result has trailing dim of shape q.shape, which
# needs to be rotated to dim 0.
return distribution_util.rotate_transpose(gathered_y, tf.rank(q)) # depends on [control=['with'], data=[]] |
def expected_eye_positions(bounding_box, padding = None):
"""expected_eye_positions(bounding_box, padding) -> eyes
Computes the expected eye positions based on the relative coordinates of the bounding box.
This function can be used to translate between bounding-box-based image cropping and eye-location-based alignment.
The returned eye locations return the **average** eye locations, no landmark detection is performed.
**Parameters:**
``bounding_box`` : :py:class:`BoundingBox`
The face bounding box as detected by one of the functions in ``bob.ip.facedetect``.
``padding`` : {'top':float, 'bottom':float, 'left':float, 'right':float}
The padding that was used for the ``eyes`` source in :py:func:`bounding_box_from_annotation`, has a proper default.
**Returns:**
``eyes`` : {'reye' : (rey, rex), 'leye' : (ley, lex)}
A dictionary containing the average left and right eye annotation.
"""
# Fall back to the module-wide default padding for the 'eyes' annotation source.
if padding is None:
padding = default_paddings['eyes']
# Note: padding['bottom'] is intentionally unused here — only the top offset and
# the horizontal extent matter for reconstructing the eye positions.
top, left, right = padding['top'], padding['left'], padding['right']
# The padding fractions are expressed in units of the inter-eye distance, so the
# box width divided by the total horizontal padding span recovers that distance.
# NOTE(review): assumes bounding_box.size is (height, width) — confirm.
inter_eye_distance = (bounding_box.size[1]) / (right - left)
# Both eyes sit the same distance (top * inter_eye_distance) below the box top;
# horizontally each eye is pulled inward from its box edge by half the
# corresponding padding fraction. Coordinates are (y, x) pairs.
return {
'reye':(bounding_box.top_f - top*inter_eye_distance, bounding_box.left_f - left/2.*inter_eye_distance),
'leye':(bounding_box.top_f - top*inter_eye_distance, bounding_box.right_f - right/2.*inter_eye_distance)
} | def function[expected_eye_positions, parameter[bounding_box, padding]]:
constant[expected_eye_positions(bounding_box, padding) -> eyes
Computes the expected eye positions based on the relative coordinates of the bounding box.
This function can be used to translate between bounding-box-based image cropping and eye-location-based alignment.
The returned eye locations return the **average** eye locations, no landmark detection is performed.
**Parameters:**
``bounding_box`` : :py:class:`BoundingBox`
The face bounding box as detected by one of the functions in ``bob.ip.facedetect``.
``padding`` : {'top':float, 'bottom':float, 'left':float, 'right':float}
The padding that was used for the ``eyes`` source in :py:func:`bounding_box_from_annotation`, has a proper default.
**Returns:**
``eyes`` : {'reye' : (rey, rex), 'leye' : (ley, lex)}
A dictionary containing the average left and right eye annotation.
]
if compare[name[padding] is constant[None]] begin[:]
variable[padding] assign[=] call[name[default_paddings]][constant[eyes]]
<ast.Tuple object at 0x7da18dc99990> assign[=] tuple[[<ast.Subscript object at 0x7da18dc98bb0>, <ast.Subscript object at 0x7da18dc99720>, <ast.Subscript object at 0x7da18dc9a500>]]
variable[inter_eye_distance] assign[=] binary_operation[call[name[bounding_box].size][constant[1]] / binary_operation[name[right] - name[left]]]
return[dictionary[[<ast.Constant object at 0x7da204565300>, <ast.Constant object at 0x7da204564670>], [<ast.Tuple object at 0x7da204566ad0>, <ast.Tuple object at 0x7da204565150>]]] | keyword[def] identifier[expected_eye_positions] ( identifier[bounding_box] , identifier[padding] = keyword[None] ):
literal[string]
keyword[if] identifier[padding] keyword[is] keyword[None] :
identifier[padding] = identifier[default_paddings] [ literal[string] ]
identifier[top] , identifier[left] , identifier[right] = identifier[padding] [ literal[string] ], identifier[padding] [ literal[string] ], identifier[padding] [ literal[string] ]
identifier[inter_eye_distance] =( identifier[bounding_box] . identifier[size] [ literal[int] ])/( identifier[right] - identifier[left] )
keyword[return] {
literal[string] :( identifier[bounding_box] . identifier[top_f] - identifier[top] * identifier[inter_eye_distance] , identifier[bounding_box] . identifier[left_f] - identifier[left] / literal[int] * identifier[inter_eye_distance] ),
literal[string] :( identifier[bounding_box] . identifier[top_f] - identifier[top] * identifier[inter_eye_distance] , identifier[bounding_box] . identifier[right_f] - identifier[right] / literal[int] * identifier[inter_eye_distance] )
} | def expected_eye_positions(bounding_box, padding=None):
"""expected_eye_positions(bounding_box, padding) -> eyes
Computes the expected eye positions based on the relative coordinates of the bounding box.
This function can be used to translate between bounding-box-based image cropping and eye-location-based alignment.
The returned eye locations return the **average** eye locations, no landmark detection is performed.
**Parameters:**
``bounding_box`` : :py:class:`BoundingBox`
The face bounding box as detected by one of the functions in ``bob.ip.facedetect``.
``padding`` : {'top':float, 'bottom':float, 'left':float, 'right':float}
The padding that was used for the ``eyes`` source in :py:func:`bounding_box_from_annotation`, has a proper default.
**Returns:**
``eyes`` : {'reye' : (rey, rex), 'leye' : (ley, lex)}
A dictionary containing the average left and right eye annotation.
"""
if padding is None:
padding = default_paddings['eyes'] # depends on [control=['if'], data=['padding']]
(top, left, right) = (padding['top'], padding['left'], padding['right'])
inter_eye_distance = bounding_box.size[1] / (right - left)
return {'reye': (bounding_box.top_f - top * inter_eye_distance, bounding_box.left_f - left / 2.0 * inter_eye_distance), 'leye': (bounding_box.top_f - top * inter_eye_distance, bounding_box.right_f - right / 2.0 * inter_eye_distance)} |
def is_valid_python(tkn: str) -> bool:
"""Determine whether tkn is a valid python identifier
:param tkn: candidate string to test
:return: True iff ``tkn`` parses as a single bare name (e.g. ``"x"``,
but not ``"x.y"``, ``"x()"``, or a keyword/statement)
"""
# A string that is not even syntactically valid Python cannot be an identifier.
try:
root = ast.parse(tkn)
except SyntaxError:
return False
# A lone identifier parses to a module body with exactly one expression
# statement whose value is a bare ast.Name node.
return len(root.body) == 1 and isinstance(root.body[0], ast.Expr) and isinstance(root.body[0].value, ast.Name) | def function[is_valid_python, parameter[tkn]]:
constant[Determine whether tkn is a valid python identifier
:param tkn:
:return:
]
<ast.Try object at 0x7da18dc9a380>
return[<ast.BoolOp object at 0x7da18dc9aa10>] | keyword[def] identifier[is_valid_python] ( identifier[tkn] : identifier[str] )-> identifier[bool] :
literal[string]
keyword[try] :
identifier[root] = identifier[ast] . identifier[parse] ( identifier[tkn] )
keyword[except] identifier[SyntaxError] :
keyword[return] keyword[False]
keyword[return] identifier[len] ( identifier[root] . identifier[body] )== literal[int] keyword[and] identifier[isinstance] ( identifier[root] . identifier[body] [ literal[int] ], identifier[ast] . identifier[Expr] ) keyword[and] identifier[isinstance] ( identifier[root] . identifier[body] [ literal[int] ]. identifier[value] , identifier[ast] . identifier[Name] ) | def is_valid_python(tkn: str) -> bool:
"""Determine whether tkn is a valid python identifier
:param tkn:
:return:
"""
try:
root = ast.parse(tkn) # depends on [control=['try'], data=[]]
except SyntaxError:
return False # depends on [control=['except'], data=[]]
return len(root.body) == 1 and isinstance(root.body[0], ast.Expr) and isinstance(root.body[0].value, ast.Name) |
def select_generic_executable(workflow, exe_tag):
""" Returns a class that is appropriate for setting up jobs to run executables
having specific tags in the workflow config.
Executables should not be "specialized" jobs fitting into one of the
select_XXX_class functions above, i.e. not a matched filter or template
bank job, which require extra setup.
Parameters
----------
workflow : pycbc.workflow.core.Workflow
The Workflow instance.
exe_tag : string
The name of the config section storing options for this executable and
the option giving the executable path in the [executables] section.
Returns
--------
exe_class : Sub-class of pycbc.workflow.core.Executable that holds utility
functions appropriate for the given executable. Instances of the class
('jobs') **must** have a method job.create_node()
Raises
------
NotImplementedError
If no Executable class is registered for the resolved executable name.
"""
# Resolve the configured path for this tag, then dispatch on the bare
# executable file name (so the mapping is independent of install location).
exe_path = workflow.cp.get("executables", exe_tag)
exe_name = os.path.basename(exe_path)
# Static registry mapping executable file names to their job classes.
exe_to_class_map = {
'ligolw_add' : LigolwAddExecutable,
'ligolw_cbc_sstinca' : LigolwSSthincaExecutable,
'pycbc_sqlite_simplify' : PycbcSqliteSimplifyExecutable,
'ligolw_cbc_cluster_coincs': SQLInOutExecutable,
'ligolw_cbc_repop_coinc' : SQLInOutExecutable,
'repop_coinc_expfit' : SQLInOutExecutable,
'ligolw_cbc_dbinjfind' : SQLInOutExecutable,
'lalapps_inspinj' : LalappsInspinjExecutable,
'pycbc_dark_vs_bright_injections' : PycbcDarkVsBrightInjectionsExecutable,
'pycbc_timeslides' : PycbcTimeslidesExecutable,
'pycbc_compute_durations' : ComputeDurationsExecutable,
'pycbc_calculate_far' : PycbcCalculateFarExecutable,
"pycbc_run_sqlite" : SQLInOutExecutable,
# FIXME: We may end up with more than one class for using ligolw_sqlite
# How to deal with this?
"ligolw_sqlite" : ExtractToXMLExecutable,
"pycbc_inspinjfind" : InspinjfindExecutable,
"pycbc_pickle_horizon_distances" : PycbcPickleHorizonDistsExecutable,
"pycbc_combine_likelihood" : PycbcCombineLikelihoodExecutable,
"pycbc_gen_ranking_data" : PycbcGenerateRankingDataExecutable,
"pycbc_calculate_likelihood" : PycbcCalculateLikelihoodExecutable,
"gstlal_inspiral_marginalize_likelihood" : GstlalMarginalizeLikelihoodExecutable,
"pycbc_compute_far_from_snr_chisq_histograms" : GstlalFarfromsnrchisqhistExecutable,
"gstlal_inspiral_plot_sensitivity" : GstlalPlotSensitivity,
"gstlal_inspiral_plot_background" : GstlalPlotBackground,
"gstlal_inspiral_plotsummary" : GstlalPlotSummary,
"gstlal_inspiral_summary_page" : GstlalSummaryPage,
"pycbc_condition_strain" : PycbcConditionStrainExecutable
}
try:
return exe_to_class_map[exe_name]
except KeyError:
# Should we try some sort of default class??
# Fail loudly rather than guessing: an unknown executable would
# otherwise produce a broken workflow node later on.
raise NotImplementedError(
"No job class exists for executable %s, exiting" % exe_name) | def function[select_generic_executable, parameter[workflow, exe_tag]]:
constant[ Returns a class that is appropriate for setting up jobs to run executables
having specific tags in the workflow config.
Executables should not be "specialized" jobs fitting into one of the
select_XXX_class functions above, i.e. not a matched filter or template
bank job, which require extra setup.
Parameters
----------
workflow : pycbc.workflow.core.Workflow
The Workflow instance.
exe_tag : string
The name of the config section storing options for this executable and
the option giving the executable path in the [executables] section.
Returns
--------
exe_class : Sub-class of pycbc.workflow.core.Executable that holds utility
functions appropriate for the given executable. Instances of the class
('jobs') **must** have a method job.create_node()
]
variable[exe_path] assign[=] call[name[workflow].cp.get, parameter[constant[executables], name[exe_tag]]]
variable[exe_name] assign[=] call[name[os].path.basename, parameter[name[exe_path]]]
variable[exe_to_class_map] assign[=] dictionary[[<ast.Constant object at 0x7da207f03580>, <ast.Constant object at 0x7da207f03b50>, <ast.Constant object at 0x7da207f03520>, <ast.Constant object at 0x7da207f03af0>, <ast.Constant object at 0x7da207f009a0>, <ast.Constant object at 0x7da207f00d60>, <ast.Constant object at 0x7da207f03760>, <ast.Constant object at 0x7da207f01150>, <ast.Constant object at 0x7da207f03c70>, <ast.Constant object at 0x7da207f01d50>, <ast.Constant object at 0x7da207f03190>, <ast.Constant object at 0x7da207f032e0>, <ast.Constant object at 0x7da207f038b0>, <ast.Constant object at 0x7da207f00bb0>, <ast.Constant object at 0x7da207f00160>, <ast.Constant object at 0x7da207f00490>, <ast.Constant object at 0x7da207f01fc0>, <ast.Constant object at 0x7da207f011b0>, <ast.Constant object at 0x7da207f01630>, <ast.Constant object at 0x7da207f03550>, <ast.Constant object at 0x7da207f01f30>, <ast.Constant object at 0x7da207f03070>, <ast.Constant object at 0x7da207f03400>, <ast.Constant object at 0x7da207f02590>, <ast.Constant object at 0x7da207f03250>, <ast.Constant object at 0x7da207f017e0>], [<ast.Name object at 0x7da207f01c00>, <ast.Name object at 0x7da207f03340>, <ast.Name object at 0x7da207f03820>, <ast.Name object at 0x7da207f01900>, <ast.Name object at 0x7da207f02d40>, <ast.Name object at 0x7da207f039a0>, <ast.Name object at 0x7da207f03fd0>, <ast.Name object at 0x7da207f003a0>, <ast.Name object at 0x7da207f016f0>, <ast.Name object at 0x7da207f012d0>, <ast.Name object at 0x7da207f023b0>, <ast.Name object at 0x7da207f03910>, <ast.Name object at 0x7da207f024d0>, <ast.Name object at 0x7da207f03700>, <ast.Name object at 0x7da207f02e90>, <ast.Name object at 0x7da207f00610>, <ast.Name object at 0x7da207f03a00>, <ast.Name object at 0x7da207f03fa0>, <ast.Name object at 0x7da207f01330>, <ast.Name object at 0x7da207f03f10>, <ast.Name object at 0x7da207f00280>, <ast.Name object at 0x7da207f03100>, <ast.Name object at 0x7da207f01000>, <ast.Name object at 
0x7da207f039d0>, <ast.Name object at 0x7da207f00ee0>, <ast.Name object at 0x7da207f01660>]]
<ast.Try object at 0x7da207f00a90> | keyword[def] identifier[select_generic_executable] ( identifier[workflow] , identifier[exe_tag] ):
literal[string]
identifier[exe_path] = identifier[workflow] . identifier[cp] . identifier[get] ( literal[string] , identifier[exe_tag] )
identifier[exe_name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[exe_path] )
identifier[exe_to_class_map] ={
literal[string] : identifier[LigolwAddExecutable] ,
literal[string] : identifier[LigolwSSthincaExecutable] ,
literal[string] : identifier[PycbcSqliteSimplifyExecutable] ,
literal[string] : identifier[SQLInOutExecutable] ,
literal[string] : identifier[SQLInOutExecutable] ,
literal[string] : identifier[SQLInOutExecutable] ,
literal[string] : identifier[SQLInOutExecutable] ,
literal[string] : identifier[LalappsInspinjExecutable] ,
literal[string] : identifier[PycbcDarkVsBrightInjectionsExecutable] ,
literal[string] : identifier[PycbcTimeslidesExecutable] ,
literal[string] : identifier[ComputeDurationsExecutable] ,
literal[string] : identifier[PycbcCalculateFarExecutable] ,
literal[string] : identifier[SQLInOutExecutable] ,
literal[string] : identifier[ExtractToXMLExecutable] ,
literal[string] : identifier[InspinjfindExecutable] ,
literal[string] : identifier[PycbcPickleHorizonDistsExecutable] ,
literal[string] : identifier[PycbcCombineLikelihoodExecutable] ,
literal[string] : identifier[PycbcGenerateRankingDataExecutable] ,
literal[string] : identifier[PycbcCalculateLikelihoodExecutable] ,
literal[string] : identifier[GstlalMarginalizeLikelihoodExecutable] ,
literal[string] : identifier[GstlalFarfromsnrchisqhistExecutable] ,
literal[string] : identifier[GstlalPlotSensitivity] ,
literal[string] : identifier[GstlalPlotBackground] ,
literal[string] : identifier[GstlalPlotSummary] ,
literal[string] : identifier[GstlalSummaryPage] ,
literal[string] : identifier[PycbcConditionStrainExecutable]
}
keyword[try] :
keyword[return] identifier[exe_to_class_map] [ identifier[exe_name] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[NotImplementedError] (
literal[string] % identifier[exe_name] ) | def select_generic_executable(workflow, exe_tag):
""" Returns a class that is appropriate for setting up jobs to run executables
having specific tags in the workflow config.
Executables should not be "specialized" jobs fitting into one of the
select_XXX_class functions above, i.e. not a matched filter or template
bank job, which require extra setup.
Parameters
----------
workflow : pycbc.workflow.core.Workflow
The Workflow instance.
exe_tag : string
The name of the config section storing options for this executable and
the option giving the executable path in the [executables] section.
Returns
--------
exe_class : Sub-class of pycbc.workflow.core.Executable that holds utility
functions appropriate for the given executable. Instances of the class
('jobs') **must** have a method job.create_node()
"""
exe_path = workflow.cp.get('executables', exe_tag)
exe_name = os.path.basename(exe_path)
# FIXME: We may end up with more than one class for using ligolw_sqlite
# How to deal with this?
exe_to_class_map = {'ligolw_add': LigolwAddExecutable, 'ligolw_cbc_sstinca': LigolwSSthincaExecutable, 'pycbc_sqlite_simplify': PycbcSqliteSimplifyExecutable, 'ligolw_cbc_cluster_coincs': SQLInOutExecutable, 'ligolw_cbc_repop_coinc': SQLInOutExecutable, 'repop_coinc_expfit': SQLInOutExecutable, 'ligolw_cbc_dbinjfind': SQLInOutExecutable, 'lalapps_inspinj': LalappsInspinjExecutable, 'pycbc_dark_vs_bright_injections': PycbcDarkVsBrightInjectionsExecutable, 'pycbc_timeslides': PycbcTimeslidesExecutable, 'pycbc_compute_durations': ComputeDurationsExecutable, 'pycbc_calculate_far': PycbcCalculateFarExecutable, 'pycbc_run_sqlite': SQLInOutExecutable, 'ligolw_sqlite': ExtractToXMLExecutable, 'pycbc_inspinjfind': InspinjfindExecutable, 'pycbc_pickle_horizon_distances': PycbcPickleHorizonDistsExecutable, 'pycbc_combine_likelihood': PycbcCombineLikelihoodExecutable, 'pycbc_gen_ranking_data': PycbcGenerateRankingDataExecutable, 'pycbc_calculate_likelihood': PycbcCalculateLikelihoodExecutable, 'gstlal_inspiral_marginalize_likelihood': GstlalMarginalizeLikelihoodExecutable, 'pycbc_compute_far_from_snr_chisq_histograms': GstlalFarfromsnrchisqhistExecutable, 'gstlal_inspiral_plot_sensitivity': GstlalPlotSensitivity, 'gstlal_inspiral_plot_background': GstlalPlotBackground, 'gstlal_inspiral_plotsummary': GstlalPlotSummary, 'gstlal_inspiral_summary_page': GstlalSummaryPage, 'pycbc_condition_strain': PycbcConditionStrainExecutable}
try:
return exe_to_class_map[exe_name] # depends on [control=['try'], data=[]]
except KeyError:
# Should we try some sort of default class??
raise NotImplementedError('No job class exists for executable %s, exiting' % exe_name) # depends on [control=['except'], data=[]] |
def on_security_data_node(self, node):
"""process a securityData node - FIXME: currently not handling relateDate node
Walks the fieldData array of the given Bloomberg securityData node,
collects one list of values per requested field (plus 'date'), and
stores the result as a date-indexed DataFrame in self.response keyed
by the security identifier.
"""
sid = XmlHelper.get_child_value(node, 'security')
farr = node.GetElement('fieldData')
# Accumulate column -> list of per-date values.
dmap = defaultdict(list)
for i in range(farr.NumValues):
pt = farr.GetValue(i)
# allow_missing=1 keeps the columns aligned when a field has no value
# for this date point. NOTE(review): list comprehension is used purely
# for its append side effect here.
[dmap[f].append(XmlHelper.get_child_value(pt, f, allow_missing=1)) for f in ['date'] + self.fields]
# 'date' becomes the frame index rather than a data column.
idx = dmap.pop('date')
frame = DataFrame(dmap, columns=self.fields, index=idx)
frame.index.name = 'date'
self.response[sid] = frame | def function[on_security_data_node, parameter[self, node]]:
constant[process a securityData node - FIXME: currently not handling relateDate node ]
variable[sid] assign[=] call[name[XmlHelper].get_child_value, parameter[name[node], constant[security]]]
variable[farr] assign[=] call[name[node].GetElement, parameter[constant[fieldData]]]
variable[dmap] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[i]] in starred[call[name[range], parameter[name[farr].NumValues]]] begin[:]
variable[pt] assign[=] call[name[farr].GetValue, parameter[name[i]]]
<ast.ListComp object at 0x7da1b1eb5d50>
variable[idx] assign[=] call[name[dmap].pop, parameter[constant[date]]]
variable[frame] assign[=] call[name[DataFrame], parameter[name[dmap]]]
name[frame].index.name assign[=] constant[date]
call[name[self].response][name[sid]] assign[=] name[frame] | keyword[def] identifier[on_security_data_node] ( identifier[self] , identifier[node] ):
literal[string]
identifier[sid] = identifier[XmlHelper] . identifier[get_child_value] ( identifier[node] , literal[string] )
identifier[farr] = identifier[node] . identifier[GetElement] ( literal[string] )
identifier[dmap] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[farr] . identifier[NumValues] ):
identifier[pt] = identifier[farr] . identifier[GetValue] ( identifier[i] )
[ identifier[dmap] [ identifier[f] ]. identifier[append] ( identifier[XmlHelper] . identifier[get_child_value] ( identifier[pt] , identifier[f] , identifier[allow_missing] = literal[int] )) keyword[for] identifier[f] keyword[in] [ literal[string] ]+ identifier[self] . identifier[fields] ]
identifier[idx] = identifier[dmap] . identifier[pop] ( literal[string] )
identifier[frame] = identifier[DataFrame] ( identifier[dmap] , identifier[columns] = identifier[self] . identifier[fields] , identifier[index] = identifier[idx] )
identifier[frame] . identifier[index] . identifier[name] = literal[string]
identifier[self] . identifier[response] [ identifier[sid] ]= identifier[frame] | def on_security_data_node(self, node):
"""process a securityData node - FIXME: currently not handling relateDate node """
sid = XmlHelper.get_child_value(node, 'security')
farr = node.GetElement('fieldData')
dmap = defaultdict(list)
for i in range(farr.NumValues):
pt = farr.GetValue(i)
[dmap[f].append(XmlHelper.get_child_value(pt, f, allow_missing=1)) for f in ['date'] + self.fields] # depends on [control=['for'], data=['i']]
idx = dmap.pop('date')
frame = DataFrame(dmap, columns=self.fields, index=idx)
frame.index.name = 'date'
self.response[sid] = frame |
def open_and_close_for_session(self, session_label):
"""
Returns a tuple of timestamps of the open and close of the session
represented by the given label.
Parameters
----------
session_label: pd.Timestamp
The session whose open and close are desired.
Returns
-------
(Timestamp, Timestamp)
The open and close for the given session.
"""
sched = self.schedule
# `market_open` and `market_close` should be timezone aware, but pandas
# 0.16.1 does not appear to support this:
# http://pandas.pydata.org/pandas-docs/stable/whatsnew.html#datetime-with-tz # noqa
# The schedule stores naive timestamps, so localize to UTC on the way out.
# .at gives a fast scalar lookup by (row label, column name).
return (
sched.at[session_label, 'market_open'].tz_localize(UTC),
sched.at[session_label, 'market_close'].tz_localize(UTC),
) | def function[open_and_close_for_session, parameter[self, session_label]]:
constant[
Returns a tuple of timestamps of the open and close of the session
represented by the given label.
Parameters
----------
session_label: pd.Timestamp
The session whose open and close are desired.
Returns
-------
(Timestamp, Timestamp)
The open and close for the given session.
]
variable[sched] assign[=] name[self].schedule
return[tuple[[<ast.Call object at 0x7da18fe93880>, <ast.Call object at 0x7da18fe93fd0>]]] | keyword[def] identifier[open_and_close_for_session] ( identifier[self] , identifier[session_label] ):
literal[string]
identifier[sched] = identifier[self] . identifier[schedule]
keyword[return] (
identifier[sched] . identifier[at] [ identifier[session_label] , literal[string] ]. identifier[tz_localize] ( identifier[UTC] ),
identifier[sched] . identifier[at] [ identifier[session_label] , literal[string] ]. identifier[tz_localize] ( identifier[UTC] ),
) | def open_and_close_for_session(self, session_label):
"""
Returns a tuple of timestamps of the open and close of the session
represented by the given label.
Parameters
----------
session_label: pd.Timestamp
The session whose open and close are desired.
Returns
-------
(Timestamp, Timestamp)
The open and close for the given session.
"""
sched = self.schedule
# `market_open` and `market_close` should be timezone aware, but pandas
# 0.16.1 does not appear to support this:
# http://pandas.pydata.org/pandas-docs/stable/whatsnew.html#datetime-with-tz # noqa
return (sched.at[session_label, 'market_open'].tz_localize(UTC), sched.at[session_label, 'market_close'].tz_localize(UTC)) |
def read_log(self, idx, offset, size):
"""Reads the VM log file. The chunk size is limited, so even if you
ask for a big piece there might be less data returned.
in idx of type int
Which log file to read. 0=current log file.
in offset of type int
Offset in the log file.
in size of type int
Chunk size to read in the log file.
return data of type str
Data read from the log file. A data size of 0 means end of file
if the requested chunk size was not 0. This is the unprocessed
file data, i.e. the line ending style depends on the platform of
the system the server is running on.
raises TypeError
If any argument is not an integer.
"""
# Validate argument types up front so the underlying COM/XPCOM call
# receives well-formed parameters (baseinteger covers int/long).
if not isinstance(idx, baseinteger):
raise TypeError("idx can only be an instance of type baseinteger")
if not isinstance(offset, baseinteger):
raise TypeError("offset can only be an instance of type baseinteger")
if not isinstance(size, baseinteger):
raise TypeError("size can only be an instance of type baseinteger")
# Delegate to the generated wrapper for the IMachine::readLog method.
data = self._call("readLog",
in_p=[idx, offset, size])
return data | def function[read_log, parameter[self, idx, offset, size]]:
constant[Reads the VM log file. The chunk size is limited, so even if you
ask for a big piece there might be less data returned.
in idx of type int
Which log file to read. 0=current log file.
in offset of type int
Offset in the log file.
in size of type int
Chunk size to read in the log file.
return data of type str
Data read from the log file. A data size of 0 means end of file
if the requested chunk size was not 0. This is the unprocessed
file data, i.e. the line ending style depends on the platform of
the system the server is running on.
]
if <ast.UnaryOp object at 0x7da1b26ac490> begin[:]
<ast.Raise object at 0x7da1b26afd30>
if <ast.UnaryOp object at 0x7da1b26af010> begin[:]
<ast.Raise object at 0x7da1b26acee0>
if <ast.UnaryOp object at 0x7da1b26ae350> begin[:]
<ast.Raise object at 0x7da1b26ae770>
variable[data] assign[=] call[name[self]._call, parameter[constant[readLog]]]
return[name[data]] | keyword[def] identifier[read_log] ( identifier[self] , identifier[idx] , identifier[offset] , identifier[size] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[idx] , identifier[baseinteger] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[offset] , identifier[baseinteger] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[size] , identifier[baseinteger] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[data] = identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[idx] , identifier[offset] , identifier[size] ])
keyword[return] identifier[data] | def read_log(self, idx, offset, size):
"""Reads the VM log file. The chunk size is limited, so even if you
ask for a big piece there might be less data returned.
in idx of type int
Which log file to read. 0=current log file.
in offset of type int
Offset in the log file.
in size of type int
Chunk size to read in the log file.
return data of type str
Data read from the log file. A data size of 0 means end of file
if the requested chunk size was not 0. This is the unprocessed
file data, i.e. the line ending style depends on the platform of
the system the server is running on.
"""
if not isinstance(idx, baseinteger):
raise TypeError('idx can only be an instance of type baseinteger') # depends on [control=['if'], data=[]]
if not isinstance(offset, baseinteger):
raise TypeError('offset can only be an instance of type baseinteger') # depends on [control=['if'], data=[]]
if not isinstance(size, baseinteger):
raise TypeError('size can only be an instance of type baseinteger') # depends on [control=['if'], data=[]]
data = self._call('readLog', in_p=[idx, offset, size])
return data |
def create_widgets(self):
"""Build basic components of dialog."""
self.bbox = QDialogButtonBox(
QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
self.idx_ok = self.bbox.button(QDialogButtonBox.Ok)
self.idx_cancel = self.bbox.button(QDialogButtonBox.Cancel)
self.idx_group = FormMenu([gr['name'] for gr in self.groups])
chan_box = QListWidget()
self.idx_chan = chan_box
stage_box = QListWidget()
stage_box.addItems(STAGE_NAME)
stage_box.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.idx_stage = stage_box
cycle_box = QListWidget()
cycle_box.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.idx_cycle = cycle_box | def function[create_widgets, parameter[self]]:
constant[Build basic components of dialog.]
name[self].bbox assign[=] call[name[QDialogButtonBox], parameter[binary_operation[name[QDialogButtonBox].Ok <ast.BitOr object at 0x7da2590d6aa0> name[QDialogButtonBox].Cancel]]]
name[self].idx_ok assign[=] call[name[self].bbox.button, parameter[name[QDialogButtonBox].Ok]]
name[self].idx_cancel assign[=] call[name[self].bbox.button, parameter[name[QDialogButtonBox].Cancel]]
name[self].idx_group assign[=] call[name[FormMenu], parameter[<ast.ListComp object at 0x7da1b0ddd2d0>]]
variable[chan_box] assign[=] call[name[QListWidget], parameter[]]
name[self].idx_chan assign[=] name[chan_box]
variable[stage_box] assign[=] call[name[QListWidget], parameter[]]
call[name[stage_box].addItems, parameter[name[STAGE_NAME]]]
call[name[stage_box].setSelectionMode, parameter[name[QAbstractItemView].ExtendedSelection]]
name[self].idx_stage assign[=] name[stage_box]
variable[cycle_box] assign[=] call[name[QListWidget], parameter[]]
call[name[cycle_box].setSelectionMode, parameter[name[QAbstractItemView].ExtendedSelection]]
name[self].idx_cycle assign[=] name[cycle_box] | keyword[def] identifier[create_widgets] ( identifier[self] ):
literal[string]
identifier[self] . identifier[bbox] = identifier[QDialogButtonBox] (
identifier[QDialogButtonBox] . identifier[Ok] | identifier[QDialogButtonBox] . identifier[Cancel] )
identifier[self] . identifier[idx_ok] = identifier[self] . identifier[bbox] . identifier[button] ( identifier[QDialogButtonBox] . identifier[Ok] )
identifier[self] . identifier[idx_cancel] = identifier[self] . identifier[bbox] . identifier[button] ( identifier[QDialogButtonBox] . identifier[Cancel] )
identifier[self] . identifier[idx_group] = identifier[FormMenu] ([ identifier[gr] [ literal[string] ] keyword[for] identifier[gr] keyword[in] identifier[self] . identifier[groups] ])
identifier[chan_box] = identifier[QListWidget] ()
identifier[self] . identifier[idx_chan] = identifier[chan_box]
identifier[stage_box] = identifier[QListWidget] ()
identifier[stage_box] . identifier[addItems] ( identifier[STAGE_NAME] )
identifier[stage_box] . identifier[setSelectionMode] ( identifier[QAbstractItemView] . identifier[ExtendedSelection] )
identifier[self] . identifier[idx_stage] = identifier[stage_box]
identifier[cycle_box] = identifier[QListWidget] ()
identifier[cycle_box] . identifier[setSelectionMode] ( identifier[QAbstractItemView] . identifier[ExtendedSelection] )
identifier[self] . identifier[idx_cycle] = identifier[cycle_box] | def create_widgets(self):
"""Build basic components of dialog."""
self.bbox = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
self.idx_ok = self.bbox.button(QDialogButtonBox.Ok)
self.idx_cancel = self.bbox.button(QDialogButtonBox.Cancel)
self.idx_group = FormMenu([gr['name'] for gr in self.groups])
chan_box = QListWidget()
self.idx_chan = chan_box
stage_box = QListWidget()
stage_box.addItems(STAGE_NAME)
stage_box.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.idx_stage = stage_box
cycle_box = QListWidget()
cycle_box.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.idx_cycle = cycle_box |
def setType(self, polygonID, polygonType):
"""setType(string, string) -> None
Sets the (abstract) type of the polygon.
"""
self._connection._beginMessage(
tc.CMD_SET_POLYGON_VARIABLE, tc.VAR_TYPE, polygonID, 1 + 4 + len(polygonType))
self._connection._packString(polygonType)
self._connection._sendExact() | def function[setType, parameter[self, polygonID, polygonType]]:
constant[setType(string, string) -> None
Sets the (abstract) type of the polygon.
]
call[name[self]._connection._beginMessage, parameter[name[tc].CMD_SET_POLYGON_VARIABLE, name[tc].VAR_TYPE, name[polygonID], binary_operation[binary_operation[constant[1] + constant[4]] + call[name[len], parameter[name[polygonType]]]]]]
call[name[self]._connection._packString, parameter[name[polygonType]]]
call[name[self]._connection._sendExact, parameter[]] | keyword[def] identifier[setType] ( identifier[self] , identifier[polygonID] , identifier[polygonType] ):
literal[string]
identifier[self] . identifier[_connection] . identifier[_beginMessage] (
identifier[tc] . identifier[CMD_SET_POLYGON_VARIABLE] , identifier[tc] . identifier[VAR_TYPE] , identifier[polygonID] , literal[int] + literal[int] + identifier[len] ( identifier[polygonType] ))
identifier[self] . identifier[_connection] . identifier[_packString] ( identifier[polygonType] )
identifier[self] . identifier[_connection] . identifier[_sendExact] () | def setType(self, polygonID, polygonType):
"""setType(string, string) -> None
Sets the (abstract) type of the polygon.
"""
self._connection._beginMessage(tc.CMD_SET_POLYGON_VARIABLE, tc.VAR_TYPE, polygonID, 1 + 4 + len(polygonType))
self._connection._packString(polygonType)
self._connection._sendExact() |
def _print(self):
"""
Prints the todos in the right format.
Defaults to normal text output (with possible colors and other pretty
printing). If a format was specified on the commandline, this format is
sent to the output.
"""
if self.printer is None:
# create a standard printer with some filters
indent = config().list_indent()
final_format = ' ' * indent + self.format
filters = []
filters.append(PrettyPrinterFormatFilter(self.todolist, final_format))
self.printer = pretty_printer_factory(self.todolist, filters)
try:
if self.group_expression:
self.out(self.printer.print_groups(self._view().groups))
else:
self.out(self.printer.print_list(self._view().todos))
except ListFormatError:
self.error('Error while parsing format string (list_format config'
' option or -F)') | def function[_print, parameter[self]]:
constant[
Prints the todos in the right format.
Defaults to normal text output (with possible colors and other pretty
printing). If a format was specified on the commandline, this format is
sent to the output.
]
if compare[name[self].printer is constant[None]] begin[:]
variable[indent] assign[=] call[call[name[config], parameter[]].list_indent, parameter[]]
variable[final_format] assign[=] binary_operation[binary_operation[constant[ ] * name[indent]] + name[self].format]
variable[filters] assign[=] list[[]]
call[name[filters].append, parameter[call[name[PrettyPrinterFormatFilter], parameter[name[self].todolist, name[final_format]]]]]
name[self].printer assign[=] call[name[pretty_printer_factory], parameter[name[self].todolist, name[filters]]]
<ast.Try object at 0x7da1b2347790> | keyword[def] identifier[_print] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[printer] keyword[is] keyword[None] :
identifier[indent] = identifier[config] (). identifier[list_indent] ()
identifier[final_format] = literal[string] * identifier[indent] + identifier[self] . identifier[format]
identifier[filters] =[]
identifier[filters] . identifier[append] ( identifier[PrettyPrinterFormatFilter] ( identifier[self] . identifier[todolist] , identifier[final_format] ))
identifier[self] . identifier[printer] = identifier[pretty_printer_factory] ( identifier[self] . identifier[todolist] , identifier[filters] )
keyword[try] :
keyword[if] identifier[self] . identifier[group_expression] :
identifier[self] . identifier[out] ( identifier[self] . identifier[printer] . identifier[print_groups] ( identifier[self] . identifier[_view] (). identifier[groups] ))
keyword[else] :
identifier[self] . identifier[out] ( identifier[self] . identifier[printer] . identifier[print_list] ( identifier[self] . identifier[_view] (). identifier[todos] ))
keyword[except] identifier[ListFormatError] :
identifier[self] . identifier[error] ( literal[string]
literal[string] ) | def _print(self):
"""
Prints the todos in the right format.
Defaults to normal text output (with possible colors and other pretty
printing). If a format was specified on the commandline, this format is
sent to the output.
"""
if self.printer is None:
# create a standard printer with some filters
indent = config().list_indent()
final_format = ' ' * indent + self.format
filters = []
filters.append(PrettyPrinterFormatFilter(self.todolist, final_format))
self.printer = pretty_printer_factory(self.todolist, filters) # depends on [control=['if'], data=[]]
try:
if self.group_expression:
self.out(self.printer.print_groups(self._view().groups)) # depends on [control=['if'], data=[]]
else:
self.out(self.printer.print_list(self._view().todos)) # depends on [control=['try'], data=[]]
except ListFormatError:
self.error('Error while parsing format string (list_format config option or -F)') # depends on [control=['except'], data=[]] |
def window_open(dev, temp, duration):
""" Gets and sets the window open settings. """
click.echo("Window open: %s" % dev.window_open)
if temp and duration:
click.echo("Setting window open conf, temp: %s duration: %s" % (temp, duration))
dev.window_open_config(temp, duration) | def function[window_open, parameter[dev, temp, duration]]:
constant[ Gets and sets the window open settings. ]
call[name[click].echo, parameter[binary_operation[constant[Window open: %s] <ast.Mod object at 0x7da2590d6920> name[dev].window_open]]]
if <ast.BoolOp object at 0x7da1b0bcbc40> begin[:]
call[name[click].echo, parameter[binary_operation[constant[Setting window open conf, temp: %s duration: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0bc9d80>, <ast.Name object at 0x7da1b0bc99c0>]]]]]
call[name[dev].window_open_config, parameter[name[temp], name[duration]]] | keyword[def] identifier[window_open] ( identifier[dev] , identifier[temp] , identifier[duration] ):
literal[string]
identifier[click] . identifier[echo] ( literal[string] % identifier[dev] . identifier[window_open] )
keyword[if] identifier[temp] keyword[and] identifier[duration] :
identifier[click] . identifier[echo] ( literal[string] %( identifier[temp] , identifier[duration] ))
identifier[dev] . identifier[window_open_config] ( identifier[temp] , identifier[duration] ) | def window_open(dev, temp, duration):
""" Gets and sets the window open settings. """
click.echo('Window open: %s' % dev.window_open)
if temp and duration:
click.echo('Setting window open conf, temp: %s duration: %s' % (temp, duration))
dev.window_open_config(temp, duration) # depends on [control=['if'], data=[]] |
def dataset_upload_file(self, path, quiet):
""" upload a dataset file
Parameters
==========
path: the complete path to upload
quiet: suppress verbose output (default is False)
"""
file_name = os.path.basename(path)
content_length = os.path.getsize(path)
last_modified_date_utc = int(os.path.getmtime(path))
result = FileUploadInfo(
self.process_response(
self.datasets_upload_file_with_http_info(
file_name, content_length, last_modified_date_utc)))
success = self.upload_complete(path, result.createUrl, quiet)
if success:
return result.token
return None | def function[dataset_upload_file, parameter[self, path, quiet]]:
constant[ upload a dataset file
Parameters
==========
path: the complete path to upload
quiet: suppress verbose output (default is False)
]
variable[file_name] assign[=] call[name[os].path.basename, parameter[name[path]]]
variable[content_length] assign[=] call[name[os].path.getsize, parameter[name[path]]]
variable[last_modified_date_utc] assign[=] call[name[int], parameter[call[name[os].path.getmtime, parameter[name[path]]]]]
variable[result] assign[=] call[name[FileUploadInfo], parameter[call[name[self].process_response, parameter[call[name[self].datasets_upload_file_with_http_info, parameter[name[file_name], name[content_length], name[last_modified_date_utc]]]]]]]
variable[success] assign[=] call[name[self].upload_complete, parameter[name[path], name[result].createUrl, name[quiet]]]
if name[success] begin[:]
return[name[result].token]
return[constant[None]] | keyword[def] identifier[dataset_upload_file] ( identifier[self] , identifier[path] , identifier[quiet] ):
literal[string]
identifier[file_name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] )
identifier[content_length] = identifier[os] . identifier[path] . identifier[getsize] ( identifier[path] )
identifier[last_modified_date_utc] = identifier[int] ( identifier[os] . identifier[path] . identifier[getmtime] ( identifier[path] ))
identifier[result] = identifier[FileUploadInfo] (
identifier[self] . identifier[process_response] (
identifier[self] . identifier[datasets_upload_file_with_http_info] (
identifier[file_name] , identifier[content_length] , identifier[last_modified_date_utc] )))
identifier[success] = identifier[self] . identifier[upload_complete] ( identifier[path] , identifier[result] . identifier[createUrl] , identifier[quiet] )
keyword[if] identifier[success] :
keyword[return] identifier[result] . identifier[token]
keyword[return] keyword[None] | def dataset_upload_file(self, path, quiet):
""" upload a dataset file
Parameters
==========
path: the complete path to upload
quiet: suppress verbose output (default is False)
"""
file_name = os.path.basename(path)
content_length = os.path.getsize(path)
last_modified_date_utc = int(os.path.getmtime(path))
result = FileUploadInfo(self.process_response(self.datasets_upload_file_with_http_info(file_name, content_length, last_modified_date_utc)))
success = self.upload_complete(path, result.createUrl, quiet)
if success:
return result.token # depends on [control=['if'], data=[]]
return None |
def get_thread_hardware_breakpoints(self, dwThreadId):
"""
@see: L{get_process_hardware_breakpoints}
@type dwThreadId: int
@param dwThreadId: Thread global ID.
@rtype: list of L{HardwareBreakpoint}
@return: All hardware breakpoints for the given thread.
"""
result = list()
for (tid, bplist) in compat.iteritems(self.__hardwareBP):
if tid == dwThreadId:
for bp in bplist:
result.append(bp)
return result | def function[get_thread_hardware_breakpoints, parameter[self, dwThreadId]]:
constant[
@see: L{get_process_hardware_breakpoints}
@type dwThreadId: int
@param dwThreadId: Thread global ID.
@rtype: list of L{HardwareBreakpoint}
@return: All hardware breakpoints for the given thread.
]
variable[result] assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b06f9b70>, <ast.Name object at 0x7da1b06face0>]]] in starred[call[name[compat].iteritems, parameter[name[self].__hardwareBP]]] begin[:]
if compare[name[tid] equal[==] name[dwThreadId]] begin[:]
for taget[name[bp]] in starred[name[bplist]] begin[:]
call[name[result].append, parameter[name[bp]]]
return[name[result]] | keyword[def] identifier[get_thread_hardware_breakpoints] ( identifier[self] , identifier[dwThreadId] ):
literal[string]
identifier[result] = identifier[list] ()
keyword[for] ( identifier[tid] , identifier[bplist] ) keyword[in] identifier[compat] . identifier[iteritems] ( identifier[self] . identifier[__hardwareBP] ):
keyword[if] identifier[tid] == identifier[dwThreadId] :
keyword[for] identifier[bp] keyword[in] identifier[bplist] :
identifier[result] . identifier[append] ( identifier[bp] )
keyword[return] identifier[result] | def get_thread_hardware_breakpoints(self, dwThreadId):
"""
@see: L{get_process_hardware_breakpoints}
@type dwThreadId: int
@param dwThreadId: Thread global ID.
@rtype: list of L{HardwareBreakpoint}
@return: All hardware breakpoints for the given thread.
"""
result = list()
for (tid, bplist) in compat.iteritems(self.__hardwareBP):
if tid == dwThreadId:
for bp in bplist:
result.append(bp) # depends on [control=['for'], data=['bp']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return result |
def Range(min=None, max=None, min_message="Must be at least {min}", max_message="Must be at most {max}"):
"""
Creates a validator that checks if the given numeric value is in the
specified range, inclusive.
Accepts values specified by ``numbers.Number`` only, excluding booleans.
The error messages raised can be customized with ``min_message`` and
``max_message``. The ``min`` and ``max`` arguments are formatted.
"""
@wraps(Range)
def built(value):
if not isinstance(value, numbers.Number) or isinstance(value, bool):
raise Error("Not a number")
if min is not None and min > value:
raise Error(min_message.format(min=min, max=max))
if max is not None and value > max:
raise Error(max_message.format(min=min, max=max))
return value
return built | def function[Range, parameter[min, max, min_message, max_message]]:
constant[
Creates a validator that checks if the given numeric value is in the
specified range, inclusive.
Accepts values specified by ``numbers.Number`` only, excluding booleans.
The error messages raised can be customized with ``min_message`` and
``max_message``. The ``min`` and ``max`` arguments are formatted.
]
def function[built, parameter[value]]:
if <ast.BoolOp object at 0x7da20e960730> begin[:]
<ast.Raise object at 0x7da20e960be0>
if <ast.BoolOp object at 0x7da20e962530> begin[:]
<ast.Raise object at 0x7da20e962e90>
if <ast.BoolOp object at 0x7da20e963a90> begin[:]
<ast.Raise object at 0x7da20e9634c0>
return[name[value]]
return[name[built]] | keyword[def] identifier[Range] ( identifier[min] = keyword[None] , identifier[max] = keyword[None] , identifier[min_message] = literal[string] , identifier[max_message] = literal[string] ):
literal[string]
@ identifier[wraps] ( identifier[Range] )
keyword[def] identifier[built] ( identifier[value] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[numbers] . identifier[Number] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[bool] ):
keyword[raise] identifier[Error] ( literal[string] )
keyword[if] identifier[min] keyword[is] keyword[not] keyword[None] keyword[and] identifier[min] > identifier[value] :
keyword[raise] identifier[Error] ( identifier[min_message] . identifier[format] ( identifier[min] = identifier[min] , identifier[max] = identifier[max] ))
keyword[if] identifier[max] keyword[is] keyword[not] keyword[None] keyword[and] identifier[value] > identifier[max] :
keyword[raise] identifier[Error] ( identifier[max_message] . identifier[format] ( identifier[min] = identifier[min] , identifier[max] = identifier[max] ))
keyword[return] identifier[value]
keyword[return] identifier[built] | def Range(min=None, max=None, min_message='Must be at least {min}', max_message='Must be at most {max}'):
"""
Creates a validator that checks if the given numeric value is in the
specified range, inclusive.
Accepts values specified by ``numbers.Number`` only, excluding booleans.
The error messages raised can be customized with ``min_message`` and
``max_message``. The ``min`` and ``max`` arguments are formatted.
"""
@wraps(Range)
def built(value):
if not isinstance(value, numbers.Number) or isinstance(value, bool):
raise Error('Not a number') # depends on [control=['if'], data=[]]
if min is not None and min > value:
raise Error(min_message.format(min=min, max=max)) # depends on [control=['if'], data=[]]
if max is not None and value > max:
raise Error(max_message.format(min=min, max=max)) # depends on [control=['if'], data=[]]
return value
return built |
def _min(self, memory, addr, **kwargs):
"""
Gets the minimum solution of an address.
"""
return memory.state.solver.min(addr, exact=kwargs.pop('exact', self._exact), **kwargs) | def function[_min, parameter[self, memory, addr]]:
constant[
Gets the minimum solution of an address.
]
return[call[name[memory].state.solver.min, parameter[name[addr]]]] | keyword[def] identifier[_min] ( identifier[self] , identifier[memory] , identifier[addr] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[memory] . identifier[state] . identifier[solver] . identifier[min] ( identifier[addr] , identifier[exact] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_exact] ),** identifier[kwargs] ) | def _min(self, memory, addr, **kwargs):
"""
Gets the minimum solution of an address.
"""
return memory.state.solver.min(addr, exact=kwargs.pop('exact', self._exact), **kwargs) |
def countthai(text: str, ignore_chars: str = _DEFAULT_IGNORE_CHARS) -> float:
"""
:param str text: input text
:return: float, proportion of characters in the text that is Thai character
"""
if not text or not isinstance(text, str):
return 0
if not ignore_chars:
ignore_chars = ""
num_thai = 0
num_ignore = 0
for ch in text:
if ch in ignore_chars:
num_ignore += 1
elif isthaichar(ch):
num_thai += 1
num_count = len(text) - num_ignore
return (num_thai / num_count) * 100 | def function[countthai, parameter[text, ignore_chars]]:
constant[
:param str text: input text
:return: float, proportion of characters in the text that is Thai character
]
if <ast.BoolOp object at 0x7da1b1708d90> begin[:]
return[constant[0]]
if <ast.UnaryOp object at 0x7da1b170a290> begin[:]
variable[ignore_chars] assign[=] constant[]
variable[num_thai] assign[=] constant[0]
variable[num_ignore] assign[=] constant[0]
for taget[name[ch]] in starred[name[text]] begin[:]
if compare[name[ch] in name[ignore_chars]] begin[:]
<ast.AugAssign object at 0x7da1b170b8e0>
variable[num_count] assign[=] binary_operation[call[name[len], parameter[name[text]]] - name[num_ignore]]
return[binary_operation[binary_operation[name[num_thai] / name[num_count]] * constant[100]]] | keyword[def] identifier[countthai] ( identifier[text] : identifier[str] , identifier[ignore_chars] : identifier[str] = identifier[_DEFAULT_IGNORE_CHARS] )-> identifier[float] :
literal[string]
keyword[if] keyword[not] identifier[text] keyword[or] keyword[not] identifier[isinstance] ( identifier[text] , identifier[str] ):
keyword[return] literal[int]
keyword[if] keyword[not] identifier[ignore_chars] :
identifier[ignore_chars] = literal[string]
identifier[num_thai] = literal[int]
identifier[num_ignore] = literal[int]
keyword[for] identifier[ch] keyword[in] identifier[text] :
keyword[if] identifier[ch] keyword[in] identifier[ignore_chars] :
identifier[num_ignore] += literal[int]
keyword[elif] identifier[isthaichar] ( identifier[ch] ):
identifier[num_thai] += literal[int]
identifier[num_count] = identifier[len] ( identifier[text] )- identifier[num_ignore]
keyword[return] ( identifier[num_thai] / identifier[num_count] )* literal[int] | def countthai(text: str, ignore_chars: str=_DEFAULT_IGNORE_CHARS) -> float:
"""
:param str text: input text
:return: float, proportion of characters in the text that is Thai character
"""
if not text or not isinstance(text, str):
return 0 # depends on [control=['if'], data=[]]
if not ignore_chars:
ignore_chars = '' # depends on [control=['if'], data=[]]
num_thai = 0
num_ignore = 0
for ch in text:
if ch in ignore_chars:
num_ignore += 1 # depends on [control=['if'], data=[]]
elif isthaichar(ch):
num_thai += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ch']]
num_count = len(text) - num_ignore
return num_thai / num_count * 100 |
def acquire(self, blocking=True, timeout=-1):
"""Acquire a lock, blocking or non-blocking.
Blocks until timeout, if timeout a positive float and blocking=True. A timeout
value of -1 blocks indefinitely, unless blocking=False."""
if not isinstance(timeout, (int, float)):
raise TypeError('a float is required')
if blocking:
# blocking indefinite
if timeout == -1:
with self.__condition:
while not self.__lock.acquire(False):
# condition with timeout is interruptable
self.__condition.wait(60)
return True
# same as non-blocking
elif timeout == 0:
return self.__lock.acquire(False)
elif timeout < 0:
raise ValueError('timeout value must be strictly positive (or -1)')
# blocking finite
else:
start = time()
waited_time = 0
with self.__condition:
while waited_time < timeout:
if self.__lock.acquire(False):
return True
else:
self.__condition.wait(timeout - waited_time)
waited_time = time() - start
return False
elif timeout != -1:
raise ValueError('can\'t specify a timeout for a non-blocking call')
else:
# non-blocking
return self.__lock.acquire(False) | def function[acquire, parameter[self, blocking, timeout]]:
constant[Acquire a lock, blocking or non-blocking.
Blocks until timeout, if timeout a positive float and blocking=True. A timeout
value of -1 blocks indefinitely, unless blocking=False.]
if <ast.UnaryOp object at 0x7da18ede40a0> begin[:]
<ast.Raise object at 0x7da18ede6410>
if name[blocking] begin[:]
if compare[name[timeout] equal[==] <ast.UnaryOp object at 0x7da18ede50f0>] begin[:]
with name[self].__condition begin[:]
while <ast.UnaryOp object at 0x7da18ede4c10> begin[:]
call[name[self].__condition.wait, parameter[constant[60]]]
return[constant[True]] | keyword[def] identifier[acquire] ( identifier[self] , identifier[blocking] = keyword[True] , identifier[timeout] =- literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[timeout] ,( identifier[int] , identifier[float] )):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[blocking] :
keyword[if] identifier[timeout] ==- literal[int] :
keyword[with] identifier[self] . identifier[__condition] :
keyword[while] keyword[not] identifier[self] . identifier[__lock] . identifier[acquire] ( keyword[False] ):
identifier[self] . identifier[__condition] . identifier[wait] ( literal[int] )
keyword[return] keyword[True]
keyword[elif] identifier[timeout] == literal[int] :
keyword[return] identifier[self] . identifier[__lock] . identifier[acquire] ( keyword[False] )
keyword[elif] identifier[timeout] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
identifier[start] = identifier[time] ()
identifier[waited_time] = literal[int]
keyword[with] identifier[self] . identifier[__condition] :
keyword[while] identifier[waited_time] < identifier[timeout] :
keyword[if] identifier[self] . identifier[__lock] . identifier[acquire] ( keyword[False] ):
keyword[return] keyword[True]
keyword[else] :
identifier[self] . identifier[__condition] . identifier[wait] ( identifier[timeout] - identifier[waited_time] )
identifier[waited_time] = identifier[time] ()- identifier[start]
keyword[return] keyword[False]
keyword[elif] identifier[timeout] !=- literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
keyword[return] identifier[self] . identifier[__lock] . identifier[acquire] ( keyword[False] ) | def acquire(self, blocking=True, timeout=-1):
"""Acquire a lock, blocking or non-blocking.
Blocks until timeout, if timeout a positive float and blocking=True. A timeout
value of -1 blocks indefinitely, unless blocking=False."""
if not isinstance(timeout, (int, float)):
raise TypeError('a float is required') # depends on [control=['if'], data=[]]
if blocking:
# blocking indefinite
if timeout == -1:
with self.__condition:
while not self.__lock.acquire(False):
# condition with timeout is interruptable
self.__condition.wait(60) # depends on [control=['while'], data=[]] # depends on [control=['with'], data=[]]
return True # depends on [control=['if'], data=[]]
# same as non-blocking
elif timeout == 0:
return self.__lock.acquire(False) # depends on [control=['if'], data=[]]
elif timeout < 0:
raise ValueError('timeout value must be strictly positive (or -1)') # depends on [control=['if'], data=[]]
else:
# blocking finite
start = time()
waited_time = 0
with self.__condition:
while waited_time < timeout:
if self.__lock.acquire(False):
return True # depends on [control=['if'], data=[]]
else:
self.__condition.wait(timeout - waited_time)
waited_time = time() - start # depends on [control=['while'], data=['waited_time', 'timeout']] # depends on [control=['with'], data=[]]
return False # depends on [control=['if'], data=[]]
elif timeout != -1:
raise ValueError("can't specify a timeout for a non-blocking call") # depends on [control=['if'], data=[]]
else:
# non-blocking
return self.__lock.acquire(False) |
def get_create_options(self):
"""List available reserved capacity plans"""
mask = "mask[attributes,prices[pricingLocationGroup]]"
results = self.ordering_manager.list_items(self.capacity_package, mask=mask)
return results | def function[get_create_options, parameter[self]]:
constant[List available reserved capacity plans]
variable[mask] assign[=] constant[mask[attributes,prices[pricingLocationGroup]]]
variable[results] assign[=] call[name[self].ordering_manager.list_items, parameter[name[self].capacity_package]]
return[name[results]] | keyword[def] identifier[get_create_options] ( identifier[self] ):
literal[string]
identifier[mask] = literal[string]
identifier[results] = identifier[self] . identifier[ordering_manager] . identifier[list_items] ( identifier[self] . identifier[capacity_package] , identifier[mask] = identifier[mask] )
keyword[return] identifier[results] | def get_create_options(self):
"""List available reserved capacity plans"""
mask = 'mask[attributes,prices[pricingLocationGroup]]'
results = self.ordering_manager.list_items(self.capacity_package, mask=mask)
return results |
def map_function(func_str, fw_action_addtion=None, bw_action_addtion=None, alias_func=None):
    '''Rewrite a function-call string by renaming its callable part.

    Splits ``func_str`` into an action name and an optional ``(...)``
    argument list, lower-cases the name, optionally replaces it with
    ``alias_func``, then wraps it with the given prefix/suffix. The
    argument list (if any) is re-attached unchanged.

    Sample usage:
        print map_function('set',alias_func = "ini_items");# -> ini_items
        print map_function('set',fw_action_addtion="action_steps_",bw_action_addtion="_for_upd",alias_func = "ini_items"); # -> action_steps_ini_items_for_upd
        print map_function('set(a=1,b=2,c=Test())',"action_steps_","_for_upd","ini_items");# -> action_steps_ini_items_for_upd(a=1,b=2,c=Test())
        print map_function('set("login",a="good",b=Test())',"action_steps_","_for_upd");# -> action_steps_set_for_upd("login",a="good",b=Test())

    Returns None when ``func_str`` does not look like an identifier or call.
    '''
    split_action_value = re.compile(r"^(\w+)(\((.*)\)$)?")
    matched = split_action_value.match(func_str)
    if not matched:
        return None
    action = matched.group(1).lower()
    value = matched.group(2)  # "(...)" argument list, or None
    if alias_func:
        action = alias_func
    if fw_action_addtion:
        action = fw_action_addtion + action
    # BUG FIX: the suffix must be gated on bw_action_addtion. The original
    # re-tested fw_action_addtion here, raising TypeError (str + None) when
    # only the prefix was supplied and silently dropping a suffix that was
    # supplied without a prefix.
    if bw_action_addtion:
        action = action + bw_action_addtion
    if value:
        return action + value
    return action
constant[ Sample usage:
print map_function('set',alias_func = "ini_items");# -> ini_items
print map_function('set',fw_action_addtion="action_steps_",bw_action_addtion="_for_upd",alias_func = "ini_items"); # -> action_steps_ini_items_for_upd
print map_function('set(a=1,b=2,c=Test())',"action_steps_","_for_upd","ini_items");# -> action_steps_ini_items_for_upd(a=1,b=2,c=Test())
print map_function('set("login",a="good",b=Test())',"action_steps_","_for_upd");# -> action_steps_set_for_upd("login",a="good",b=Test())
]
variable[split_action_value] assign[=] call[name[re].compile, parameter[constant[^(\w+)(\((.*)\)$)?]]]
variable[matched] assign[=] call[name[split_action_value].match, parameter[name[func_str]]]
if name[matched] begin[:]
variable[action] assign[=] call[call[name[matched].group, parameter[constant[1]]].lower, parameter[]]
variable[value] assign[=] call[name[matched].group, parameter[constant[2]]]
if name[alias_func] begin[:]
variable[action] assign[=] name[alias_func]
if name[fw_action_addtion] begin[:]
variable[action] assign[=] binary_operation[name[fw_action_addtion] + name[action]]
if name[fw_action_addtion] begin[:]
variable[action] assign[=] binary_operation[name[action] + name[bw_action_addtion]]
if name[value] begin[:]
return[binary_operation[name[action] + name[value]]] | keyword[def] identifier[map_function] ( identifier[func_str] , identifier[fw_action_addtion] = keyword[None] , identifier[bw_action_addtion] = keyword[None] , identifier[alias_func] = keyword[None] ):
literal[string]
identifier[split_action_value] = identifier[re] . identifier[compile] ( literal[string] )
identifier[matched] = identifier[split_action_value] . identifier[match] ( identifier[func_str] )
keyword[if] identifier[matched] :
identifier[action] = identifier[matched] . identifier[group] ( literal[int] ). identifier[lower] ()
identifier[value] = identifier[matched] . identifier[group] ( literal[int] )
keyword[if] identifier[alias_func] :
identifier[action] = identifier[alias_func]
keyword[if] identifier[fw_action_addtion] :
identifier[action] = identifier[fw_action_addtion] + identifier[action]
keyword[if] identifier[fw_action_addtion] :
identifier[action] = identifier[action] + identifier[bw_action_addtion]
keyword[if] identifier[value] :
keyword[return] identifier[action] + identifier[value]
keyword[else] :
keyword[return] identifier[action] | def map_function(func_str, fw_action_addtion=None, bw_action_addtion=None, alias_func=None):
""" Sample usage:
print map_function('set',alias_func = "ini_items");# -> ini_items
print map_function('set',fw_action_addtion="action_steps_",bw_action_addtion="_for_upd",alias_func = "ini_items"); # -> action_steps_ini_items_for_upd
print map_function('set(a=1,b=2,c=Test())',"action_steps_","_for_upd","ini_items");# -> action_steps_ini_items_for_upd(a=1,b=2,c=Test())
print map_function('set("login",a="good",b=Test())',"action_steps_","_for_upd");# -> action_steps_set_for_upd("login",a="good",b=Test())
"""
split_action_value = re.compile('^(\\w+)(\\((.*)\\)$)?')
matched = split_action_value.match(func_str)
if matched:
action = matched.group(1).lower()
value = matched.group(2) #params = matched.group(3)
if alias_func:
action = alias_func # depends on [control=['if'], data=[]]
if fw_action_addtion:
action = fw_action_addtion + action # depends on [control=['if'], data=[]]
if fw_action_addtion:
action = action + bw_action_addtion # depends on [control=['if'], data=[]]
if value:
return action + value # depends on [control=['if'], data=[]]
else:
return action # depends on [control=['if'], data=[]] |
def _handle_lrr(self, data):
    """Parse a Long Range Radio (LRR) message and fire the LRR event.

    :param data: raw LRR message to parse
    :type data: string
    :returns: :py:class:`~alarmdecoder.messages.LRRMessage`
    """
    message = LRRMessage(data)
    # Feed the LRR system-state tracker unless the user opted out of it.
    if not self._ignore_lrr_states:
        self._lrr_system.update(message)
    self.on_lrr_message(message=message)
    return message
constant[
Handle Long Range Radio messages.
:param data: LRR message to parse
:type data: string
:returns: :py:class:`~alarmdecoder.messages.LRRMessage`
]
variable[msg] assign[=] call[name[LRRMessage], parameter[name[data]]]
if <ast.UnaryOp object at 0x7da1b27a65f0> begin[:]
call[name[self]._lrr_system.update, parameter[name[msg]]]
call[name[self].on_lrr_message, parameter[]]
return[name[msg]] | keyword[def] identifier[_handle_lrr] ( identifier[self] , identifier[data] ):
literal[string]
identifier[msg] = identifier[LRRMessage] ( identifier[data] )
keyword[if] keyword[not] identifier[self] . identifier[_ignore_lrr_states] :
identifier[self] . identifier[_lrr_system] . identifier[update] ( identifier[msg] )
identifier[self] . identifier[on_lrr_message] ( identifier[message] = identifier[msg] )
keyword[return] identifier[msg] | def _handle_lrr(self, data):
"""
Handle Long Range Radio messages.
:param data: LRR message to parse
:type data: string
:returns: :py:class:`~alarmdecoder.messages.LRRMessage`
"""
msg = LRRMessage(data)
if not self._ignore_lrr_states:
self._lrr_system.update(msg) # depends on [control=['if'], data=[]]
self.on_lrr_message(message=msg)
return msg |
def delete(self, skip_mapping=False):
    """Remove every ElasticSearch entry managed by this object.

    Each index is destroyed in turn; unless ``skip_mapping`` is set, its
    mapping is recreated immediately so the (now empty) index stays usable.
    """
    for es_index in self.indexes:
        es_index.destroy()
        if skip_mapping:
            continue
        es_index.create_mapping()
constant[Delete all entries from ElasticSearch.]
for taget[name[index]] in starred[name[self].indexes] begin[:]
call[name[index].destroy, parameter[]]
if <ast.UnaryOp object at 0x7da1b1908430> begin[:]
call[name[index].create_mapping, parameter[]] | keyword[def] identifier[delete] ( identifier[self] , identifier[skip_mapping] = keyword[False] ):
literal[string]
keyword[for] identifier[index] keyword[in] identifier[self] . identifier[indexes] :
identifier[index] . identifier[destroy] ()
keyword[if] keyword[not] identifier[skip_mapping] :
identifier[index] . identifier[create_mapping] () | def delete(self, skip_mapping=False):
"""Delete all entries from ElasticSearch."""
for index in self.indexes:
index.destroy()
if not skip_mapping:
index.create_mapping() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['index']] |
def _apply_Create(self, change):
    '''Create the DNS record described by a change object in Azure DNS.

    :param change: a change object whose ``new`` record should be created
    :type change: octodns.record.Change
    :type return: void
    '''
    azure_record = _AzureRecord(self._resource_group, change.new)
    # create_or_update is idempotent on the Azure side, so this call covers
    # both creation and overwriting an existing record set.
    self._dns_client.record_sets.create_or_update(
        resource_group_name=azure_record.resource_group,
        zone_name=azure_record.zone_name,
        relative_record_set_name=azure_record.relative_record_set_name,
        record_type=azure_record.record_type,
        parameters=azure_record.params)
    self.log.debug('* Success Create/Update: {}'.format(azure_record))
constant[A record from change must be created.
:param change: a change object
:type change: octodns.record.Change
:type return: void
]
variable[ar] assign[=] call[name[_AzureRecord], parameter[name[self]._resource_group, name[change].new]]
variable[create] assign[=] name[self]._dns_client.record_sets.create_or_update
call[name[create], parameter[]]
call[name[self].log.debug, parameter[call[constant[* Success Create/Update: {}].format, parameter[name[ar]]]]] | keyword[def] identifier[_apply_Create] ( identifier[self] , identifier[change] ):
literal[string]
identifier[ar] = identifier[_AzureRecord] ( identifier[self] . identifier[_resource_group] , identifier[change] . identifier[new] )
identifier[create] = identifier[self] . identifier[_dns_client] . identifier[record_sets] . identifier[create_or_update]
identifier[create] ( identifier[resource_group_name] = identifier[ar] . identifier[resource_group] ,
identifier[zone_name] = identifier[ar] . identifier[zone_name] ,
identifier[relative_record_set_name] = identifier[ar] . identifier[relative_record_set_name] ,
identifier[record_type] = identifier[ar] . identifier[record_type] ,
identifier[parameters] = identifier[ar] . identifier[params] )
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[ar] )) | def _apply_Create(self, change):
"""A record from change must be created.
:param change: a change object
:type change: octodns.record.Change
:type return: void
"""
ar = _AzureRecord(self._resource_group, change.new)
create = self._dns_client.record_sets.create_or_update
create(resource_group_name=ar.resource_group, zone_name=ar.zone_name, relative_record_set_name=ar.relative_record_set_name, record_type=ar.record_type, parameters=ar.params)
self.log.debug('* Success Create/Update: {}'.format(ar)) |
def main():
    """Run the core.

    Parses the CLI arguments and, for the ``test`` sub-command, attempts a
    single connection to the Hottop roaster, exiting with status 1 when the
    serial interface cannot be opened.
    """
    parser = ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')
    setup_parser = subs.add_parser('test')
    setup_parser.add_argument('--interface', default=None,
                              help='Manually pass in the USB connection.')
    args = parser.parse_args()
    if args.cmd == 'test':
        ht = Hottop()
        try:
            # BUG FIX: the original fell through and called connect() a
            # second time after a successful connect(interface=...).
            # Connect exactly once, with the explicit interface if given.
            if args.interface:
                ht.connect(interface=args.interface)
            else:
                ht.connect()
        except SerialConnectionError as e:
            print("[!] Serial interface not accessible: %s" % str(e))
            sys.exit(1)
        print("[*] Successfully connected to the roaster!")
constant[Run the core.]
variable[parser] assign[=] call[name[ArgumentParser], parameter[]]
variable[subs] assign[=] call[name[parser].add_subparsers, parameter[]]
variable[setup_parser] assign[=] call[name[subs].add_parser, parameter[constant[test]]]
call[name[setup_parser].add_argument, parameter[constant[--interface]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
if compare[name[args].cmd equal[==] constant[test]] begin[:]
variable[ht] assign[=] call[name[Hottop], parameter[]]
<ast.Try object at 0x7da2044c10c0>
call[name[print], parameter[constant[[*] Successfully connected to the roaster!]]] | keyword[def] identifier[main] ():
literal[string]
identifier[parser] = identifier[ArgumentParser] ()
identifier[subs] = identifier[parser] . identifier[add_subparsers] ( identifier[dest] = literal[string] )
identifier[setup_parser] = identifier[subs] . identifier[add_parser] ( literal[string] )
identifier[setup_parser] . identifier[add_argument] ( literal[string] , identifier[default] = keyword[None] ,
identifier[help] = literal[string] )
identifier[args] = identifier[parser] . identifier[parse_args] ()
keyword[if] identifier[args] . identifier[cmd] == literal[string] :
identifier[ht] = identifier[Hottop] ()
keyword[try] :
keyword[if] identifier[args] . identifier[interface] :
identifier[ht] . identifier[connect] ( identifier[interface] = identifier[args] . identifier[interface] )
identifier[ht] . identifier[connect] ()
keyword[except] identifier[SerialConnectionError] keyword[as] identifier[e] :
identifier[print] ( literal[string] % identifier[str] ( identifier[e] ))
identifier[sys] . identifier[exit] ( literal[int] )
identifier[print] ( literal[string] ) | def main():
"""Run the core."""
parser = ArgumentParser()
subs = parser.add_subparsers(dest='cmd')
setup_parser = subs.add_parser('test')
setup_parser.add_argument('--interface', default=None, help='Manually pass in the USB connection.')
args = parser.parse_args()
if args.cmd == 'test':
ht = Hottop()
try:
if args.interface:
ht.connect(interface=args.interface) # depends on [control=['if'], data=[]]
ht.connect() # depends on [control=['try'], data=[]]
except SerialConnectionError as e:
print('[!] Serial interface not accessible: %s' % str(e))
sys.exit(1) # depends on [control=['except'], data=['e']]
print('[*] Successfully connected to the roaster!') # depends on [control=['if'], data=[]] |
def _init(self, style, streamer, processors=None):
    """Do writer-specific setup.

    Parameters
    ----------
    style : dict
        Style, as passed to __init__.
    streamer : interface.Stream
        A stream interface that takes __init__'s `stream` and `interactive`
        arguments into account.
    processors : field.StyleProcessors, optional
        A writer-specific processors instance. Defaults to
        field.PlainProcessors().
    """
    self._stream = streamer
    # Non-interactive streams only ever see the final rendering;
    # interactive ones are updated in place when the stream supports it,
    # and appended to incrementally otherwise.
    if not streamer.interactive:
        self.mode = "final"
    elif streamer.supports_updates:
        self.mode = "update"
    else:
        self.mode = "incremental"
    # Fall back to the stream's own width when the style does not pin one.
    if style and "width_" not in style and self._stream.width:
        style["width_"] = self._stream.width
    self._content = ContentWithSummary(
        StyleFields(style, processors or PlainProcessors()))
constant[Do writer-specific setup.
Parameters
----------
style : dict
Style, as passed to __init__.
streamer : interface.Stream
A stream interface that takes __init__'s `stream` and `interactive`
arguments into account.
processors : field.StyleProcessors, optional
A writer-specific processors instance. Defaults to
field.PlainProcessors().
]
name[self]._stream assign[=] name[streamer]
if name[streamer].interactive begin[:]
if name[streamer].supports_updates begin[:]
name[self].mode assign[=] constant[update]
if <ast.BoolOp object at 0x7da1b102a560> begin[:]
call[name[style]][constant[width_]] assign[=] name[self]._stream.width
name[self]._content assign[=] call[name[ContentWithSummary], parameter[call[name[StyleFields], parameter[name[style], <ast.BoolOp object at 0x7da1b102a9b0>]]]] | keyword[def] identifier[_init] ( identifier[self] , identifier[style] , identifier[streamer] , identifier[processors] = keyword[None] ):
literal[string]
identifier[self] . identifier[_stream] = identifier[streamer]
keyword[if] identifier[streamer] . identifier[interactive] :
keyword[if] identifier[streamer] . identifier[supports_updates] :
identifier[self] . identifier[mode] = literal[string]
keyword[else] :
identifier[self] . identifier[mode] = literal[string]
keyword[else] :
identifier[self] . identifier[mode] = literal[string]
keyword[if] identifier[style] keyword[and] literal[string] keyword[not] keyword[in] identifier[style] keyword[and] identifier[self] . identifier[_stream] . identifier[width] :
identifier[style] [ literal[string] ]= identifier[self] . identifier[_stream] . identifier[width]
identifier[self] . identifier[_content] = identifier[ContentWithSummary] (
identifier[StyleFields] ( identifier[style] , identifier[processors] keyword[or] identifier[PlainProcessors] ())) | def _init(self, style, streamer, processors=None):
"""Do writer-specific setup.
Parameters
----------
style : dict
Style, as passed to __init__.
streamer : interface.Stream
A stream interface that takes __init__'s `stream` and `interactive`
arguments into account.
processors : field.StyleProcessors, optional
A writer-specific processors instance. Defaults to
field.PlainProcessors().
"""
self._stream = streamer
if streamer.interactive:
if streamer.supports_updates:
self.mode = 'update' # depends on [control=['if'], data=[]]
else:
self.mode = 'incremental' # depends on [control=['if'], data=[]]
else:
self.mode = 'final'
if style and 'width_' not in style and self._stream.width:
style['width_'] = self._stream.width # depends on [control=['if'], data=[]]
self._content = ContentWithSummary(StyleFields(style, processors or PlainProcessors())) |
def identifier(self):
    """Return this node's identifier as hex-encoded bytes.

    Extended keys are identified by the Hash160 (RIPEMD160 after SHA256) of
    the public key's `key` -- exactly the data used in traditional Bitcoin
    addresses. Avoid rendering it in base58, though: it could be mistaken
    for an address, and wallet software need not accept payment to the
    chain key itself.
    """
    pub_hex = self.get_public_key_hex()
    raw_key = unhexlify(ensure_bytes(pub_hex))
    return ensure_bytes(hexlify(hash160(raw_key)))
constant[Get the identifier for this node.
Extended keys can be identified by the Hash160 (RIPEMD160 after SHA256)
of the public key's `key`. This corresponds exactly to the data used in
traditional Bitcoin addresses. It is not advised to represent this data
in base58 format though, as it may be interpreted as an address that
way (and wallet software is not required to accept payment to the chain
key itself).
]
variable[key] assign[=] call[name[self].get_public_key_hex, parameter[]]
return[call[name[ensure_bytes], parameter[call[name[hexlify], parameter[call[name[hash160], parameter[call[name[unhexlify], parameter[call[name[ensure_bytes], parameter[name[key]]]]]]]]]]]] | keyword[def] identifier[identifier] ( identifier[self] ):
literal[string]
identifier[key] = identifier[self] . identifier[get_public_key_hex] ()
keyword[return] identifier[ensure_bytes] ( identifier[hexlify] ( identifier[hash160] ( identifier[unhexlify] ( identifier[ensure_bytes] ( identifier[key] ))))) | def identifier(self):
"""Get the identifier for this node.
Extended keys can be identified by the Hash160 (RIPEMD160 after SHA256)
of the public key's `key`. This corresponds exactly to the data used in
traditional Bitcoin addresses. It is not advised to represent this data
in base58 format though, as it may be interpreted as an address that
way (and wallet software is not required to accept payment to the chain
key itself).
"""
key = self.get_public_key_hex()
return ensure_bytes(hexlify(hash160(unhexlify(ensure_bytes(key))))) |
def delete_grade(self, grade_id):
    """Deletes a ``Grade``.

    arg:    grade_id (osid.id.Id): the ``Id`` of the ``Grade`` to
            remove
    raise:  NotFound - ``grade_id`` not found
    raise:  NullArgument - ``grade_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.repository.AssetAdminSession.delete_asset_content_template
    from dlkit.abstract_osid.id.primitives import Id as ABCId
    from .objects import Grade
    collection = JSONClientValidated('grading',
                                     collection='GradeSystem',
                                     runtime=self._runtime)
    if not isinstance(grade_id, ABCId):
        raise errors.InvalidArgument('the argument is not a valid OSID Id')
    grade_system = collection.find_one({'grades._id': ObjectId(grade_id.get_identifier())})
    # BUG FIX: the original set ``found = True`` on every loop iteration
    # (so any non-empty grade list suppressed OperationFailed, and a
    # missing match then raised NameError on ``grade_map``), and it kept
    # iterating and advancing ``index`` after popping, mutating the list
    # mid-iteration. Scan with enumerate and stop at the first match.
    target_id = ObjectId(grade_id.get_identifier())
    grade_map = None
    for index, grade in enumerate(grade_system['grades']):
        if grade['_id'] == target_id:
            grade_map = grade_system['grades'].pop(index)
            break
    if grade_map is None:
        raise errors.OperationFailed()
    Grade(
        osid_object_map=grade_map,
        runtime=self._runtime,
        proxy=self._proxy)._delete()
    collection.save(grade_system)
constant[Deletes a ``Grade``.
arg: grade_id (osid.id.Id): the ``Id`` of the ``Grade`` to
remove
raise: NotFound - ``grade_id`` not found
raise: NullArgument - ``grade_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
from relative_module[dlkit.abstract_osid.id.primitives] import module[Id]
from relative_module[objects] import module[Grade]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[grading]]]
if <ast.UnaryOp object at 0x7da2054a7ee0> begin[:]
<ast.Raise object at 0x7da2054a63e0>
variable[grade_system] assign[=] call[name[collection].find_one, parameter[dictionary[[<ast.Constant object at 0x7da2054a6350>], [<ast.Call object at 0x7da2054a6410>]]]]
variable[index] assign[=] constant[0]
variable[found] assign[=] constant[False]
for taget[name[i]] in starred[call[name[grade_system]][constant[grades]]] begin[:]
if compare[call[name[i]][constant[_id]] equal[==] call[name[ObjectId], parameter[call[name[grade_id].get_identifier, parameter[]]]]] begin[:]
variable[grade_map] assign[=] call[call[name[grade_system]][constant[grades]].pop, parameter[name[index]]]
<ast.AugAssign object at 0x7da2054a51b0>
variable[found] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da2054a4520> begin[:]
<ast.Raise object at 0x7da2054a4130>
call[call[name[Grade], parameter[]]._delete, parameter[]]
call[name[collection].save, parameter[name[grade_system]]] | keyword[def] identifier[delete_grade] ( identifier[self] , identifier[grade_id] ):
literal[string]
keyword[from] identifier[dlkit] . identifier[abstract_osid] . identifier[id] . identifier[primitives] keyword[import] identifier[Id] keyword[as] identifier[ABCId]
keyword[from] . identifier[objects] keyword[import] identifier[Grade]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[grade_id] , identifier[ABCId] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
identifier[grade_system] = identifier[collection] . identifier[find_one] ({ literal[string] : identifier[ObjectId] ( identifier[grade_id] . identifier[get_identifier] ())})
identifier[index] = literal[int]
identifier[found] = keyword[False]
keyword[for] identifier[i] keyword[in] identifier[grade_system] [ literal[string] ]:
keyword[if] identifier[i] [ literal[string] ]== identifier[ObjectId] ( identifier[grade_id] . identifier[get_identifier] ()):
identifier[grade_map] = identifier[grade_system] [ literal[string] ]. identifier[pop] ( identifier[index] )
identifier[index] += literal[int]
identifier[found] = keyword[True]
keyword[if] keyword[not] identifier[found] :
keyword[raise] identifier[errors] . identifier[OperationFailed] ()
identifier[Grade] (
identifier[osid_object_map] = identifier[grade_map] ,
identifier[runtime] = identifier[self] . identifier[_runtime] ,
identifier[proxy] = identifier[self] . identifier[_proxy] ). identifier[_delete] ()
identifier[collection] . identifier[save] ( identifier[grade_system] ) | def delete_grade(self, grade_id):
"""Deletes a ``Grade``.
arg: grade_id (osid.id.Id): the ``Id`` of the ``Grade`` to
remove
raise: NotFound - ``grade_id`` not found
raise: NullArgument - ``grade_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.repository.AssetAdminSession.delete_asset_content_template
from dlkit.abstract_osid.id.primitives import Id as ABCId
from .objects import Grade
collection = JSONClientValidated('grading', collection='GradeSystem', runtime=self._runtime)
if not isinstance(grade_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id') # depends on [control=['if'], data=[]]
grade_system = collection.find_one({'grades._id': ObjectId(grade_id.get_identifier())})
index = 0
found = False
for i in grade_system['grades']:
if i['_id'] == ObjectId(grade_id.get_identifier()):
grade_map = grade_system['grades'].pop(index) # depends on [control=['if'], data=[]]
index += 1
found = True # depends on [control=['for'], data=['i']]
if not found:
raise errors.OperationFailed() # depends on [control=['if'], data=[]]
Grade(osid_object_map=grade_map, runtime=self._runtime, proxy=self._proxy)._delete()
collection.save(grade_system) |
def initialize(self):
    """Prepare the BLE provider for use.

    Must be called once, before any other calls are made to the provider;
    allocates the CoreBluetooth central manager and wires in our delegate.
    """
    # Store the manager on self before the ObjC init call, presumably so
    # delegate callbacks fired during init can already see it -- confirm.
    self._central_manager = CBCentralManager.alloc()
    self._central_manager.initWithDelegate_queue_options_(
        self._central_delegate, None, None)
constant[Initialize the BLE provider. Must be called once before any other
calls are made to the provider.
]
name[self]._central_manager assign[=] call[name[CBCentralManager].alloc, parameter[]]
call[name[self]._central_manager.initWithDelegate_queue_options_, parameter[name[self]._central_delegate, constant[None], constant[None]]] | keyword[def] identifier[initialize] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_central_manager] = identifier[CBCentralManager] . identifier[alloc] ()
identifier[self] . identifier[_central_manager] . identifier[initWithDelegate_queue_options_] ( identifier[self] . identifier[_central_delegate] ,
keyword[None] , keyword[None] ) | def initialize(self):
"""Initialize the BLE provider. Must be called once before any other
calls are made to the provider.
"""
# Setup the central manager and its delegate.
self._central_manager = CBCentralManager.alloc()
self._central_manager.initWithDelegate_queue_options_(self._central_delegate, None, None) |
def build_option_parser(parser):
    """Hook to add global options."""
    version_help = ("Data processing API version, default=" +
                    DEFAULT_DATA_PROCESSING_API_VERSION +
                    ' (Env: OS_DATA_PROCESSING_API_VERSION)')
    parser.add_argument(
        "--os-data-processing-api-version",
        metavar="<data-processing-api-version>",
        default=utils.env('OS_DATA_PROCESSING_API_VERSION',
                          default=DEFAULT_DATA_PROCESSING_API_VERSION),
        help=version_help)
    parser.add_argument(
        "--os-data-processing-url",
        default=utils.env("OS_DATA_PROCESSING_URL"),
        help=("Data processing API URL, "
              "(Env: OS_DATA_PROCESSING_API_URL)"))
    return parser
constant[Hook to add global options.]
call[name[parser].add_argument, parameter[constant[--os-data-processing-api-version]]]
call[name[parser].add_argument, parameter[constant[--os-data-processing-url]]]
return[name[parser]] | keyword[def] identifier[build_option_parser] ( identifier[parser] ):
literal[string]
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[metavar] = literal[string] ,
identifier[default] = identifier[utils] . identifier[env] (
literal[string] ,
identifier[default] = identifier[DEFAULT_DATA_PROCESSING_API_VERSION] ),
identifier[help] =( literal[string] +
identifier[DEFAULT_DATA_PROCESSING_API_VERSION] +
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[default] = identifier[utils] . identifier[env] (
literal[string] ),
identifier[help] =( literal[string]
literal[string] ))
keyword[return] identifier[parser] | def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-data-processing-api-version', metavar='<data-processing-api-version>', default=utils.env('OS_DATA_PROCESSING_API_VERSION', default=DEFAULT_DATA_PROCESSING_API_VERSION), help='Data processing API version, default=' + DEFAULT_DATA_PROCESSING_API_VERSION + ' (Env: OS_DATA_PROCESSING_API_VERSION)')
parser.add_argument('--os-data-processing-url', default=utils.env('OS_DATA_PROCESSING_URL'), help='Data processing API URL, (Env: OS_DATA_PROCESSING_API_URL)')
return parser |
def item_length(self):
    """
    Return an SArray holding the length of each element of this SArray.

    Defined only for SArrays of type list, dict, or array. A missing input
    element yields a missing output element. Equivalent to, but faster
    than::

        sa.apply(lambda x: len(x) if x is not None else None)

    Returns
    -------
    out_sf : SArray
        An SArray whose i-th entry is the length of the i-th input element.

    Examples
    --------
    >>> sa = SArray([
    ...    {"is_restaurant": 1, "is_electronics": 0},
    ...    {"is_restaurant": 1, "is_retail": 1, "is_electronics": 0},
    ...    {"is_restaurant": 0, "is_retail": 1, "is_electronics": 0},
    ...    {"is_restaurant": 0},
    ...    {"is_restaurant": 1, "is_electronics": 1},
    ...    None])
    >>> sa.item_length()
    dtype: int
    Rows: 6
    [2, 3, 3, 1, 2, None]
    """
    supported = (list, dict, array.array)
    if self.dtype not in supported:
        raise TypeError("item_length() is only applicable for SArray of type list, dict and array.")
    with cython_context():
        return SArray(_proxy=self.__proxy__.item_length())
constant[
Length of each element in the current SArray.
Only works on SArrays of dict, array, or list type. If a given element
is a missing value, then the output elements is also a missing value.
This function is equivalent to the following but more performant:
sa_item_len = sa.apply(lambda x: len(x) if x is not None else None)
Returns
-------
out_sf : SArray
A new SArray, each element in the SArray is the len of the corresponding
items in original SArray.
Examples
--------
>>> sa = SArray([
... {"is_restaurant": 1, "is_electronics": 0},
... {"is_restaurant": 1, "is_retail": 1, "is_electronics": 0},
... {"is_restaurant": 0, "is_retail": 1, "is_electronics": 0},
... {"is_restaurant": 0},
... {"is_restaurant": 1, "is_electronics": 1},
... None])
>>> sa.item_length()
dtype: int
Rows: 6
[2, 3, 3, 1, 2, None]
]
if compare[name[self].dtype <ast.NotIn object at 0x7da2590d7190> list[[<ast.Name object at 0x7da1b1f0b640>, <ast.Name object at 0x7da1b1f0a7a0>, <ast.Attribute object at 0x7da1b1f085e0>]]] begin[:]
<ast.Raise object at 0x7da1b1f0a0e0>
with call[name[cython_context], parameter[]] begin[:]
return[call[name[SArray], parameter[]]] | keyword[def] identifier[item_length] ( identifier[self] ):
literal[string]
keyword[if] ( identifier[self] . identifier[dtype] keyword[not] keyword[in] [ identifier[list] , identifier[dict] , identifier[array] . identifier[array] ]):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[with] identifier[cython_context] ():
keyword[return] identifier[SArray] ( identifier[_proxy] = identifier[self] . identifier[__proxy__] . identifier[item_length] ()) | def item_length(self):
"""
Length of each element in the current SArray.
Only works on SArrays of dict, array, or list type. If a given element
is a missing value, then the output elements is also a missing value.
This function is equivalent to the following but more performant:
sa_item_len = sa.apply(lambda x: len(x) if x is not None else None)
Returns
-------
out_sf : SArray
A new SArray, each element in the SArray is the len of the corresponding
items in original SArray.
Examples
--------
>>> sa = SArray([
... {"is_restaurant": 1, "is_electronics": 0},
... {"is_restaurant": 1, "is_retail": 1, "is_electronics": 0},
... {"is_restaurant": 0, "is_retail": 1, "is_electronics": 0},
... {"is_restaurant": 0},
... {"is_restaurant": 1, "is_electronics": 1},
... None])
>>> sa.item_length()
dtype: int
Rows: 6
[2, 3, 3, 1, 2, None]
"""
if self.dtype not in [list, dict, array.array]:
raise TypeError('item_length() is only applicable for SArray of type list, dict and array.') # depends on [control=['if'], data=[]]
with cython_context():
return SArray(_proxy=self.__proxy__.item_length()) # depends on [control=['with'], data=[]] |
def ad_stat(data):
    """
    Anderson-Darling test statistic for sorted samples drawn from U(0, 1).

    The statistic is undefined when any value is exactly 0 or 1: the result
    is infinite and a divide-by-zero warning is emitted, which can be
    silenced or raised via numpy.errstate(divide=...).
    """
    n = len(data)
    odd_weights = arange(1, 2 * n, 2)  # 1, 3, 5, ..., 2n - 1
    log_terms = log(data * (1 - data[::-1]))
    return -n - (odd_weights * log_terms).sum() / n
constant[
Calculates the Anderson-Darling statistic for sorted values from U(0, 1).
The statistic is not defined if any of the values is exactly 0 or 1. You
will get infinity as a result and a divide-by-zero warning for such values.
The warning can be silenced or raised using numpy.errstate(divide=...).
]
variable[samples] assign[=] call[name[len], parameter[name[data]]]
variable[factors] assign[=] call[name[arange], parameter[constant[1], binary_operation[constant[2] * name[samples]], constant[2]]]
return[binary_operation[<ast.UnaryOp object at 0x7da1b18650c0> - binary_operation[call[binary_operation[name[factors] * call[name[log], parameter[binary_operation[name[data] * binary_operation[constant[1] - call[name[data]][<ast.Slice object at 0x7da1b1866fe0>]]]]]].sum, parameter[]] / name[samples]]]] | keyword[def] identifier[ad_stat] ( identifier[data] ):
literal[string]
identifier[samples] = identifier[len] ( identifier[data] )
identifier[factors] = identifier[arange] ( literal[int] , literal[int] * identifier[samples] , literal[int] )
keyword[return] - identifier[samples] -( identifier[factors] * identifier[log] ( identifier[data] *( literal[int] - identifier[data] [::- literal[int] ]))). identifier[sum] ()/ identifier[samples] | def ad_stat(data):
"""
Calculates the Anderson-Darling statistic for sorted values from U(0, 1).
The statistic is not defined if any of the values is exactly 0 or 1. You
will get infinity as a result and a divide-by-zero warning for such values.
The warning can be silenced or raised using numpy.errstate(divide=...).
"""
samples = len(data)
factors = arange(1, 2 * samples, 2)
return -samples - (factors * log(data * (1 - data[::-1]))).sum() / samples |
def initialized(name, **kwargs):
r'''
Defines a new VM with specified arguments, but does not start it.
:param name: the Salt_id node name you wish your VM to have.
Each machine must be initialized individually using this function
or the "vagrant.running" function, or the vagrant.init execution module call.
This command will not change the state of a running or paused machine.
Possible keyword arguments:
- cwd: The directory (path) containing the Vagrantfile
- machine: ('') the name of the machine (in the Vagrantfile) if not default
- vagrant_runas: ('root') the username who owns the vagrantbox file
- vagrant_provider: the provider to run the VM (usually 'virtualbox')
- vm: ({}) a dictionary containing these or other keyword arguments
.. code-block:: yaml
node_name1:
vagrant.initialized
- cwd: /projects/my_project
- vagrant_runas: my_username
- machine: machine1
node_name2:
vagrant.initialized
- cwd: /projects/my_project
- vagrant_runas: my_username
- machine: machine2
start_nodes:
vagrant.start:
- name: node_name?
'''
ret = {'name': name,
'changes': {},
'result': True,
'comment': 'The VM is already correctly defined'
}
# define a machine to start later
ret, kwargs = _find_init_change(name, ret, **kwargs)
if ret['changes'] == {}:
return ret
kwargs['start'] = False
__salt__['vagrant.init'](name, **kwargs)
ret['changes'][name] = 'Node initialized'
ret['comment'] = 'Node {0} defined but not started.'.format(name)
return ret | def function[initialized, parameter[name]]:
constant[
Defines a new VM with specified arguments, but does not start it.
:param name: the Salt_id node name you wish your VM to have.
Each machine must be initialized individually using this function
or the "vagrant.running" function, or the vagrant.init execution module call.
This command will not change the state of a running or paused machine.
Possible keyword arguments:
- cwd: The directory (path) containing the Vagrantfile
- machine: ('') the name of the machine (in the Vagrantfile) if not default
- vagrant_runas: ('root') the username who owns the vagrantbox file
- vagrant_provider: the provider to run the VM (usually 'virtualbox')
- vm: ({}) a dictionary containing these or other keyword arguments
.. code-block:: yaml
node_name1:
vagrant.initialized
- cwd: /projects/my_project
- vagrant_runas: my_username
- machine: machine1
node_name2:
vagrant.initialized
- cwd: /projects/my_project
- vagrant_runas: my_username
- machine: machine2
start_nodes:
vagrant.start:
- name: node_name?
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b20ec640>, <ast.Constant object at 0x7da1b20ec760>, <ast.Constant object at 0x7da1b20ecf10>, <ast.Constant object at 0x7da1b20edf60>], [<ast.Name object at 0x7da1b20eca60>, <ast.Dict object at 0x7da1b20ec1c0>, <ast.Constant object at 0x7da1b20ed600>, <ast.Constant object at 0x7da1b20ed330>]]
<ast.Tuple object at 0x7da1b20ec190> assign[=] call[name[_find_init_change], parameter[name[name], name[ret]]]
if compare[call[name[ret]][constant[changes]] equal[==] dictionary[[], []]] begin[:]
return[name[ret]]
call[name[kwargs]][constant[start]] assign[=] constant[False]
call[call[name[__salt__]][constant[vagrant.init]], parameter[name[name]]]
call[call[name[ret]][constant[changes]]][name[name]] assign[=] constant[Node initialized]
call[name[ret]][constant[comment]] assign[=] call[constant[Node {0} defined but not started.].format, parameter[name[name]]]
return[name[ret]] | keyword[def] identifier[initialized] ( identifier[name] ,** identifier[kwargs] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : keyword[True] ,
literal[string] : literal[string]
}
identifier[ret] , identifier[kwargs] = identifier[_find_init_change] ( identifier[name] , identifier[ret] ,** identifier[kwargs] )
keyword[if] identifier[ret] [ literal[string] ]=={}:
keyword[return] identifier[ret]
identifier[kwargs] [ literal[string] ]= keyword[False]
identifier[__salt__] [ literal[string] ]( identifier[name] ,** identifier[kwargs] )
identifier[ret] [ literal[string] ][ identifier[name] ]= literal[string]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret] | def initialized(name, **kwargs):
"""
Defines a new VM with specified arguments, but does not start it.
:param name: the Salt_id node name you wish your VM to have.
Each machine must be initialized individually using this function
or the "vagrant.running" function, or the vagrant.init execution module call.
This command will not change the state of a running or paused machine.
Possible keyword arguments:
- cwd: The directory (path) containing the Vagrantfile
- machine: ('') the name of the machine (in the Vagrantfile) if not default
- vagrant_runas: ('root') the username who owns the vagrantbox file
- vagrant_provider: the provider to run the VM (usually 'virtualbox')
- vm: ({}) a dictionary containing these or other keyword arguments
.. code-block:: yaml
node_name1:
vagrant.initialized
- cwd: /projects/my_project
- vagrant_runas: my_username
- machine: machine1
node_name2:
vagrant.initialized
- cwd: /projects/my_project
- vagrant_runas: my_username
- machine: machine2
start_nodes:
vagrant.start:
- name: node_name?
"""
ret = {'name': name, 'changes': {}, 'result': True, 'comment': 'The VM is already correctly defined'}
# define a machine to start later
(ret, kwargs) = _find_init_change(name, ret, **kwargs)
if ret['changes'] == {}:
return ret # depends on [control=['if'], data=[]]
kwargs['start'] = False
__salt__['vagrant.init'](name, **kwargs)
ret['changes'][name] = 'Node initialized'
ret['comment'] = 'Node {0} defined but not started.'.format(name)
return ret |
def get_virtual_transactions(blockchain_name, blockchain_opts, first_block_height, last_block_height, tx_filter=None, **hints):
"""
Get the sequence of virtualchain transactions from a particular blockchain over a given range of block heights.
Returns a list of tuples in the format of [(block height, [txs])], where
each tx in [txs] is the parsed transaction. The parsed transaction will conform to... # TODO write a spec for this
Each transaction has at least the following fields:
`version`: the version of the transaction
`txindex`: the index into the block where this tx occurs
`ins`: a list of transaction inputs, where each member is a dict with:
`outpoint`: a dict of {'hash': txid of transaction that fed it in, 'index': the index into the feeder tx's outputs list}
`script`: the signature script for this input
`outs`: a list of transaction outputs, where each member is a dict with:
`value`: the amount of currency units spent (in the fundamental units of the chain)
`script`: the spending script for this input
`senders`: a list of information in 1-to-1 correspondence with each input regarding the transactions that funded it:
`value`: the amount of currency units sent (in fundamental units of the chain)
`script_pubkey`: the spending script for the sending transaction
Returns [(block height, [txs])] on success
Returns None on error.
Raises ValueError on unknown blockchain
"""
if blockchain_name == 'bitcoin':
return get_bitcoin_virtual_transactions(blockchain_opts, first_block_height, last_block_height, tx_filter=tx_filter, **hints)
else:
raise ValueError("Unknown blockchain {}".format(blockchain_name)) | def function[get_virtual_transactions, parameter[blockchain_name, blockchain_opts, first_block_height, last_block_height, tx_filter]]:
constant[
Get the sequence of virtualchain transactions from a particular blockchain over a given range of block heights.
Returns a list of tuples in the format of [(block height, [txs])], where
each tx in [txs] is the parsed transaction. The parsed transaction will conform to... # TODO write a spec for this
Each transaction has at least the following fields:
`version`: the version of the transaction
`txindex`: the index into the block where this tx occurs
`ins`: a list of transaction inputs, where each member is a dict with:
`outpoint`: a dict of {'hash': txid of transaction that fed it in, 'index': the index into the feeder tx's outputs list}
`script`: the signature script for this input
`outs`: a list of transaction outputs, where each member is a dict with:
`value`: the amount of currency units spent (in the fundamental units of the chain)
`script`: the spending script for this input
`senders`: a list of information in 1-to-1 correspondence with each input regarding the transactions that funded it:
`value`: the amount of currency units sent (in fundamental units of the chain)
`script_pubkey`: the spending script for the sending transaction
Returns [(block height, [txs])] on success
Returns None on error.
Raises ValueError on unknown blockchain
]
if compare[name[blockchain_name] equal[==] constant[bitcoin]] begin[:]
return[call[name[get_bitcoin_virtual_transactions], parameter[name[blockchain_opts], name[first_block_height], name[last_block_height]]]] | keyword[def] identifier[get_virtual_transactions] ( identifier[blockchain_name] , identifier[blockchain_opts] , identifier[first_block_height] , identifier[last_block_height] , identifier[tx_filter] = keyword[None] ,** identifier[hints] ):
literal[string]
keyword[if] identifier[blockchain_name] == literal[string] :
keyword[return] identifier[get_bitcoin_virtual_transactions] ( identifier[blockchain_opts] , identifier[first_block_height] , identifier[last_block_height] , identifier[tx_filter] = identifier[tx_filter] ,** identifier[hints] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[blockchain_name] )) | def get_virtual_transactions(blockchain_name, blockchain_opts, first_block_height, last_block_height, tx_filter=None, **hints):
"""
Get the sequence of virtualchain transactions from a particular blockchain over a given range of block heights.
Returns a list of tuples in the format of [(block height, [txs])], where
each tx in [txs] is the parsed transaction. The parsed transaction will conform to... # TODO write a spec for this
Each transaction has at least the following fields:
`version`: the version of the transaction
`txindex`: the index into the block where this tx occurs
`ins`: a list of transaction inputs, where each member is a dict with:
`outpoint`: a dict of {'hash': txid of transaction that fed it in, 'index': the index into the feeder tx's outputs list}
`script`: the signature script for this input
`outs`: a list of transaction outputs, where each member is a dict with:
`value`: the amount of currency units spent (in the fundamental units of the chain)
`script`: the spending script for this input
`senders`: a list of information in 1-to-1 correspondence with each input regarding the transactions that funded it:
`value`: the amount of currency units sent (in fundamental units of the chain)
`script_pubkey`: the spending script for the sending transaction
Returns [(block height, [txs])] on success
Returns None on error.
Raises ValueError on unknown blockchain
"""
if blockchain_name == 'bitcoin':
return get_bitcoin_virtual_transactions(blockchain_opts, first_block_height, last_block_height, tx_filter=tx_filter, **hints) # depends on [control=['if'], data=[]]
else:
raise ValueError('Unknown blockchain {}'.format(blockchain_name)) |
def addNoiseToVector(inputVector, noiseLevel, vectorType):
"""
Add noise to SDRs
@param inputVector (array) binary vector to be corrupted
@param noiseLevel (float) amount of noise to be applied on the vector.
@param vectorType (string) "sparse" or "dense"
"""
if vectorType == 'sparse':
corruptSparseVector(inputVector, noiseLevel)
elif vectorType == 'dense':
corruptDenseVector(inputVector, noiseLevel)
else:
raise ValueError("vectorType must be 'sparse' or 'dense' ") | def function[addNoiseToVector, parameter[inputVector, noiseLevel, vectorType]]:
constant[
Add noise to SDRs
@param inputVector (array) binary vector to be corrupted
@param noiseLevel (float) amount of noise to be applied on the vector.
@param vectorType (string) "sparse" or "dense"
]
if compare[name[vectorType] equal[==] constant[sparse]] begin[:]
call[name[corruptSparseVector], parameter[name[inputVector], name[noiseLevel]]] | keyword[def] identifier[addNoiseToVector] ( identifier[inputVector] , identifier[noiseLevel] , identifier[vectorType] ):
literal[string]
keyword[if] identifier[vectorType] == literal[string] :
identifier[corruptSparseVector] ( identifier[inputVector] , identifier[noiseLevel] )
keyword[elif] identifier[vectorType] == literal[string] :
identifier[corruptDenseVector] ( identifier[inputVector] , identifier[noiseLevel] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def addNoiseToVector(inputVector, noiseLevel, vectorType):
"""
Add noise to SDRs
@param inputVector (array) binary vector to be corrupted
@param noiseLevel (float) amount of noise to be applied on the vector.
@param vectorType (string) "sparse" or "dense"
"""
if vectorType == 'sparse':
corruptSparseVector(inputVector, noiseLevel) # depends on [control=['if'], data=[]]
elif vectorType == 'dense':
corruptDenseVector(inputVector, noiseLevel) # depends on [control=['if'], data=[]]
else:
raise ValueError("vectorType must be 'sparse' or 'dense' ") |
def tofile(self, fileobj):
"""
write a cache object to the fileobj as a lal cache file
"""
for entry in self:
print >>fileobj, str(entry)
fileobj.close() | def function[tofile, parameter[self, fileobj]]:
constant[
write a cache object to the fileobj as a lal cache file
]
for taget[name[entry]] in starred[name[self]] begin[:]
tuple[[<ast.BinOp object at 0x7da18f723ca0>, <ast.Call object at 0x7da18f7207c0>]]
call[name[fileobj].close, parameter[]] | keyword[def] identifier[tofile] ( identifier[self] , identifier[fileobj] ):
literal[string]
keyword[for] identifier[entry] keyword[in] identifier[self] :
identifier[print] >> identifier[fileobj] , identifier[str] ( identifier[entry] )
identifier[fileobj] . identifier[close] () | def tofile(self, fileobj):
"""
write a cache object to the fileobj as a lal cache file
"""
for entry in self:
(print >> fileobj, str(entry)) # depends on [control=['for'], data=['entry']]
fileobj.close() |
def delete_firewall_rule(self, datacenter_id, server_id,
nic_id, firewall_rule_id):
"""
Removes a firewall rule from the NIC.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param server_id: The unique ID of the server.
:type server_id: ``str``
:param nic_id: The unique ID of the NIC.
:type nic_id: ``str``
:param firewall_rule_id: The unique ID of the firewall rule.
:type firewall_rule_id: ``str``
"""
response = self._perform_request(
url='/datacenters/%s/servers/%s/nics/%s/firewallrules/%s' % (
datacenter_id,
server_id,
nic_id,
firewall_rule_id),
method='DELETE')
return response | def function[delete_firewall_rule, parameter[self, datacenter_id, server_id, nic_id, firewall_rule_id]]:
constant[
Removes a firewall rule from the NIC.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param server_id: The unique ID of the server.
:type server_id: ``str``
:param nic_id: The unique ID of the NIC.
:type nic_id: ``str``
:param firewall_rule_id: The unique ID of the firewall rule.
:type firewall_rule_id: ``str``
]
variable[response] assign[=] call[name[self]._perform_request, parameter[]]
return[name[response]] | keyword[def] identifier[delete_firewall_rule] ( identifier[self] , identifier[datacenter_id] , identifier[server_id] ,
identifier[nic_id] , identifier[firewall_rule_id] ):
literal[string]
identifier[response] = identifier[self] . identifier[_perform_request] (
identifier[url] = literal[string] %(
identifier[datacenter_id] ,
identifier[server_id] ,
identifier[nic_id] ,
identifier[firewall_rule_id] ),
identifier[method] = literal[string] )
keyword[return] identifier[response] | def delete_firewall_rule(self, datacenter_id, server_id, nic_id, firewall_rule_id):
"""
Removes a firewall rule from the NIC.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param server_id: The unique ID of the server.
:type server_id: ``str``
:param nic_id: The unique ID of the NIC.
:type nic_id: ``str``
:param firewall_rule_id: The unique ID of the firewall rule.
:type firewall_rule_id: ``str``
"""
response = self._perform_request(url='/datacenters/%s/servers/%s/nics/%s/firewallrules/%s' % (datacenter_id, server_id, nic_id, firewall_rule_id), method='DELETE')
return response |
def is_dsub_operation(op):
"""Determine if a pipelines operation is a dsub request.
We don't have a rigorous way to identify an operation as being submitted
by dsub. Our best option is to check for certain fields that have always
been part of dsub operations.
- labels: job-id, job-name, and user-id have always existed. The dsub-version
label has always existed for the google-v2 provider.
Args:
op: a pipelines operation.
Returns:
Boolean, true if the pipeline run was generated by dsub.
"""
if not is_pipeline(op):
return False
for name in ['dsub-version', 'job-id', 'job-name', 'user-id']:
if not get_label(op, name):
return False
return True | def function[is_dsub_operation, parameter[op]]:
constant[Determine if a pipelines operation is a dsub request.
We don't have a rigorous way to identify an operation as being submitted
by dsub. Our best option is to check for certain fields that have always
been part of dsub operations.
- labels: job-id, job-name, and user-id have always existed. The dsub-version
label has always existed for the google-v2 provider.
Args:
op: a pipelines operation.
Returns:
Boolean, true if the pipeline run was generated by dsub.
]
if <ast.UnaryOp object at 0x7da1b0088dc0> begin[:]
return[constant[False]]
for taget[name[name]] in starred[list[[<ast.Constant object at 0x7da1b0089330>, <ast.Constant object at 0x7da1b00893f0>, <ast.Constant object at 0x7da1b0089300>, <ast.Constant object at 0x7da1b0089870>]]] begin[:]
if <ast.UnaryOp object at 0x7da1b00898d0> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[is_dsub_operation] ( identifier[op] ):
literal[string]
keyword[if] keyword[not] identifier[is_pipeline] ( identifier[op] ):
keyword[return] keyword[False]
keyword[for] identifier[name] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[if] keyword[not] identifier[get_label] ( identifier[op] , identifier[name] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_dsub_operation(op):
"""Determine if a pipelines operation is a dsub request.
We don't have a rigorous way to identify an operation as being submitted
by dsub. Our best option is to check for certain fields that have always
been part of dsub operations.
- labels: job-id, job-name, and user-id have always existed. The dsub-version
label has always existed for the google-v2 provider.
Args:
op: a pipelines operation.
Returns:
Boolean, true if the pipeline run was generated by dsub.
"""
if not is_pipeline(op):
return False # depends on [control=['if'], data=[]]
for name in ['dsub-version', 'job-id', 'job-name', 'user-id']:
if not get_label(op, name):
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
return True |
def has_max_attempts(self):
"""stub"""
if 'maxAttempts' not in self.my_osid_object._my_map or \
self.my_osid_object._my_map['maxAttempts'] is None:
return False
return True | def function[has_max_attempts, parameter[self]]:
constant[stub]
if <ast.BoolOp object at 0x7da18f812650> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[has_max_attempts] ( identifier[self] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[my_osid_object] . identifier[_my_map] keyword[or] identifier[self] . identifier[my_osid_object] . identifier[_my_map] [ literal[string] ] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def has_max_attempts(self):
"""stub"""
if 'maxAttempts' not in self.my_osid_object._my_map or self.my_osid_object._my_map['maxAttempts'] is None:
return False # depends on [control=['if'], data=[]]
return True |
def XOR(classical_reg1, classical_reg2):
"""
Produce an exclusive OR instruction.
:param classical_reg1: The first classical register, which gets modified.
:param classical_reg2: The second classical register or immediate value.
:return: A ClassicalOr instance.
"""
left, right = unpack_reg_val_pair(classical_reg1, classical_reg2)
return ClassicalExclusiveOr(left, right) | def function[XOR, parameter[classical_reg1, classical_reg2]]:
constant[
Produce an exclusive OR instruction.
:param classical_reg1: The first classical register, which gets modified.
:param classical_reg2: The second classical register or immediate value.
:return: A ClassicalOr instance.
]
<ast.Tuple object at 0x7da1b1bc8730> assign[=] call[name[unpack_reg_val_pair], parameter[name[classical_reg1], name[classical_reg2]]]
return[call[name[ClassicalExclusiveOr], parameter[name[left], name[right]]]] | keyword[def] identifier[XOR] ( identifier[classical_reg1] , identifier[classical_reg2] ):
literal[string]
identifier[left] , identifier[right] = identifier[unpack_reg_val_pair] ( identifier[classical_reg1] , identifier[classical_reg2] )
keyword[return] identifier[ClassicalExclusiveOr] ( identifier[left] , identifier[right] ) | def XOR(classical_reg1, classical_reg2):
"""
Produce an exclusive OR instruction.
:param classical_reg1: The first classical register, which gets modified.
:param classical_reg2: The second classical register or immediate value.
:return: A ClassicalOr instance.
"""
(left, right) = unpack_reg_val_pair(classical_reg1, classical_reg2)
return ClassicalExclusiveOr(left, right) |
def keyPressEvent(self, event):
"""Qt Override."""
key = event.key()
if key in [Qt.Key_Enter, Qt.Key_Return]:
self.show_editor()
elif key in [Qt.Key_Tab]:
self.finder.setFocus()
elif key in [Qt.Key_Backtab]:
self.parent().reset_btn.setFocus()
elif key in [Qt.Key_Up, Qt.Key_Down, Qt.Key_Left, Qt.Key_Right]:
super(ShortcutsTable, self).keyPressEvent(event)
elif key not in [Qt.Key_Escape, Qt.Key_Space]:
text = event.text()
if text:
if re.search(VALID_FINDER_CHARS, text) is not None:
self.finder.setFocus()
self.finder.set_text(text)
elif key in [Qt.Key_Escape]:
self.finder.keyPressEvent(event) | def function[keyPressEvent, parameter[self, event]]:
constant[Qt Override.]
variable[key] assign[=] call[name[event].key, parameter[]]
if compare[name[key] in list[[<ast.Attribute object at 0x7da1b26af760>, <ast.Attribute object at 0x7da1b26ad8d0>]]] begin[:]
call[name[self].show_editor, parameter[]] | keyword[def] identifier[keyPressEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[key] = identifier[event] . identifier[key] ()
keyword[if] identifier[key] keyword[in] [ identifier[Qt] . identifier[Key_Enter] , identifier[Qt] . identifier[Key_Return] ]:
identifier[self] . identifier[show_editor] ()
keyword[elif] identifier[key] keyword[in] [ identifier[Qt] . identifier[Key_Tab] ]:
identifier[self] . identifier[finder] . identifier[setFocus] ()
keyword[elif] identifier[key] keyword[in] [ identifier[Qt] . identifier[Key_Backtab] ]:
identifier[self] . identifier[parent] (). identifier[reset_btn] . identifier[setFocus] ()
keyword[elif] identifier[key] keyword[in] [ identifier[Qt] . identifier[Key_Up] , identifier[Qt] . identifier[Key_Down] , identifier[Qt] . identifier[Key_Left] , identifier[Qt] . identifier[Key_Right] ]:
identifier[super] ( identifier[ShortcutsTable] , identifier[self] ). identifier[keyPressEvent] ( identifier[event] )
keyword[elif] identifier[key] keyword[not] keyword[in] [ identifier[Qt] . identifier[Key_Escape] , identifier[Qt] . identifier[Key_Space] ]:
identifier[text] = identifier[event] . identifier[text] ()
keyword[if] identifier[text] :
keyword[if] identifier[re] . identifier[search] ( identifier[VALID_FINDER_CHARS] , identifier[text] ) keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[finder] . identifier[setFocus] ()
identifier[self] . identifier[finder] . identifier[set_text] ( identifier[text] )
keyword[elif] identifier[key] keyword[in] [ identifier[Qt] . identifier[Key_Escape] ]:
identifier[self] . identifier[finder] . identifier[keyPressEvent] ( identifier[event] ) | def keyPressEvent(self, event):
"""Qt Override."""
key = event.key()
if key in [Qt.Key_Enter, Qt.Key_Return]:
self.show_editor() # depends on [control=['if'], data=[]]
elif key in [Qt.Key_Tab]:
self.finder.setFocus() # depends on [control=['if'], data=[]]
elif key in [Qt.Key_Backtab]:
self.parent().reset_btn.setFocus() # depends on [control=['if'], data=[]]
elif key in [Qt.Key_Up, Qt.Key_Down, Qt.Key_Left, Qt.Key_Right]:
super(ShortcutsTable, self).keyPressEvent(event) # depends on [control=['if'], data=[]]
elif key not in [Qt.Key_Escape, Qt.Key_Space]:
text = event.text()
if text:
if re.search(VALID_FINDER_CHARS, text) is not None:
self.finder.setFocus()
self.finder.set_text(text) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif key in [Qt.Key_Escape]:
self.finder.keyPressEvent(event) # depends on [control=['if'], data=[]] |
def _merge_headers(self, call_specific_headers):
"""
Merge headers from different sources together. Headers passed to the
post/get methods have highest priority, then headers associated with
the connection object itself have next priority.
:param call_specific_headers: A header dict from the get/post call, or
None (the default for those methods).
:return: A key-case-insensitive MutableMapping object which contains
the merged headers. (This doesn't actually return a dict.)
"""
# A case-insensitive mapping is necessary here so that there is
# predictable behavior. If a plain dict were used, you'd get keys in
# the merged dict which differ only in case. The requests library
# would merge them internally, and it would be unpredictable which key
# is chosen for the final set of headers. Another possible approach
# would be to upper/lower-case everything, but this seemed easier. On
# the other hand, I don't know if CaseInsensitiveDict is public API...?
# First establish defaults
merged_headers = requests.structures.CaseInsensitiveDict({
"User-Agent": self.user_agent
})
# Then overlay with specifics from post/get methods
if call_specific_headers:
merged_headers.update(call_specific_headers)
# Special "User-Agent" header check, to ensure one is always sent.
# The call-specific overlay could have null'd out that header.
if not merged_headers.get("User-Agent"):
merged_headers["User-Agent"] = self.user_agent
return merged_headers | def function[_merge_headers, parameter[self, call_specific_headers]]:
constant[
Merge headers from different sources together. Headers passed to the
post/get methods have highest priority, then headers associated with
the connection object itself have next priority.
:param call_specific_headers: A header dict from the get/post call, or
None (the default for those methods).
:return: A key-case-insensitive MutableMapping object which contains
the merged headers. (This doesn't actually return a dict.)
]
variable[merged_headers] assign[=] call[name[requests].structures.CaseInsensitiveDict, parameter[dictionary[[<ast.Constant object at 0x7da1aff6f880>], [<ast.Attribute object at 0x7da1aff6f0d0>]]]]
if name[call_specific_headers] begin[:]
call[name[merged_headers].update, parameter[name[call_specific_headers]]]
if <ast.UnaryOp object at 0x7da1aff6c6a0> begin[:]
call[name[merged_headers]][constant[User-Agent]] assign[=] name[self].user_agent
return[name[merged_headers]] | keyword[def] identifier[_merge_headers] ( identifier[self] , identifier[call_specific_headers] ):
literal[string]
identifier[merged_headers] = identifier[requests] . identifier[structures] . identifier[CaseInsensitiveDict] ({
literal[string] : identifier[self] . identifier[user_agent]
})
keyword[if] identifier[call_specific_headers] :
identifier[merged_headers] . identifier[update] ( identifier[call_specific_headers] )
keyword[if] keyword[not] identifier[merged_headers] . identifier[get] ( literal[string] ):
identifier[merged_headers] [ literal[string] ]= identifier[self] . identifier[user_agent]
keyword[return] identifier[merged_headers] | def _merge_headers(self, call_specific_headers):
"""
Merge headers from different sources together. Headers passed to the
post/get methods have highest priority, then headers associated with
the connection object itself have next priority.
:param call_specific_headers: A header dict from the get/post call, or
None (the default for those methods).
:return: A key-case-insensitive MutableMapping object which contains
the merged headers. (This doesn't actually return a dict.)
"""
# A case-insensitive mapping is necessary here so that there is
# predictable behavior. If a plain dict were used, you'd get keys in
# the merged dict which differ only in case. The requests library
# would merge them internally, and it would be unpredictable which key
# is chosen for the final set of headers. Another possible approach
# would be to upper/lower-case everything, but this seemed easier. On
# the other hand, I don't know if CaseInsensitiveDict is public API...?
# First establish defaults
merged_headers = requests.structures.CaseInsensitiveDict({'User-Agent': self.user_agent})
# Then overlay with specifics from post/get methods
if call_specific_headers:
merged_headers.update(call_specific_headers) # depends on [control=['if'], data=[]]
# Special "User-Agent" header check, to ensure one is always sent.
# The call-specific overlay could have null'd out that header.
if not merged_headers.get('User-Agent'):
merged_headers['User-Agent'] = self.user_agent # depends on [control=['if'], data=[]]
return merged_headers |
def get_sitetree():
"""Returns SiteTree (thread-singleton) object, implementing utility methods.
:rtype: SiteTree
"""
sitetree = getattr(_THREAD_LOCAL, _THREAD_SITETREE, None)
if sitetree is None:
sitetree = SiteTree()
setattr(_THREAD_LOCAL, _THREAD_SITETREE, sitetree)
return sitetree | def function[get_sitetree, parameter[]]:
constant[Returns SiteTree (thread-singleton) object, implementing utility methods.
:rtype: SiteTree
]
variable[sitetree] assign[=] call[name[getattr], parameter[name[_THREAD_LOCAL], name[_THREAD_SITETREE], constant[None]]]
if compare[name[sitetree] is constant[None]] begin[:]
variable[sitetree] assign[=] call[name[SiteTree], parameter[]]
call[name[setattr], parameter[name[_THREAD_LOCAL], name[_THREAD_SITETREE], name[sitetree]]]
return[name[sitetree]] | keyword[def] identifier[get_sitetree] ():
literal[string]
identifier[sitetree] = identifier[getattr] ( identifier[_THREAD_LOCAL] , identifier[_THREAD_SITETREE] , keyword[None] )
keyword[if] identifier[sitetree] keyword[is] keyword[None] :
identifier[sitetree] = identifier[SiteTree] ()
identifier[setattr] ( identifier[_THREAD_LOCAL] , identifier[_THREAD_SITETREE] , identifier[sitetree] )
keyword[return] identifier[sitetree] | def get_sitetree():
"""Returns SiteTree (thread-singleton) object, implementing utility methods.
:rtype: SiteTree
"""
sitetree = getattr(_THREAD_LOCAL, _THREAD_SITETREE, None)
if sitetree is None:
sitetree = SiteTree()
setattr(_THREAD_LOCAL, _THREAD_SITETREE, sitetree) # depends on [control=['if'], data=['sitetree']]
return sitetree |
def run_thread_values(run, estimator_list):
"""Helper function for parallelising thread_values_df.
Parameters
----------
ns_run: dict
Nested sampling run dictionary.
estimator_list: list of functions
Returns
-------
vals_array: numpy array
Array of estimator values for each thread.
Has shape (len(estimator_list), len(theads)).
"""
threads = nestcheck.ns_run_utils.get_run_threads(run)
vals_list = [nestcheck.ns_run_utils.run_estimators(th, estimator_list)
for th in threads]
vals_array = np.stack(vals_list, axis=1)
assert vals_array.shape == (len(estimator_list), len(threads))
return vals_array | def function[run_thread_values, parameter[run, estimator_list]]:
constant[Helper function for parallelising thread_values_df.
Parameters
----------
ns_run: dict
Nested sampling run dictionary.
estimator_list: list of functions
Returns
-------
vals_array: numpy array
Array of estimator values for each thread.
Has shape (len(estimator_list), len(theads)).
]
variable[threads] assign[=] call[name[nestcheck].ns_run_utils.get_run_threads, parameter[name[run]]]
variable[vals_list] assign[=] <ast.ListComp object at 0x7da18f09ec20>
variable[vals_array] assign[=] call[name[np].stack, parameter[name[vals_list]]]
assert[compare[name[vals_array].shape equal[==] tuple[[<ast.Call object at 0x7da18f09e560>, <ast.Call object at 0x7da18f09fdf0>]]]]
return[name[vals_array]] | keyword[def] identifier[run_thread_values] ( identifier[run] , identifier[estimator_list] ):
literal[string]
identifier[threads] = identifier[nestcheck] . identifier[ns_run_utils] . identifier[get_run_threads] ( identifier[run] )
identifier[vals_list] =[ identifier[nestcheck] . identifier[ns_run_utils] . identifier[run_estimators] ( identifier[th] , identifier[estimator_list] )
keyword[for] identifier[th] keyword[in] identifier[threads] ]
identifier[vals_array] = identifier[np] . identifier[stack] ( identifier[vals_list] , identifier[axis] = literal[int] )
keyword[assert] identifier[vals_array] . identifier[shape] ==( identifier[len] ( identifier[estimator_list] ), identifier[len] ( identifier[threads] ))
keyword[return] identifier[vals_array] | def run_thread_values(run, estimator_list):
"""Helper function for parallelising thread_values_df.
Parameters
----------
ns_run: dict
Nested sampling run dictionary.
estimator_list: list of functions
Returns
-------
vals_array: numpy array
Array of estimator values for each thread.
Has shape (len(estimator_list), len(theads)).
"""
threads = nestcheck.ns_run_utils.get_run_threads(run)
vals_list = [nestcheck.ns_run_utils.run_estimators(th, estimator_list) for th in threads]
vals_array = np.stack(vals_list, axis=1)
assert vals_array.shape == (len(estimator_list), len(threads))
return vals_array |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.