text
stringlengths 4
1.02M
| meta
dict |
|---|---|
import sys; sys.path.append('..') # help python find open_bci_v3.py relative to scripts folder
import open_bci_v3 as bci
import os
import logging
import time
def printData(sample):
#os.system('clear')
print "----------------"
print("%f" %(sample.id))
print sample.channel_data
print sample.aux_data
print "----------------"
if __name__ == '__main__':
port = '/dev/tty.OpenBCI-DN008VTF'
#port = '/dev/tty.OpenBCI-DN0096XA'
baud = 115200
logging.basicConfig(filename="test.log",format='%(asctime)s - %(levelname)s : %(message)s',level=logging.DEBUG)
logging.info('---------LOG START-------------')
board = bci.OpenBCIBoard(port=port, scaled_output=False, log=True)
print("Board Instantiated")
board.ser.write('v')
time.sleep(10)
#board.start_streaming(printData)
board.print_bytes_in()
|
{
"content_hash": "1ed55c075bab2a04eb0aca78965b7b7c",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 112,
"avg_line_length": 28.75,
"alnum_prop": 0.6596273291925466,
"repo_name": "jfrey-xx/OpenBCI_Python",
"id": "230a2ff690408f09c15beda497f405421311a847",
"size": "805",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "scripts/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "96999"
}
],
"symlink_target": ""
}
|
import os
import signal
import subprocess
import logging
import string
import traceback
import errno
import itertools
import shlex
from collections import OrderedDict
# Ordered JSON, , read & write json, internal
from yotta.lib import ordered_json
# Pack, , common parts of Components/Targets, internal
from yotta.lib import pack
from yotta.lib.pack import tryTerminate as _tryTerminate
# fsutils, , misc filesystem utils, internal
from yotta.lib import fsutils
Target_Description_File = 'target.json'
App_Config_File = 'config.json'
Registry_Namespace = 'targets'
Schema_File = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema', 'target.json')
logger = logging.getLogger('target')
def _ignoreSignal(signum, frame):
logger.debug('ignoring signal %s, traceback:\n%s' % (
signum, ''.join(traceback.format_list(traceback.extract_stack(frame)))
))
def _newPGroup():
os.setpgrp()
def _mergeDictionaries(*args):
''' merge dictionaries of dictionaries recursively, with elements from
dictionaries earlier in the argument sequence taking precedence
'''
# to support merging of OrderedDicts, copy the result type from the first
# argument:
result = type(args[0])()
for k, v in itertools.chain(*[x.items() for x in args]):
if not k in result:
result[k] = v
elif isinstance(result[k], dict) and isinstance(v, dict):
result[k] = _mergeDictionaries(result[k], v)
return result
def _mirrorStructure(dictionary, value):
''' create a new nested dictionary object with the same structure as
'dictionary', but with all scalar values replaced with 'value'
'''
result = type(dictionary)()
for k in dictionary.keys():
if isinstance(dictionary[k], dict):
result[k] = _mirrorStructure(dictionary[k], value)
else:
result[k] = value
return result
def _encodePathForEnv(s):
# under python 2.7, be sure that paths being exported as environment
# variables are encoded (as utf-8 byte strings), not unicode objects:
import sys
if sys.version_info[0] == 2:
assert(isinstance(s, type(u'')) or isinstance(s, type(b'')))
if not isinstance(s, type(b'')):
s = s.encode('utf-8')
return s
# API
def loadAdditionalConfig(config_path):
''' returns (error, config)
'''
error = None
config = {}
if not config_path:
return (error, config)
if os.path.isfile(config_path):
try:
config = ordered_json.load(config_path)
except Exception as e:
error = "Invalid syntax in file %s: %s" % (config_path, e)
else:
# try to interpret the argument as literal JSON
try:
config = ordered_json.loads(config_path)
except Exception as e:
# if this fails too, guess whether it was intended to be JSON or
# not, and display an appropriate error message
if '{' in config_path or '}' in config_path:
error = "Invalid syntax in literal JSON: %s" % e
else:
error = "File \"%s\" does not exist" % config_path
logger.debug('read additional config: %s', config)
return (error, config)
def getDerivedTarget(
target_name_and_version,
targets_path,
application_dir = None,
install_missing = True,
update_installed = False,
additional_config = None,
shrinkwrap = None
):
# access, , get components, internal
from yotta.lib import access
from yotta.lib import access_common
''' Get the specified target description, optionally ensuring that it (and
all dependencies) are installed in targets_path.
Returns (DerivedTarget, errors), or (None, errors) if the leaf target
could not be found/installed.
'''
logger.debug('satisfy target: %s' % target_name_and_version);
if ',' in target_name_and_version:
name, version_req = target_name_and_version.split(',')
else:
name = target_name_and_version
version_req = '*'
# shrinkwrap is the raw json form, not mapping form here, so rearrange it
# before indexing:
if shrinkwrap is not None:
shrinkwrap_version_req = {
x['name']: x['version'] for x in shrinkwrap.get('targets', [])
}.get(name, None)
else:
shrinkwrap_version_req = None
if shrinkwrap_version_req is not None:
logger.debug(
'respecting shrinkwrap version %s for %s', shrinkwrap_version_req, name
)
dspec = pack.DependencySpec(
name,
version_req,
shrinkwrap_version_req = shrinkwrap_version_req
)
leaf_target = None
previous_name = dspec.name
search_dirs = [targets_path]
target_hierarchy = []
errors = []
while True:
t = None
try:
if install_missing:
t = access.satisfyVersion(
name = dspec.name,
version_required = dspec.versionReq(),
available = target_hierarchy,
search_paths = search_dirs,
working_directory = targets_path,
update_installed = ('Update' if update_installed else None),
type = 'target',
inherit_shrinkwrap = shrinkwrap
)
else:
t = access.satisfyVersionFromSearchPaths(
name = dspec.name,
version_required = dspec.versionReq(),
search_paths = search_dirs,
type = 'target',
inherit_shrinkwrap = shrinkwrap
)
except access_common.AccessException as e:
errors.append(e)
if not t:
if install_missing:
logger.error(
'could not install target %s for %s' %
(dspec, previous_name)
)
break
else:
target_hierarchy.append(t)
previous_name = dspec.name
assert(isinstance(t, Target))
dspec = t.baseTargetSpec() #pylint: disable=no-member
if not leaf_target:
leaf_target = t
if dspec is None:
break
if leaf_target is None:
return (None, errors)
# if we have a valid target, try to load the app-specific config data (if
# any):
app_config = {}
if application_dir is not None:
app_config_fname = os.path.join(application_dir, App_Config_File)
if os.path.exists(app_config_fname):
try:
app_config = ordered_json.load(app_config_fname)
except Exception as e:
errors.append(Exception("Invalid application config.json: %s" % (e)))
return (DerivedTarget(leaf_target, target_hierarchy[1:], app_config, additional_config), errors)
class Target(pack.Pack):
def __init__(
self,
path,
installed_linked = False,
latest_suitable_version = None,
inherit_shrinkwrap = None
):
''' Initialise a Target based on a directory. If the directory does not
contain a valid target.json file the initialised object will test
false, and will contain an error property containing the failure.
'''
# re-initialise with the information from the most-derived target
super(Target, self).__init__(
path,
description_filename = Target_Description_File,
installed_linked = installed_linked,
schema_filename = Schema_File,
latest_suitable_version = latest_suitable_version,
inherit_shrinkwrap = inherit_shrinkwrap
)
if self.description and inherit_shrinkwrap is not None:
# when inheriting a shrinkwrap, check that this module is
# listed in the shrinkwrap, otherwise emit a warning:
if next((x for x in inherit_shrinkwrap.get('targets', []) if x['name'] == self.getName()), None) is None:
logger.warning("%s missing from shrinkwrap", self.getName())
def baseTargetSpec(self):
''' returns pack.DependencySpec for the base target of this target (or
None if this target does not inherit from another target.
'''
inherits = self.description.get('inherits', {})
if len(inherits) == 1:
name, version_req = list(inherits.items())[0]
shrinkwrap_version_req = self.getShrinkwrapMapping('targets').get(name, None)
if shrinkwrap_version_req is not None:
logger.debug(
'respecting shrinkwrap version %s for %s', shrinkwrap_version_req, name
)
return pack.DependencySpec(
name,
version_req,
shrinkwrap_version_req = shrinkwrap_version_req
)
elif len(inherits) > 1:
logger.error('target %s specifies multiple base targets, but only one is allowed', self.getName())
return None
def getRegistryNamespace(self):
return Registry_Namespace
def getConfig(self):
return self.description.get('config', OrderedDict())
class DerivedTarget(Target):
def __init__(self, leaf_target, base_targets, app_config, additional_config):
''' Initialise a DerivedTarget (representing an inheritance hierarchy of
Targets.), given the most-derived Target description, and a set of
available Targets to compose the rest of the lineage from.
DerivedTarget provides build & debug commands, and access to the
derived target config info (merged with the application config
info from config.json, if any).
It's possible to update the application config for an existing
DerivedTarget instance.
DerivedTarget can also be used as a stand-in for the most-derived
(leaf) target in the inheritance hierarchy.
'''
# initialise the base class as a copy of leaf_target
super(DerivedTarget, self).__init__(
path = leaf_target.path,
installed_linked = leaf_target.installed_linked,
latest_suitable_version = leaf_target.latest_suitable_version
)
self.hierarchy = [leaf_target] + base_targets[:]
self.config = None
self.config_blame = None
self.app_config = app_config
self.additional_config = additional_config or {}
# override truthiness to test validity of the entire hierarchy:
def __nonzero__(self):
for t in self.hierarchy:
if not t: return False
return bool(len(self.hierarchy))
def __bool__(self):
return self.__nonzero__()
def getScript(self, scriptname):
''' return the specified script if one exists (possibly inherited from
a base target)
'''
for t in self.hierarchy:
s = t.getScript(scriptname)
if s:
return s
return None
def _loadConfig(self):
''' load the configuration information from the target hierarchy '''
config_dicts = [self.additional_config, self.app_config] + [t.getConfig() for t in self.hierarchy]
# create an identical set of dictionaries, but with the names of the
# sources in place of the values. When these are merged they will show
# where each merged property came from:
config_blame = [
_mirrorStructure(self.additional_config, 'command-line config'),
_mirrorStructure(self.app_config, 'application\'s config.json'),
] + [
_mirrorStructure(t.getConfig(), t.getName()) for t in self.hierarchy
]
self.config = _mergeDictionaries(*config_dicts)
self.config_blame = _mergeDictionaries(*config_blame)
# note that backwards compatibility with the "similarTo" data that used
# to be used for target-dependencies is ensured at the point of use. We
# don't merge similarTo into the config because it might break things
# in the config (clobber objects with scalar values, for example)
def _ensureConfig(self):
if self.config is None:
self._loadConfig()
def getConfigValue(self, conf_key):
self._ensureConfig()
# jsonpointer, pip install jsonpointer, BSD 3 Clause
import jsonpointer
try:
return jsonpointer.resolve_pointer(self.config, conf_key)
except jsonpointer.JsonPointerException as e:
# fall back to legacy dot-separated pointers
key_path = conf_key.split('.');
c = self.config
for part in key_path:
if part in c:
c = c[part]
else:
return None
return c
def getSimilarTo_Deprecated(self):
r = []
for t in self.hierarchy:
r.append(t.getName())
r += t.description.get('similarTo', [])
return r
def getMergedConfig(self):
self._ensureConfig()
return self.config
def getConfigBlame(self):
self._ensureConfig()
return self.config_blame
def getToolchainFiles(self):
''' return a list of toolchain file paths in override order (starting
at the bottom/leaf of the hierarchy and ending at the base).
The list is returned in the order they should be included
(most-derived last).
'''
return reversed([
os.path.join(x.path, x.description['toolchain']) for x in self.hierarchy if 'toolchain' in x.description
])
def getAdditionalIncludes(self):
''' Return the list of cmake files which are to be included by yotta in
every module built. The list is returned in the order they should
be included (most-derived last).
'''
return reversed([
os.path.join(t.path, include_file)
for t in self.hierarchy
for include_file in t.description.get('cmakeIncludes', [])
])
def inheritsFrom(self, target_name):
''' Return true if this target inherits from the named target (directly
or indirectly. Also returns true if this target is the named
target. Otherwise return false.
'''
for t in self.hierarchy:
if t and t.getName() == target_name or target_name in t.description.get('inherits', {}):
return True
return False
@classmethod
def addBuildOptions(cls, parser):
parser.add_argument('-G', '--cmake-generator', dest='cmake_generator',
default='Ninja',
help='CMake generator to use (defaults to Ninja). You can use this '+
'to generate IDE project files instead, see cmake --help for '+
'possible generator names. Note that only Ninja or Unix Makefile '+
'based generators will work correctly with yotta.',
metavar='CMAKE_GENERATOR',
type=str
)
@classmethod
def _findNinja(cls):
# sometimes ninja is called ninja-build
for name in ('ninja', 'ninja-build'):
if fsutils.which(name) is not None:
return name
# default to ninja:
return 'ninja'
@classmethod
def overrideBuildCommand(cls, generator_name, targets=None):
if targets is None:
targets = []
# when we build using cmake --build, the nice colourised output is lost
# - so override with the actual build command for command-line
# generators where people will care:
try:
r = {
'Unix Makefiles': ['make'],
'Ninja': [cls._findNinja()]
}[generator_name]
# all of the above build programs take the build targets (e.g.
# "all") as the last arguments
if targets is not None:
r += targets
return r
except KeyError:
return None
def hintForCMakeGenerator(self, generator_name, component):
if generator_name in ('Ninja', 'Unix Makefiles'):
return None
try:
name = self.getName()
component_name = component.getName()
return {
'Xcode':
'a project file has been generated at ./build/%s/%s.xcodeproj' % (name, component_name),
'Sublime Text 2 - Ninja':
'a project file has been generated at ./build/%s/%s.sublime-project' % (name, component_name),
'Sublime Text 2 - Unix Makefiles':
'a project file has been generated at ./build/%s/%s.sublime-project' % (name, component_name),
'Eclipse CDT4 - Ninja':
'a project file has been generated at ./build/%s/.project' % name,
'Eclipse CDT4 - Unix Makefiles':
'a project file has been generated at ./build/%s/.project' % name
}[generator_name]
except KeyError:
return 'project files for %s have been generated in ./build/%s' % (component_name, name)
def exec_helper(self, cmd, builddir):
''' Execute the given command, returning an error message if an error occured
or None if the command was succesful.'''
try:
child = subprocess.Popen(cmd, cwd=builddir)
child.wait()
except OSError as e:
if e.errno == errno.ENOENT:
if cmd[0] == 'cmake':
return 'CMake is not installed, please follow the installation instructions at http://docs.yottabuild.org/#installing'
else:
return '%s is not installed' % (cmd[0])
else:
return 'command %s failed' % (cmd)
if child.returncode:
return 'command %s failed' % (cmd)
@fsutils.dropRootPrivs
def build(self, builddir, component, args, release_build=False, build_args=None, targets=None,
release_no_debug_info_build=False):
''' Execute the commands necessary to build this component, and all of
its dependencies. '''
if build_args is None:
build_args = []
if targets is None:
targets = []
# in the future this may be specified in the target description, but
# for now we only support cmake, so everything is simple:
if release_no_debug_info_build:
build_type = 'Release'
elif release_build:
build_type = 'RelWithDebInfo'
else:
build_type = 'Debug'
cmd = ['cmake', '-D', 'CMAKE_BUILD_TYPE=%s' % build_type, '-G', args.cmake_generator, '.']
res = self.exec_helper(cmd, builddir)
if res is not None:
return res
# work-around various yotta-specific issues with the generated
# Ninja/project files:
from yotta.lib import cmake_fixups
cmake_fixups.applyFixupsForFenerator(args.cmake_generator, builddir, component)
build_command = self.overrideBuildCommand(args.cmake_generator, targets=targets)
if build_command:
cmd = build_command + build_args
else:
cmd = ['cmake', '--build', builddir]
if len(targets):
# !!! FIXME: support multiple targets with the default CMake
# build command
cmd += ['--target', targets[0]]
cmd += build_args
res = self.exec_helper(cmd, builddir)
if res is not None:
return res
hint = self.hintForCMakeGenerator(args.cmake_generator, component)
if hint:
logger.info(hint)
def findProgram(self, builddir, program):
''' Return the builddir-relative path of program, if only a partial
path is specified. Returns None and logs an error message if the
program is ambiguous or not found
'''
# if this is an exact match, do no further checking:
if os.path.isfile(os.path.join(builddir, program)):
logging.info('found %s' % program)
return program
exact_matches = []
insensitive_matches = []
approx_matches = []
for path, dirs, files in os.walk(builddir):
if program in files:
exact_matches.append(os.path.relpath(os.path.join(path, program), builddir))
continue
files_lower = [f.lower() for f in files]
if program.lower() in files_lower:
insensitive_matches.append(
os.path.relpath(
os.path.join(path, files[files_lower.index(program.lower())]),
builddir
)
)
continue
# !!! TODO: in the future add approximate string matching (typos,
# etc.), for now we just test stripping any paths off program, and
# looking for substring matches:
pg_basen_lower_noext = os.path.splitext(os.path.basename(program).lower())[0]
for f in files_lower:
if pg_basen_lower_noext in f:
approx_matches.append(
os.path.relpath(
os.path.join(path, files[files_lower.index(f)]),
builddir
)
)
if len(exact_matches) == 1:
logging.info('found %s at %s', program, exact_matches[0])
return exact_matches[0]
elif len(exact_matches) > 1:
logging.error(
'%s matches multiple executables, please use a full path (one of %s)' % (
program,
', or '.join(['"'+os.path.join(m, program)+'"' for m in exact_matches])
)
)
return None
# if we have matches with and without a file extension, prefer the
# no-file extension version, and discard the others (so we avoid
# picking up post-processed files):
reduced_approx_matches = []
for m in approx_matches:
root = os.path.splitext(m)[0]
if (m == root) or (root not in approx_matches):
reduced_approx_matches.append(m)
approx_matches = reduced_approx_matches
for matches in (insensitive_matches, approx_matches):
if len(matches) == 1:
logging.info('found %s at %s' % (
program, matches[0]
))
return matches[0]
elif len(matches) > 1:
logging.error(
'%s is similar to several executables found. Please use an exact name:\n%s' % (
program,
'\n'.join(matches)
)
)
return None
logging.error('could not find program "%s" to debug' % program)
return None
def buildProgEnvAndVars(self, program, build_dir):
prog_env = os.environ.copy()
prog_env['YOTTA_PROGRAM'] = _encodePathForEnv(program)
prog_env['YOTTA_BUILD_DIR'] = _encodePathForEnv(build_dir)
prog_env['YOTTA_TARGET_DIR'] = _encodePathForEnv(self.path)
prog_vars = dict(program=program,
build_dir=build_dir,
target_dir=self.path)
return (prog_env, prog_vars)
@fsutils.dropRootPrivs
def start(self, builddir, program, forward_args):
''' Launch the specified program. Uses the `start` script if specified
by the target, attempts to run it natively if that script is not
defined.
'''
child = None
try:
prog_path = self.findProgram(builddir, program)
if prog_path is None:
return
start_env, start_vars = self.buildProgEnvAndVars(prog_path, builddir)
if self.getScript('start'):
cmd = [
os.path.expandvars(string.Template(x).safe_substitute(**start_vars))
for x in self.getScript('start')
] + forward_args
else:
cmd = shlex.split('./' + prog_path) + forward_args
logger.debug('starting program: %s', cmd)
child = subprocess.Popen(
cmd, cwd = builddir, env = start_env
)
child.wait()
if child.returncode:
return "process exited with status %s" % child.returncode
child = None
except OSError as e:
import errno
if e.errno == errno.ENOEXEC:
return ("the program %s cannot be run (perhaps your target "+
"needs to define a 'start' script to start it on its "
"intended execution target?)") % prog_path
finally:
if child is not None:
_tryTerminate(child)
def debug(self, builddir, program):
''' Launch a debugger for the specified program. Uses the `debug`
script if specified by the target, falls back to the `debug` and
`debugServer` commands if not. `program` is inserted into the
$program variable in commands.
'''
try:
signal.signal(signal.SIGINT, _ignoreSignal);
if self.getScript('debug') is not None:
return self._debugWithScript(builddir, program)
elif 'debug' in self.description:
logger.warning(
'target %s provides deprecated debug property. It should '+
'provide script.debug instead.', self.getName()
)
return self._debugDeprecated(builddir, program)
else:
return "Target %s does not specify debug commands" % self
finally:
# clear the sigint handler
signal.signal(signal.SIGINT, signal.SIG_DFL);
@fsutils.dropRootPrivs
def _debugWithScript(self, builddir, program):
child = None
try:
prog_path = self.findProgram(builddir, program)
if prog_path is None:
return
debug_env, debug_vars = self.buildProgEnvAndVars(prog_path, builddir)
cmd = [
os.path.expandvars(string.Template(x).safe_substitute(**debug_vars))
for x in self.getScript('debug')
]
logger.debug('starting debugger: %s', cmd)
child = subprocess.Popen(
cmd, cwd = builddir, env = debug_env
)
child.wait()
if child.returncode:
return "debug process exited with status %s" % child.returncode
child = None
except:
# reset the terminal, in case the debugger has screwed it up
os.system('reset')
raise
finally:
if child is not None:
_tryTerminate(child)
@fsutils.dropRootPrivs
def _debugDeprecated(self, builddir, program):
prog_path = self.findProgram(builddir, program)
if prog_path is None:
return
with open(os.devnull, "w") as dev_null:
daemon = None
child = None
try:
# debug-server is the old name, debugServer is the new name
debug_server_prop = 'debugServer'
if not debug_server_prop in self.description:
debug_server_prop = 'debug-server'
if debug_server_prop in self.description:
logger.debug('starting debug server...')
daemon = subprocess.Popen(
self.description[debug_server_prop],
cwd = builddir,
stdout = dev_null,
stderr = dev_null,
preexec_fn = _newPGroup
)
else:
daemon = None
cmd = [
os.path.expandvars(string.Template(x).safe_substitute(program=prog_path))
for x in self.description['debug']
]
logger.debug('starting debugger: %s', cmd)
child = subprocess.Popen(
cmd, cwd = builddir
)
child.wait()
if child.returncode:
return "debug process executed with status %s" % child.returncode
child = None
except:
# reset the terminal, in case the debugger has screwed it up
os.system('reset')
raise
finally:
if child is not None:
try:
child.terminate()
except OSError as e:
pass
if daemon is not None:
logger.debug('shutting down debug server...')
try:
daemon.terminate()
except OSError as e:
pass
@fsutils.dropRootPrivs
def test(self, test_dir, module_dir, test_command, filter_command, forward_args):
# we assume that test commands are relative to the current directory
# (filter commands are relative to the module dir to make it possible
# to use filter scripts shipped with the module)
test_command = './' + test_command
test_script = self.getScript('test')
test_env, test_vars = self.buildProgEnvAndVars(os.path.abspath(os.path.join(test_dir, test_command)), test_dir)
if test_script is None:
cmd = shlex.split(test_command) + forward_args
else:
cmd = [
os.path.expandvars(string.Template(x).safe_substitute(**test_vars))
for x in test_script
] + forward_args
# if the command is a python script, run it with the python interpreter
# being used to run yotta:
if test_command[0].lower().endswith('.py'):
import sys
python_interpreter = sys.executable
cmd = [python_interpreter] + cmd
if filter_command and filter_command[0].lower().endswith('.py'):
import sys
python_interpreter = sys.executable
filter_command = [python_interpreter] + filter_command
test_child = None
test_filter = None
try:
logger.debug('running test: %s', cmd)
if filter_command:
logger.debug('using output filter command: %s', filter_command)
test_child = subprocess.Popen(
cmd, cwd = test_dir, stdout = subprocess.PIPE, env = test_env
)
try:
test_filter = subprocess.Popen(
filter_command, cwd = module_dir, stdin = test_child.stdout, env = test_env
)
except OSError as e:
logger.error('error starting test output filter "%s": %s', filter_command, e)
_tryTerminate(test_child)
return 1
logger.debug('waiting for filter process')
test_filter.communicate()
if test_child.poll() is None:
logger.warning('test child has not exited and will be terminated')
_tryTerminate(test_child)
test_child.stdout.close()
returncode = test_filter.returncode
test_child = None
test_filter = None
if returncode:
logger.debug("test filter exited with status %s (=fail)", returncode)
return 1
else:
try:
test_child = subprocess.Popen(
cmd, cwd = test_dir, env = test_env
)
logger.debug('waiting for test child')
except OSError as e:
if e.errno == errno.ENOENT:
logger.error('Error: no such file or directory: "%s"', cmd[0])
return 1
raise
test_child.wait()
returncode = test_child.returncode
test_child = None
if returncode:
logger.debug("test process exited with status %s (=fail)", returncode)
return 1
finally:
if test_child is not None:
_tryTerminate(test_child)
if test_filter is not None:
_tryTerminate(test_filter)
logger.debug("test %s passed", test_command)
return 0
|
{
"content_hash": "333f3c95a5bc06245189d65499a537cb",
"timestamp": "",
"source": "github",
"line_count": 831,
"max_line_length": 138,
"avg_line_length": 40.25150421179302,
"alnum_prop": 0.5523632993512512,
"repo_name": "ARMmbed/yotta",
"id": "7a6b8249803ef8048126d466d0a3ca7ea65cb7be",
"size": "33599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yotta/lib/target.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "285"
},
{
"name": "Python",
"bytes": "548421"
},
{
"name": "Shell",
"bytes": "4958"
}
],
"symlink_target": ""
}
|
from typing import Any, Dict, Optional
from django.db.models import Q
from django.utils.timezone import now as timezone_now
from zerver.models import UserStatus
def get_user_info_dict(realm_id: int) -> Dict[int, Dict[str, Any]]:
rows = UserStatus.objects.filter(
user_profile__realm_id=realm_id,
user_profile__is_active=True,
).exclude(
Q(status=UserStatus.NORMAL) &
Q(status_text=''),
).values(
'user_profile_id',
'status',
'status_text',
)
user_dict: Dict[int, Dict[str, Any]] = dict()
for row in rows:
away = row['status'] == UserStatus.AWAY
status_text = row['status_text']
user_id = row['user_profile_id']
dct = dict()
if away:
dct['away'] = away
if status_text:
dct['status_text'] = status_text
user_dict[user_id] = dct
return user_dict
def update_user_status(user_profile_id: int,
status: Optional[int],
status_text: Optional[str],
client_id: int) -> None:
timestamp = timezone_now()
defaults = dict(
client_id=client_id,
timestamp=timestamp,
)
if status is not None:
defaults['status'] = status
if status_text is not None:
defaults['status_text'] = status_text
UserStatus.objects.update_or_create(
user_profile_id=user_profile_id,
defaults=defaults,
)
|
{
"content_hash": "61d945b458113e8b1bb322d4d881a1af",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 67,
"avg_line_length": 25.135593220338983,
"alnum_prop": 0.5697909642616318,
"repo_name": "synicalsyntax/zulip",
"id": "da4aa90f39f5b958434d98d3b21e776de13c87d9",
"size": "1483",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "zerver/lib/user_status.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "399944"
},
{
"name": "Dockerfile",
"bytes": "2939"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "746831"
},
{
"name": "JavaScript",
"bytes": "3096246"
},
{
"name": "Perl",
"bytes": "398763"
},
{
"name": "Puppet",
"bytes": "71057"
},
{
"name": "Python",
"bytes": "6904144"
},
{
"name": "Ruby",
"bytes": "6110"
},
{
"name": "Shell",
"bytes": "119974"
},
{
"name": "TypeScript",
"bytes": "14601"
}
],
"symlink_target": ""
}
|
from pymongo import MongoClient
class Mongo():
def __init__(self):
self.client = MongoClient()
self.db = self.client.test
def populate(self):
self.db.things.remove()
things = [
{"name": "Vishnu"},
{"name": "Lakshmi"},
{"name": "Ganesha"},
{"name": "Krishna"},
{"name": "Murugan"}
]
self.db.things.insert(things)
def count(self):
return self.db.things.count()
if __name__ == "__main__":
mongo = Mongo()
mongo.populate()
print(mongo.count())
|
{
"content_hash": "ef6bde80adf66a4aece3f5b5d32e18f1",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 37,
"avg_line_length": 21.703703703703702,
"alnum_prop": 0.4948805460750853,
"repo_name": "Varsha-Arun/sample_python_mongodb",
"id": "86f4cdab69d458d82a2df164cf5e99b954c139e5",
"size": "586",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mongo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "871"
}
],
"symlink_target": ""
}
|
VAGRANT_IP = '192.168.66.77'
EC2_IP = '54.179.183.245'
# Simple Tasks
def hello():
print 'Hello ThaiPy!'
def hi(name='Kan'):
print 'Hi ' + name
# Local Commands
from fabric.api import local, lcd
def deploy_fizzbuzz():
with lcd('fizzbuzz'):
local('python fizzbuzz_test.py')
local('git add fizzbuzz.py fizzbuzz_test.py')
local('git commit')
local('git push origin master')
# Remote Commands
from fabric.api import cd, env, run
env.hosts = [
'vagrant@' + VAGRANT_IP + ':22',
]
env.passwords = {
'vagrant@' + VAGRANT_IP + ':22': 'vagrant'
}
def create_empty_file(name='test'):
env.forward_agent = True
run('touch ' + name)
run('ls -al')
# ssh-add ~/.ssh/thaipy-demo.pem since accessing EC2 requires a key pair
def my_ec2():
env.hosts = [
'ubuntu@' + EC2_IP + ':22',
]
def deploy_page():
run('rm -rf fabric-workshop')
run('git clone https://github.com/zkan/fabric-workshop.git')
run('sudo cp fabric-workshop/index.html /usr/share/nginx/html')
run('sudo service nginx restart')
# Parallel Execution
from fabric.api import parallel
def my_servers():
env.hosts = [
'vagrant@' + VAGRANT_IP + ':22',
'ubuntu@' + EC2_IP + ':22',
]
@parallel
def run_in_parallel():
run('whoami')
|
{
"content_hash": "4eacc892a560a96a8426d690e2587e67",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 72,
"avg_line_length": 17.586666666666666,
"alnum_prop": 0.6050037907505686,
"repo_name": "zkan/fabric-workshop",
"id": "5998b07176575ddc8b34361d9543a78bf7c488b0",
"size": "1319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabfile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "35"
},
{
"name": "Python",
"bytes": "1370"
}
],
"symlink_target": ""
}
|
'''
Created on 2015-11-18
@author: dell
'''
import sys
import ConfigParser
reload(sys)
sys.setdefaultencoding("utf8")
import matlab.engine
import jieba
jieba.initialize()
from TimeRecognition import *
regexFilePath = '/home/shin/DeepLearning/MemoryNetwork/QA/Interface/date.regex'
TimeR = TimeRecognition(regexFilePath)
# different working space console=False / eclipse=True
workSpace_working = False
# get Config
cf = ConfigParser.ConfigParser()
cf.read("/home/shin/DeepLearning/MemoryNetwork/QA/Interface/FieldDialog.conf")
MatlabPath = cf.get('PathConfig', 'MatlabPath')
StoryPath = cf.get('PathConfig', 'StoryPath')
ResultPath = cf.get('PathConfig', 'ResultPath')
ErrorPath = cf.get('PathConfig', 'ErrorPath')
Field = cf.get('OtherConfig', 'Field')
CurrentSlotString = cf.get('PropertyConfig', '%sSlot' % (Field))
MachineName = cf.get('OtherConfig', 'MachineName')
UserName = cf.get('OtherConfig', 'UserName')
ErrorPath = '%s_%s' % (ErrorPath, Field)
# get FieldSlot
Slot = []
for value in CurrentSlotString.split(','):
Slot.append(value)
CurrentSlot = set(Slot)
# read regex rules
regexFilePath = '/home/shin/DeepLearning/MemoryNetwork/QA/Interface/date.regex'
regexFid = open(regexFilePath, 'r')
regexRules = {}
for line in regexFid.readlines():
[rules, tag] = line.strip().split('\t')
regexRules[rules] = tag
# start Matlab Engine
eng = matlab.engine.start_matlab()
eng.eval('cd %s;' % (MatlabPath), nargout=0)
# read machine request
requestTemplatePath = '/home/shin/DeepLearning/MemoryNetwork/QA/Interface/OnlineTest/RequestTemplate/%sRequestTemplate.txt' % (Field)
def readTemplate(requestTemplatePath):
requestFid = open(requestTemplatePath, 'r')
lines = requestFid.readlines()
queryDict = {}
for line in lines:
[tag, query] = line.strip().split('=')
queryDict[tag] = query
return queryDict
queryDict = readTemplate(requestTemplatePath)
def getQueryType(TicketSlot, SlotValueDict):
EmptySlot = TicketSlot - set(SlotValueDict.keys())
return 0 if len(EmptySlot) == 0 else list(EmptySlot)
def saveStory(StoryPath, storyList, TicketSlot, UserFirstResponse, writeMode='w'):
StoryFid = open(StoryPath, '%s' % (writeMode))
for i, sentence in enumerate(storyList):
# print sentence
StoryFid.write('%d %s\n' % (i + 1, sentence))
queryTypes = getQueryType(TicketSlot, SlotValueDict)
if UserFirstResponse:
for queryType in queryTypes:
StoryFid.write("%d %s\t%s\t%d\n" % (len(storyList) + 1, queryType + ' ?', 'nil', len(storyList)))
UserFirstResponse = False
else:
StoryFid.write("%d %s\t%s\t%d\n" % (len(storyList) + 1, queryTypes[0] + ' ?', 'nil', len(storyList)))
def copyStory(StoryPath, ErrorPath):
srcFid = open(StoryPath, 'r')
dstFid = open(ErrorPath, 'a')
for line in srcFid.readlines():
dstFid.write(line)
srcFid.close()
dstFid.close()
print 'Save Error Success!'
def AllSlotFilled(Slot, SlotDict):
EmptySlot = Slot - set(SlotDict.keys())
return 1 if len(EmptySlot) == 0 else 0
# start interface
DialogCounter = 1
while 1:
FinishFlag = 0
storyList = []
UserFirstResponse = True
SlotValueDict = {}
print('\n\n\n\n\n\n\n\n--------------------Start Dialog %d----------------------------------') % (DialogCounter)
DialogCounter += 1
greeting = "%s:%s" % (MachineName, queryDict['greeting'])
greeting = greeting if workSpace_working else greeting.decode('utf-8').encode('utf-8')
print '%s\n' % (greeting)
storyList.append(' '.join(jieba.cut(greeting[len(MachineName) + 1:])))
while FinishFlag == 0:
input_a = raw_input('%s: ' % (UserName))
input_a = str(input_a)
if input_a.lower() == 'save':
copyStory(StoryPath, ErrorPath)
else:
UserInput = ' '.join(jieba.cut(input_a))
UserInput = TimeR.process(UserInput)
storyList.append(UserInput)
# print storyList
saveStory(StoryPath, storyList, CurrentSlot, UserFirstResponse)
UserFirstResponse = False
# execute matlab
#eng.eval('OnlineTest_shin(\'%s\')' % (Field), nargout=0)
slot_all=eng.eval("OnlineTest_shin('hhh',6)")
if slot_all[0]!='nil':
SlotValueDict['count']=slot_all[0]
if slot_all[1]!='nil':
SlotValueDict['name']=slot_all[1]
if slot_all[2]!='nil':
SlotValueDict['destination']=slot_all[2]
if slot_all[3]!='nil':
SlotValueDict['departure']=slot_all[3]
if slot_all[4]!='nil':
SlotValueDict['idnumber']=slot_all[4]
if slot_all[0]!='nil':
SlotValueDict['time']=slot_all[5]
currentStatment = []
for k, v in SlotValueDict.iteritems():
currentStatment += '%s=%s,' % (k, v.decode('utf-8').encode('utf8'))
print (' SlotStatus---(%s)') % (''.join(currentStatment))
# judeg Ticket get all slot info
if AllSlotFilled(CurrentSlot, SlotValueDict):
FinishFlag = 1
FinishGreeting = ('%s' % (queryDict['end']))
FinishGreeting = FinishGreeting if workSpace_working else FinishGreeting.decode('utf-8').encode('gbk')
print '%s:%s\n' % (MachineName, FinishGreeting)
print 'Does the Response Right(y/n)'
input_a = str(raw_input(' '))
if input_a.lower() == 'n': copyStory(StoryPath, ErrorPath)
# new request for slot info
queryType = getQueryType(CurrentSlot, SlotValueDict)
if queryType > 0:
query = queryDict[queryType[0]].decode('utf-8').encode('utf8')
print '%s:%s\n' % (MachineName, query)
storyList.append('%s' % (' '.join(jieba.cut(query))))
'''
1 您好 , 很 高兴 为您服务
2 你好 , 小美女 。
3 请问 您 要 订 几张 机票
4 我要 八张 机票 。
5 请问 您 要 订 几张 机票
6 从 石家庄 到 昆明 的 机票 。
7 请问 您 要 订 几张 机票
8 我 的 名字 是 周杰伦 。
9 count ? nil 8
10 name ? nil 8
11 destination ? nil 8
12 departure ? nil 8
13 idnumber ? nil 8
14 time ? nil 8
'''
|
{
"content_hash": "73d276cf11707250e0a27956c71c4b60",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 133,
"avg_line_length": 31.876923076923077,
"alnum_prop": 0.6121299871299871,
"repo_name": "shincling/MemNN_and_Varieties",
"id": "e502866c9cbf6107ce5fccb64453579ed4a4f1f7",
"size": "6375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "QA/Interface/FieldDialog_shin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "24560"
},
{
"name": "Lua",
"bytes": "12448"
},
{
"name": "Matlab",
"bytes": "273273"
},
{
"name": "Python",
"bytes": "335673"
},
{
"name": "Shell",
"bytes": "593"
}
],
"symlink_target": ""
}
|
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class nsencryptionparams(base_resource) :
def __init__(self) :
self._method = ""
self._keyvalue = ""
@property
def method(self) :
ur"""Cipher method (and key length) to be used to encrypt and decrypt content. The default value is AES256.<br/>Possible values = NONE, RC4, DES3, AES128, AES192, AES256.
"""
try :
return self._method
except Exception as e:
raise e
@method.setter
def method(self, method) :
ur"""Cipher method (and key length) to be used to encrypt and decrypt content. The default value is AES256.<br/>Possible values = NONE, RC4, DES3, AES128, AES192, AES256
"""
try :
self._method = method
except Exception as e:
raise e
@property
def keyvalue(self) :
ur"""The base64-encoded key generation number, method, and key value.
Note:
* Do not include this argument if you are changing the encryption method.
* To generate a new key value for the current encryption method, specify an empty string \(""\) as the value of this parameter. The parameter is passed implicitly, with its automatically generated value, to the NetScaler packet engines even when it is not included in the command. Passing the parameter to the packet engines enables the appliance to save the key value to the configuration file and to propagate the key value to the secondary appliance in a high availability setup.
"""
try :
return self._keyvalue
except Exception as e:
raise e
@keyvalue.setter
def keyvalue(self, keyvalue) :
ur"""The base64-encoded key generation number, method, and key value.
Note:
* Do not include this argument if you are changing the encryption method.
* To generate a new key value for the current encryption method, specify an empty string \(""\) as the value of this parameter. The parameter is passed implicitly, with its automatically generated value, to the NetScaler packet engines even when it is not included in the command. Passing the parameter to the packet engines enables the appliance to save the key value to the configuration file and to propagate the key value to the secondary appliance in a high availability setup.
"""
try :
self._keyvalue = keyvalue
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(nsencryptionparams_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.nsencryptionparams
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
return 0
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
ur""" Use this API to update nsencryptionparams.
"""
try :
if type(resource) is not list :
updateresource = nsencryptionparams()
updateresource.method = resource.method
updateresource.keyvalue = resource.keyvalue
return updateresource.update_resource(client)
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
ur""" Use this API to fetch all the nsencryptionparams resources that are configured on netscaler.
"""
try :
if not name :
obj = nsencryptionparams()
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
class Method:
NONE = "NONE"
RC4 = "RC4"
DES3 = "DES3"
AES128 = "AES128"
AES192 = "AES192"
AES256 = "AES256"
class nsencryptionparams_response(base_response) :
def __init__(self, length=1) :
self.nsencryptionparams = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.nsencryptionparams = [nsencryptionparams() for _ in range(length)]
|
{
"content_hash": "aa2c8d23b58938ec260c92f876f46f5d",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 484,
"avg_line_length": 36.78225806451613,
"alnum_prop": 0.7261565446174084,
"repo_name": "benfinke/ns_python",
"id": "0f58240453e9875af723d5ca9608157ff272333c",
"size": "5175",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "build/lib/nssrc/com/citrix/netscaler/nitro/resource/config/ns/nsencryptionparams.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "21836782"
},
{
"name": "Shell",
"bytes": "513"
}
],
"symlink_target": ""
}
|
import unittest2
class TestIterator(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.iterator import Iterator
return Iterator
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor(self):
connection = _Connection()
PATH = '/foo'
iterator = self._makeOne(connection, PATH)
self.assertTrue(iterator.connection is connection)
self.assertEqual(iterator.path, PATH)
self.assertEqual(iterator.page_number, 0)
self.assertEqual(iterator.next_page_token, None)
def test___iter__(self):
PATH = '/foo'
KEY1 = 'key1'
KEY2 = 'key2'
ITEM1, ITEM2 = object(), object()
ITEMS = {KEY1: ITEM1, KEY2: ITEM2}
def _get_items(response):
for item in response.get('items', []):
yield ITEMS[item['name']]
connection = _Connection({'items': [{'name': KEY1}, {'name': KEY2}]})
iterator = self._makeOne(connection, PATH)
iterator.get_items_from_response = _get_items
self.assertEqual(list(iterator), [ITEM1, ITEM2])
kw, = connection._requested
self.assertEqual(kw['method'], 'GET')
self.assertEqual(kw['path'], PATH)
self.assertEqual(kw['query_params'], {})
def test_has_next_page_new(self):
connection = _Connection()
PATH = '/foo'
iterator = self._makeOne(connection, PATH)
self.assertTrue(iterator.has_next_page())
def test_has_next_page_w_number_no_token(self):
connection = _Connection()
PATH = '/foo'
iterator = self._makeOne(connection, PATH)
iterator.page_number = 1
self.assertFalse(iterator.has_next_page())
def test_has_next_page_w_number_w_token(self):
connection = _Connection()
PATH = '/foo'
TOKEN = 'token'
iterator = self._makeOne(connection, PATH)
iterator.page_number = 1
iterator.next_page_token = TOKEN
self.assertTrue(iterator.has_next_page())
def test_get_query_params_no_token(self):
connection = _Connection()
PATH = '/foo'
iterator = self._makeOne(connection, PATH)
self.assertEqual(iterator.get_query_params(), {})
def test_get_query_params_w_token(self):
connection = _Connection()
PATH = '/foo'
TOKEN = 'token'
iterator = self._makeOne(connection, PATH)
iterator.next_page_token = TOKEN
self.assertEqual(iterator.get_query_params(),
{'pageToken': TOKEN})
def test_get_query_params_extra_params(self):
connection = _Connection()
PATH = '/foo'
extra_params = {'key': 'val'}
iterator = self._makeOne(connection, PATH, extra_params=extra_params)
self.assertEqual(iterator.get_query_params(), extra_params)
def test_get_query_params_w_token_and_extra_params(self):
connection = _Connection()
PATH = '/foo'
TOKEN = 'token'
extra_params = {'key': 'val'}
iterator = self._makeOne(connection, PATH, extra_params=extra_params)
iterator.next_page_token = TOKEN
expected_query = extra_params.copy()
expected_query.update({'pageToken': TOKEN})
self.assertEqual(iterator.get_query_params(), expected_query)
def test_get_query_params_w_token_collision(self):
connection = _Connection()
PATH = '/foo'
extra_params = {'pageToken': 'val'}
self.assertRaises(ValueError, self._makeOne, connection, PATH,
extra_params=extra_params)
def test_get_next_page_response_new_no_token_in_response(self):
PATH = '/foo'
TOKEN = 'token'
KEY1 = 'key1'
KEY2 = 'key2'
connection = _Connection({'items': [{'name': KEY1}, {'name': KEY2}],
'nextPageToken': TOKEN})
iterator = self._makeOne(connection, PATH)
response = iterator.get_next_page_response()
self.assertEqual(response['items'], [{'name': KEY1}, {'name': KEY2}])
self.assertEqual(iterator.page_number, 1)
self.assertEqual(iterator.next_page_token, TOKEN)
kw, = connection._requested
self.assertEqual(kw['method'], 'GET')
self.assertEqual(kw['path'], PATH)
self.assertEqual(kw['query_params'], {})
def test_get_next_page_response_no_token(self):
connection = _Connection()
PATH = '/foo'
iterator = self._makeOne(connection, PATH)
iterator.page_number = 1
self.assertRaises(RuntimeError, iterator.get_next_page_response)
def test_reset(self):
connection = _Connection()
PATH = '/foo'
TOKEN = 'token'
iterator = self._makeOne(connection, PATH)
iterator.page_number = 1
iterator.next_page_token = TOKEN
iterator.reset()
self.assertEqual(iterator.page_number, 0)
self.assertEqual(iterator.next_page_token, None)
def test_get_items_from_response_raises_NotImplementedError(self):
PATH = '/foo'
connection = _Connection()
iterator = self._makeOne(connection, PATH)
self.assertRaises(NotImplementedError,
iterator.get_items_from_response, object())
class _Connection(object):
def __init__(self, *responses):
self._responses = responses
self._requested = []
def api_request(self, **kw):
self._requested.append(kw)
response, self._responses = self._responses[0], self._responses[1:]
return response
|
{
"content_hash": "c50639bcee59bb5659fd0bf8bf01d8ef",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 77,
"avg_line_length": 36.09615384615385,
"alnum_prop": 0.5963416799857929,
"repo_name": "GrimDerp/gcloud-python",
"id": "83be77266703dc510f2de45d574da53b6c9d6809",
"size": "6228",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gcloud/test_iterator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Protocol Buffer",
"bytes": "20396"
},
{
"name": "Python",
"bytes": "835658"
},
{
"name": "Shell",
"bytes": "9043"
}
],
"symlink_target": ""
}
|
from typing import (
Any,
List,
Optional,
Sequence,
Union,
cast,
TYPE_CHECKING,
Mapping,
)
import pyspark.sql.connect.proto as proto
from pyspark.sql.connect.column import (
Column,
Expression,
SortOrder,
)
if TYPE_CHECKING:
from pyspark.sql.connect._typing import ColumnOrName, ExpressionOrString
from pyspark.sql.connect.client import SparkConnectClient
class InputValidationError(Exception):
pass
class LogicalPlan(object):
INDENT = 2
def __init__(self, child: Optional["LogicalPlan"]) -> None:
self._child = child
def unresolved_attr(self, colName: str) -> proto.Expression:
"""Creates an unresolved attribute from a column name."""
exp = proto.Expression()
exp.unresolved_attribute.unparsed_identifier = colName
return exp
def to_attr_or_expression(
self, col: "ColumnOrName", session: "SparkConnectClient"
) -> proto.Expression:
"""Returns either an instance of an unresolved attribute or the serialized
expression value of the column."""
if type(col) is str:
return self.unresolved_attr(col)
else:
return cast(Column, col).to_plan(session)
def plan(self, session: "SparkConnectClient") -> proto.Relation:
...
def command(self, session: "SparkConnectClient") -> proto.Command:
...
def _verify(self, session: "SparkConnectClient") -> bool:
"""This method is used to verify that the current logical plan
can be serialized to Proto and back and afterwards is identical."""
plan = proto.Plan()
plan.root.CopyFrom(self.plan(session))
serialized_plan = plan.SerializeToString()
test_plan = proto.Plan()
test_plan.ParseFromString(serialized_plan)
return test_plan == plan
def to_proto(self, session: "SparkConnectClient", debug: bool = False) -> proto.Plan:
"""
Generates connect proto plan based on this LogicalPlan.
Parameters
----------
session : :class:`SparkConnectClient`, optional.
a session that connects remote spark cluster.
debug: bool
if enabled, the proto plan will be printed.
"""
plan = proto.Plan()
plan.root.CopyFrom(self.plan(session))
if debug:
print(plan)
return plan
def print(self, indent: int = 0) -> str:
...
def _repr_html_(self) -> str:
...
def _child_repr_(self) -> str:
return self._child._repr_html_() if self._child is not None else ""
class DataSource(LogicalPlan):
"""A datasource with a format and optional a schema from which Spark reads data"""
def __init__(
self,
format: str = "",
schema: Optional[str] = None,
options: Optional[Mapping[str, str]] = None,
) -> None:
super().__init__(None)
self.format = format
self.schema = schema
self.options = options
def plan(self, session: "SparkConnectClient") -> proto.Relation:
plan = proto.Relation()
if self.format is not None:
plan.read.data_source.format = self.format
if self.schema is not None:
plan.read.data_source.schema = self.schema
if self.options is not None:
for k in self.options.keys():
v = self.options.get(k)
if v is not None:
plan.read.data_source.options[k] = v
return plan
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>DataSource</b><br />
format: {self.format}
schema: {self.schema}
options: {self.options}
</li>
</ul>
"""
class Read(LogicalPlan):
def __init__(self, table_name: str) -> None:
super().__init__(None)
self.table_name = table_name
def plan(self, session: "SparkConnectClient") -> proto.Relation:
plan = proto.Relation()
plan.read.named_table.unparsed_identifier = self.table_name
return plan
def print(self, indent: int = 0) -> str:
return f"{' ' * indent}<Read table_name={self.table_name}>\n"
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Read</b><br />
table name: {self.table_name}
</li>
</ul>
"""
class ShowString(LogicalPlan):
def __init__(
self, child: Optional["LogicalPlan"], numRows: int, truncate: int, vertical: bool
) -> None:
super().__init__(child)
self.numRows = numRows
self.truncate = truncate
self.vertical = vertical
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.show_string.input.CopyFrom(self._child.plan(session))
plan.show_string.numRows = self.numRows
plan.show_string.truncate = self.truncate
plan.show_string.vertical = self.vertical
return plan
def print(self, indent: int = 0) -> str:
return (
f"{' ' * indent}"
f"<ShowString numRows='{self.numRows}', "
f"truncate='{self.truncate}', "
f"vertical='{self.vertical}'>"
)
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>ShowString</b><br />
NumRows: {self.numRows} <br />
Truncate: {self.truncate} <br />
Vertical: {self.vertical} <br />
{self._child_repr_()}
</li>
</ul>
"""
class Project(LogicalPlan):
"""Logical plan object for a projection.
All input arguments are directly serialized into the corresponding protocol buffer
objects. This class only provides very limited error handling and input validation.
To be compatible with PySpark, we validate that the input arguments are all
expressions to be able to serialize them to the server.
"""
def __init__(self, child: Optional["LogicalPlan"], *columns: "ExpressionOrString") -> None:
super().__init__(child)
self._raw_columns = list(columns)
self.alias: Optional[str] = None
self._verify_expressions()
def _verify_expressions(self) -> None:
"""Ensures that all input arguments are instances of Expression or String."""
for c in self._raw_columns:
if not isinstance(c, (Expression, str)):
raise InputValidationError(
f"Only Expressions or String can be used for projections: '{c}'."
)
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
proj_exprs = []
for c in self._raw_columns:
if isinstance(c, Expression):
proj_exprs.append(c.to_plan(session))
elif c == "*":
exp = proto.Expression()
exp.unresolved_star.SetInParent()
proj_exprs.append(exp)
else:
proj_exprs.append(self.unresolved_attr(c))
plan = proto.Relation()
plan.project.input.CopyFrom(self._child.plan(session))
plan.project.expressions.extend(proj_exprs)
return plan
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return f"{' ' * indent}<Project cols={self._raw_columns}>\n{c_buf}"
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Project</b><br />
Columns: {",".join([str(c) for c in self._raw_columns])}
{self._child._repr_html_() if self._child is not None else ""}
</li>
</uL>
"""
class Filter(LogicalPlan):
def __init__(self, child: Optional["LogicalPlan"], filter: Expression) -> None:
super().__init__(child)
self.filter = filter
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.filter.input.CopyFrom(self._child.plan(session))
plan.filter.condition.CopyFrom(self.filter.to_plan(session))
return plan
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return f"{' ' * indent}<Filter filter={self.filter}>\n{c_buf}"
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Filter</b><br />
Condition: {self.filter}
{self._child_repr_()}
</li>
</uL>
"""
class Limit(LogicalPlan):
def __init__(self, child: Optional["LogicalPlan"], limit: int) -> None:
super().__init__(child)
self.limit = limit
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.limit.input.CopyFrom(self._child.plan(session))
plan.limit.limit = self.limit
return plan
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return f"{' ' * indent}<Limit limit={self.limit}>\n{c_buf}"
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Limit</b><br />
Limit: {self.limit} <br />
{self._child_repr_()}
</li>
</uL>
"""
class Offset(LogicalPlan):
def __init__(self, child: Optional["LogicalPlan"], offset: int = 0) -> None:
super().__init__(child)
self.offset = offset
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.offset.input.CopyFrom(self._child.plan(session))
plan.offset.offset = self.offset
return plan
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return f"{' ' * indent}<Offset={self.offset}>\n{c_buf}"
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Limit</b><br />
Offset: {self.offset} <br />
{self._child_repr_()}
</li>
</uL>
"""
class Deduplicate(LogicalPlan):
def __init__(
self,
child: Optional["LogicalPlan"],
all_columns_as_keys: bool = False,
column_names: Optional[List[str]] = None,
) -> None:
super().__init__(child)
self.all_columns_as_keys = all_columns_as_keys
self.column_names = column_names
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.deduplicate.all_columns_as_keys = self.all_columns_as_keys
if self.column_names is not None:
plan.deduplicate.column_names.extend(self.column_names)
return plan
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return (
f"{' ' * indent}<all_columns_as_keys={self.all_columns_as_keys} "
f"column_names={self.column_names}>\n{c_buf}"
)
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b></b>Deduplicate<br />
all_columns_as_keys: {self.all_columns_as_keys} <br />
column_names: {self.column_names} <br />
{self._child_repr_()}
</li>
</uL>
"""
class Sort(LogicalPlan):
def __init__(
self,
child: Optional["LogicalPlan"],
columns: List[Union[SortOrder, Column, str]],
is_global: bool,
) -> None:
super().__init__(child)
self.columns = columns
self.is_global = is_global
def col_to_sort_field(
self, col: Union[SortOrder, Column, str], session: "SparkConnectClient"
) -> proto.Sort.SortField:
if isinstance(col, SortOrder):
sf = proto.Sort.SortField()
sf.expression.CopyFrom(col.ref.to_plan(session))
sf.direction = (
proto.Sort.SortDirection.SORT_DIRECTION_ASCENDING
if col.ascending
else proto.Sort.SortDirection.SORT_DIRECTION_DESCENDING
)
sf.nulls = (
proto.Sort.SortNulls.SORT_NULLS_FIRST
if not col.nullsLast
else proto.Sort.SortNulls.SORT_NULLS_LAST
)
return sf
else:
sf = proto.Sort.SortField()
# Check string
if isinstance(col, Column):
sf.expression.CopyFrom(col.to_plan(session))
else:
sf.expression.CopyFrom(self.unresolved_attr(col))
sf.direction = proto.Sort.SortDirection.SORT_DIRECTION_ASCENDING
sf.nulls = proto.Sort.SortNulls.SORT_NULLS_LAST
return sf
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.sort.input.CopyFrom(self._child.plan(session))
plan.sort.sort_fields.extend([self.col_to_sort_field(x, session) for x in self.columns])
plan.sort.is_global = self.is_global
return plan
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return f"{' ' * indent}<Sort columns={self.columns}, global={self.is_global}>\n{c_buf}"
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Sort</b><br />
{", ".join([str(c) for c in self.columns])}
global: {self.is_global} <br />
{self._child_repr_()}
</li>
</uL>
"""
class Drop(LogicalPlan):
def __init__(
self,
child: Optional["LogicalPlan"],
columns: List[Union[Column, str]],
) -> None:
super().__init__(child)
assert len(columns) > 0 and all(isinstance(c, (Column, str)) for c in columns)
self.columns = columns
def _convert_to_expr(
self, col: Union[Column, str], session: "SparkConnectClient"
) -> proto.Expression:
expr = proto.Expression()
if isinstance(col, Column):
expr.CopyFrom(col.to_plan(session))
else:
expr.CopyFrom(self.unresolved_attr(col))
return expr
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.drop.input.CopyFrom(self._child.plan(session))
plan.drop.cols.extend([self._convert_to_expr(c, session) for c in self.columns])
return plan
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return f"{' ' * indent}<Drop columns={self.columns}>\n{c_buf}"
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Drop</b><br />
columns: {self.columns} <br />
{self._child_repr_()}
</li>
</uL>
"""
class Sample(LogicalPlan):
def __init__(
self,
child: Optional["LogicalPlan"],
lower_bound: float,
upper_bound: float,
with_replacement: bool,
seed: Optional[int],
) -> None:
super().__init__(child)
self.lower_bound = lower_bound
self.upper_bound = upper_bound
self.with_replacement = with_replacement
self.seed = seed
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.sample.input.CopyFrom(self._child.plan(session))
plan.sample.lower_bound = self.lower_bound
plan.sample.upper_bound = self.upper_bound
plan.sample.with_replacement = self.with_replacement
if self.seed is not None:
plan.sample.seed = self.seed
return plan
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return (
f"{' ' * indent}"
f"<Sample lowerBound={self.lower_bound}, upperBound={self.upper_bound}, "
f"withReplacement={self.with_replacement}, seed={self.seed}>"
f"\n{c_buf}"
)
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Sample</b><br />
LowerBound: {self.lower_bound} <br />
UpperBound: {self.upper_bound} <br />
WithReplacement: {self.with_replacement} <br />
Seed: {self.seed} <br />
{self._child_repr_()}
</li>
</uL>
"""
class Aggregate(LogicalPlan):
def __init__(
self,
child: Optional["LogicalPlan"],
grouping_cols: List[Column],
measures: Sequence[Expression],
) -> None:
super().__init__(child)
self.grouping_cols = grouping_cols
self.measures = measures
def _convert_measure(self, m: Expression, session: "SparkConnectClient") -> proto.Expression:
proto_expr = proto.Expression()
proto_expr.CopyFrom(m.to_plan(session))
return proto_expr
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
groupings = [x.to_plan(session) for x in self.grouping_cols]
agg = proto.Relation()
agg.aggregate.input.CopyFrom(self._child.plan(session))
agg.aggregate.result_expressions.extend(
list(map(lambda x: self._convert_measure(x, session), self.measures))
)
agg.aggregate.grouping_expressions.extend(groupings)
return agg
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return (
f"{' ' * indent}<Sort columns={self.grouping_cols}"
f"measures={self.measures}>\n{c_buf}"
)
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Aggregation</b><br />
{self._child_repr_()}
</li>
</uL>
"""
class Join(LogicalPlan):
def __init__(
self,
left: Optional["LogicalPlan"],
right: "LogicalPlan",
on: Optional[Union[str, List[str], Column]],
how: Optional[str],
) -> None:
super().__init__(left)
self.left = cast(LogicalPlan, left)
self.right = right
self.on = on
if how is None:
join_type = proto.Join.JoinType.JOIN_TYPE_INNER
elif how == "inner":
join_type = proto.Join.JoinType.JOIN_TYPE_INNER
elif how in ["outer", "full", "fullouter"]:
join_type = proto.Join.JoinType.JOIN_TYPE_FULL_OUTER
elif how in ["leftouter", "left"]:
join_type = proto.Join.JoinType.JOIN_TYPE_LEFT_OUTER
elif how in ["rightouter", "right"]:
join_type = proto.Join.JoinType.JOIN_TYPE_RIGHT_OUTER
elif how in ["leftsemi", "semi"]:
join_type = proto.Join.JoinType.JOIN_TYPE_LEFT_SEMI
elif how in ["leftanti", "anti"]:
join_type = proto.Join.JoinType.JOIN_TYPE_LEFT_ANTI
else:
raise NotImplementedError(
"""
Unsupported join type: %s. Supported join types include:
"inner", "outer", "full", "fullouter", "full_outer",
"leftouter", "left", "left_outer", "rightouter",
"right", "right_outer", "leftsemi", "left_semi",
"semi", "leftanti", "left_anti", "anti",
"""
% how
)
self.how = join_type
def plan(self, session: "SparkConnectClient") -> proto.Relation:
rel = proto.Relation()
rel.join.left.CopyFrom(self.left.plan(session))
rel.join.right.CopyFrom(self.right.plan(session))
if self.on is not None:
if not isinstance(self.on, list):
if isinstance(self.on, str):
rel.join.using_columns.append(self.on)
else:
rel.join.join_condition.CopyFrom(self.to_attr_or_expression(self.on, session))
else:
rel.join.using_columns.extend(self.on)
rel.join.join_type = self.how
return rel
def print(self, indent: int = 0) -> str:
i = " " * indent
o = " " * (indent + LogicalPlan.INDENT)
n = indent + LogicalPlan.INDENT * 2
return (
f"{i}<Join on={self.on} how={self.how}>\n{o}"
f"left=\n{self.left.print(n)}\n{o}right=\n{self.right.print(n)}"
)
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Join</b><br />
Left: {self.left._repr_html_()}
Right: {self.right._repr_html_()}
</li>
</uL>
"""
class SetOperation(LogicalPlan):
def __init__(
self,
child: Optional["LogicalPlan"],
other: Optional["LogicalPlan"],
set_op: str,
is_all: bool = True,
by_name: bool = False,
) -> None:
super().__init__(child)
self.other = other
self.by_name = by_name
self.is_all = is_all
self.set_op = set_op
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
rel = proto.Relation()
if self._child is not None:
rel.set_op.left_input.CopyFrom(self._child.plan(session))
if self.other is not None:
rel.set_op.right_input.CopyFrom(self.other.plan(session))
if self.set_op == "union":
rel.set_op.set_op_type = proto.SetOperation.SET_OP_TYPE_UNION
elif self.set_op == "intersect":
rel.set_op.set_op_type = proto.SetOperation.SET_OP_TYPE_INTERSECT
elif self.set_op == "except":
rel.set_op.set_op_type = proto.SetOperation.SET_OP_TYPE_EXCEPT
else:
raise NotImplementedError(
"""
Unsupported set operation type: %s.
"""
% rel.set_op.set_op_type
)
rel.set_op.is_all = self.is_all
rel.set_op.by_name = self.by_name
return rel
def print(self, indent: int = 0) -> str:
assert self._child is not None
assert self.other is not None
i = " " * indent
o = " " * (indent + LogicalPlan.INDENT)
n = indent + LogicalPlan.INDENT * 2
return (
f"{i}SetOperation\n{o}child1=\n{self._child.print(n)}"
f"\n{o}child2=\n{self.other.print(n)}"
)
def _repr_html_(self) -> str:
assert self._child is not None
assert self.other is not None
return f"""
<ul>
<li>
<b>SetOperation</b><br />
Left: {self._child._repr_html_()}
Right: {self.other._repr_html_()}
</li>
</uL>
"""
class Repartition(LogicalPlan):
"""Repartition Relation into a different number of partitions."""
def __init__(self, child: Optional["LogicalPlan"], num_partitions: int, shuffle: bool) -> None:
super().__init__(child)
self._num_partitions = num_partitions
self._shuffle = shuffle
def plan(self, session: "SparkConnectClient") -> proto.Relation:
rel = proto.Relation()
if self._child is not None:
rel.repartition.input.CopyFrom(self._child.plan(session))
rel.repartition.shuffle = self._shuffle
rel.repartition.num_partitions = self._num_partitions
return rel
def print(self, indent: int = 0) -> str:
plan_name = "repartition" if self._shuffle else "coalesce"
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return f"{' ' * indent}<{plan_name} num_partitions={self._num_partitions}>\n{c_buf}"
def _repr_html_(self) -> str:
plan_name = "repartition" if self._shuffle else "coalesce"
return f"""
<ul>
<li>
<b>{plan_name}</b><br />
Child: {self._child_repr_()}
num_partitions: {self._num_partitions}
</li>
</ul>
"""
class SubqueryAlias(LogicalPlan):
"""Alias for a relation."""
def __init__(self, child: Optional["LogicalPlan"], alias: str) -> None:
super().__init__(child)
self._alias = alias
def plan(self, session: "SparkConnectClient") -> proto.Relation:
rel = proto.Relation()
if self._child is not None:
rel.subquery_alias.input.CopyFrom(self._child.plan(session))
rel.subquery_alias.alias = self._alias
return rel
def print(self, indent: int = 0) -> str:
c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
return f"{' ' * indent}<SubqueryAlias alias={self._alias}>\n{c_buf}"
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>SubqueryAlias</b><br />
Child: {self._child_repr_()}
Alias: {self._alias}
</li>
</ul>
"""
class SQL(LogicalPlan):
def __init__(self, query: str) -> None:
super().__init__(None)
self._query = query
def plan(self, session: "SparkConnectClient") -> proto.Relation:
rel = proto.Relation()
rel.sql.query = self._query
return rel
def print(self, indent: int = 0) -> str:
i = " " * indent
sub_query = self._query.replace("\n", "")[:50]
return f"""{i}<SQL query='{sub_query}...'>"""
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>SQL</b><br />
Statement: <pre>{self._query}</pre>
</li>
</ul>
"""
class Range(LogicalPlan):
def __init__(
self,
start: int,
end: int,
step: int,
num_partitions: Optional[int] = None,
) -> None:
super().__init__(None)
self._start = start
self._end = end
self._step = step
self._num_partitions = num_partitions
def plan(self, session: "SparkConnectClient") -> proto.Relation:
rel = proto.Relation()
rel.range.start = self._start
rel.range.end = self._end
rel.range.step = self._step
if self._num_partitions is not None:
rel.range.num_partitions = self._num_partitions
return rel
def print(self, indent: int = 0) -> str:
return (
f"{' ' * indent}"
f"<Range start={self._start}, end={self._end}, "
f"step={self._step}, num_partitions={self._num_partitions}>"
)
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Range</b><br />
Start: {self._start} <br />
End: {self._end} <br />
Step: {self._step} <br />
NumPartitions: {self._num_partitions} <br />
{self._child_repr_()}
</li>
</uL>
"""
class NAFill(LogicalPlan):
def __init__(
self, child: Optional["LogicalPlan"], cols: Optional[List[str]], values: List[Any]
) -> None:
super().__init__(child)
assert (
isinstance(values, list)
and len(values) > 0
and all(isinstance(v, (bool, int, float, str)) for v in values)
)
if cols is not None and len(cols) > 0:
assert isinstance(cols, list) and all(isinstance(c, str) for c in cols)
if len(values) > 1:
assert len(cols) == len(values)
self.cols = cols
self.values = values
def _convert_value(self, v: Any) -> proto.Expression.Literal:
value = proto.Expression.Literal()
if isinstance(v, bool):
value.boolean = v
elif isinstance(v, int):
value.i64 = v
elif isinstance(v, float):
value.fp64 = v
else:
value.string = v
return value
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.fill_na.input.CopyFrom(self._child.plan(session))
if self.cols is not None and len(self.cols) > 0:
plan.fill_na.cols.extend(self.cols)
plan.fill_na.values.extend([self._convert_value(v) for v in self.values])
return plan
def print(self, indent: int = 0) -> str:
return f"""{" " * indent}<NAFill cols='{self.cols}', values='{self.values}'>"""
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>NAFill</b><br />
Cols: {self.cols} <br />
Values: {self.values} <br />
{self._child_repr_()}
</li>
</ul>
"""
class StatSummary(LogicalPlan):
def __init__(self, child: Optional["LogicalPlan"], statistics: List[str]) -> None:
super().__init__(child)
self.statistics = statistics
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.summary.input.CopyFrom(self._child.plan(session))
plan.summary.statistics.extend(self.statistics)
return plan
def print(self, indent: int = 0) -> str:
i = " " * indent
return f"""{i}<Summary statistics='{self.statistics}'>"""
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Summary</b><br />
Statistics: {self.statistics} <br />
{self._child_repr_()}
</li>
</ul>
"""
class StatCrosstab(LogicalPlan):
def __init__(self, child: Optional["LogicalPlan"], col1: str, col2: str) -> None:
super().__init__(child)
self.col1 = col1
self.col2 = col2
def plan(self, session: "SparkConnectClient") -> proto.Relation:
assert self._child is not None
plan = proto.Relation()
plan.crosstab.input.CopyFrom(self._child.plan(session))
plan.crosstab.col1 = self.col1
plan.crosstab.col2 = self.col2
return plan
def print(self, indent: int = 0) -> str:
i = " " * indent
return f"""{i}<Crosstab col1='{self.col1}' col2='{self.col2}'>"""
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>Crosstab</b><br />
Col1: {self.col1} <br />
Col2: {self.col2} <br />
{self._child_repr_()}
</li>
</ul>
"""
class CreateView(LogicalPlan):
def __init__(
self, child: Optional["LogicalPlan"], name: str, is_global: bool, replace: bool
) -> None:
super().__init__(child)
self._name = name
self._is_gloal = is_global
self._replace = replace
def command(self, session: "SparkConnectClient") -> proto.Command:
assert self._child is not None
plan = proto.Command()
plan.create_dataframe_view.replace = self._replace
plan.create_dataframe_view.is_global = self._is_gloal
plan.create_dataframe_view.name = self._name
plan.create_dataframe_view.input.CopyFrom(self._child.plan(session))
return plan
def print(self, indent: int = 0) -> str:
i = " " * indent
return (
f"{i}"
f"<CreateView name='{self._name}' "
f"is_global='{self._is_gloal} "
f"replace='{self._replace}'>"
)
def _repr_html_(self) -> str:
return f"""
<ul>
<li>
<b>CreateView</b><br />
name: {self._name} <br />
is_global: {self._is_gloal} <br />
replace: {self._replace} <br />
{self._child_repr_()}
</li>
</ul>
"""
|
{
"content_hash": "d39824fc9a514127cd4fa427e5a1feab",
"timestamp": "",
"source": "github",
"line_count": 1022,
"max_line_length": 99,
"avg_line_length": 32.03816046966732,
"alnum_prop": 0.5360535076199493,
"repo_name": "zzcclp/spark",
"id": "9a22d6ea38ecc6b488ce657d2ddeaaad861d6e2c",
"size": "33528",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/pyspark/sql/connect/plan.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "61530"
},
{
"name": "Batchfile",
"bytes": "27482"
},
{
"name": "C",
"bytes": "1493"
},
{
"name": "CSS",
"bytes": "26338"
},
{
"name": "Dockerfile",
"bytes": "16279"
},
{
"name": "HTML",
"bytes": "42080"
},
{
"name": "HiveQL",
"bytes": "1859465"
},
{
"name": "Java",
"bytes": "4753582"
},
{
"name": "JavaScript",
"bytes": "223014"
},
{
"name": "Jupyter Notebook",
"bytes": "4310512"
},
{
"name": "Makefile",
"bytes": "2379"
},
{
"name": "PLpgSQL",
"bytes": "352609"
},
{
"name": "PowerShell",
"bytes": "4221"
},
{
"name": "Python",
"bytes": "8575381"
},
{
"name": "R",
"bytes": "1287477"
},
{
"name": "ReScript",
"bytes": "240"
},
{
"name": "Roff",
"bytes": "32470"
},
{
"name": "Scala",
"bytes": "44604224"
},
{
"name": "Shell",
"bytes": "245400"
},
{
"name": "Thrift",
"bytes": "2016"
},
{
"name": "q",
"bytes": "111129"
}
],
"symlink_target": ""
}
|
import os
import re
from oslo_serialization import jsonutils
import six
from nova import test
from nova.tests.functional import integrated_helpers
PROJECT_ID = "6f70656e737461636b20342065766572"
class NoMatch(test.TestingException):
pass
def pretty_data(data):
data = jsonutils.dumps(jsonutils.loads(data), sort_keys=True,
indent=4)
return '\n'.join(line.rstrip() for line in data.split('\n')).strip()
def objectify(data):
if not data:
return {}
# NOTE(sdague): templates will contain values like %(foo)s
# throughout them. If these are inside of double quoted
# strings, life is good, and we can treat it just like valid
# json to load it to python.
#
# However we've got some fields which are ints, like
# aggregate_id. This means we've got a snippet in the sample
# that looks like:
#
# "id": %(aggregate_id)s,
#
# which is not valid json, and will explode. We do a quick and
# dirty transform of this to:
#
# "id": "%(int:aggregate_id)s",
#
# That makes it valid data to convert to json, but keeps
# around the information that we need to drop those strings
# later. The regex anchors from the ': ', as all of these will
# be top rooted keys.
data = re.sub(r'(\: )%\((.+)\)s([^"])', r'\1"%(int:\2)s"\3', data)
return jsonutils.loads(data)
class ApiSampleTestBase(integrated_helpers._IntegratedTestBase):
all_extensions = False
extension_name = None
sample_dir = None
microversion = None
_use_common_server_api_samples = False
def __init__(self, *args, **kwargs):
super(ApiSampleTestBase, self).__init__(*args, **kwargs)
self.subs = {} # TODO(auggy): subs should really be a class
@property
def subs(self):
return self._subs
@subs.setter
def subs(self, value):
non_strings = \
{k: v for k, v in value.items() if
(not k == 'compute_host') and
(not isinstance(v, six.string_types))}
if len(non_strings) > 0:
raise TypeError("subs can't contain non-string values:"
"\n%(non_strings)s" %
{'non_strings': non_strings})
else:
self._subs = value
@classmethod
def _get_sample_path(cls, name, dirname, suffix='', api_version=None):
parts = [dirname]
parts.append('api_samples')
# TODO(gmann): Once all tests gets merged for all extension
# then we need to have a simple logic here to select sample file
# directory which will be based on cls.sample_dir and api_version.
# All other things will go away from here. Currently hacking this
# till we merge every extensions tests.
if cls.all_extensions and not cls.sample_dir:
parts.append('all_extensions')
# Note(gmann): if _use_common_server_api_samples is set to True
# then common server sample files present in 'servers' directory
# will be used. As of now it is being used for server POST request
# to avoid duplicate copy of server req and resp sample files.
# Example - ServersSampleBase's _post_server method.
elif cls._use_common_server_api_samples:
parts.append('servers')
else:
if cls.sample_dir:
parts.append(cls.sample_dir)
elif cls.extension_name:
parts.append(cls.extension_name)
if api_version:
parts.append('v' + api_version)
parts.append(name + ".json" + suffix)
return os.path.join(*parts)
@classmethod
def _get_sample(cls, name, api_version=None):
dirname = os.path.dirname(os.path.abspath(__file__))
dirname = os.path.normpath(os.path.join(dirname,
"../../../doc"))
return cls._get_sample_path(name, dirname, api_version=api_version)
@classmethod
def _get_template(cls, name, api_version=None):
dirname = os.path.dirname(os.path.abspath(__file__))
dirname = os.path.normpath(os.path.join(dirname,
"./api_sample_tests"))
return cls._get_sample_path(name, dirname, suffix='.tpl',
api_version=api_version)
def _read_template(self, name):
template = self._get_template(name, self.microversion)
with open(template) as inf:
return inf.read().strip()
def _write_template(self, name, data):
with open(self._get_template(name,
self.microversion), 'w') as outf:
outf.write(data)
def _write_sample(self, name, data):
with open(self._get_sample(
name, self.microversion), 'w') as outf:
outf.write(data)
def _compare_result(self, expected, result, result_str):
matched_value = None
# None
if expected is None:
if result is None:
pass
elif result == u'':
pass # TODO(auggy): known issue Bug#1544720
else:
raise NoMatch('%(result_str)s: Expected None, got %(result)s.'
% {'result_str': result_str, 'result': result})
# dictionary
elif isinstance(expected, dict):
if not isinstance(result, dict):
raise NoMatch('%(result_str)s: %(result)s is not a dict.'
% {'result_str': result_str, 'result': result})
ex_keys = sorted(expected.keys())
res_keys = sorted(result.keys())
if ex_keys != res_keys:
ex_delta = []
res_delta = []
for key in ex_keys:
if key not in res_keys:
ex_delta.append(key)
for key in res_keys:
if key not in ex_keys:
res_delta.append(key)
raise NoMatch(
'Dictionary key mismatch:\n'
'Extra key(s) in template:\n%(ex_delta)s\n'
'Extra key(s) in %(result_str)s:\n%(res_delta)s\n' %
{'ex_delta': ex_delta, 'result_str': result_str,
'res_delta': res_delta})
for key in ex_keys:
# TODO(auggy): pass key name along as well for error reporting
res = self._compare_result(expected[key], result[key],
result_str)
matched_value = res or matched_value
# list
elif isinstance(expected, list):
if not isinstance(result, list):
raise NoMatch(
'%(result_str)s: %(result)s is not a list.' %
{'result_str': result_str, 'result': result})
expected = expected[:]
extra = []
for res_obj in result:
for i, ex_obj in enumerate(expected):
try:
matched_value = self._compare_result(ex_obj,
res_obj,
result_str)
del expected[i]
break
except NoMatch:
pass
else:
extra.append(res_obj)
error = []
if expected:
error.append('Extra list items in template:')
error.extend([repr(o) for o in expected])
if extra:
error.append('Extra list items in %(result_str)s:' %
{'result_str': result_str})
error.extend([repr(o) for o in extra])
if error:
raise NoMatch('\n'.join(error))
# template string
elif isinstance(expected, six.string_types) and '%' in expected:
# NOTE(vish): escape stuff for regex
for char in '[]<>?':
expected = expected.replace(char, '\\%s' % char)
# NOTE(vish): special handling of subs that are not quoted. We are
# expecting an int but we had to pass in a string
# so the json would parse properly.
if expected.startswith("%(int:"):
result = str(result)
expected = expected.replace('int:', '')
expected = expected % self.subs
expected = '^%s$' % expected
match = re.match(expected, result)
if not match:
raise NoMatch(
'Values do not match:\n'
'Template: %(expected)s\n%(result_str)s: %(result)s' %
{'expected': expected, 'result_str': result_str,
'result': result})
try:
matched_value = match.group('id')
except IndexError:
if match.groups():
matched_value = match.groups()[0]
# string
elif isinstance(expected, six.string_types):
# NOTE(danms): Ignore whitespace in this comparison
expected = expected.strip()
if isinstance(result, six.string_types):
result = result.strip()
if expected != result:
# NOTE(tdurakov):this attempt to parse string as JSON
# is needed for correct comparison of hypervisor.cpu_info,
# which is stringified JSON object
#
# TODO(tdurakov): remove this check as soon as
# hypervisor.cpu_info become common JSON object in REST API.
try:
expected = objectify(expected)
result = objectify(result)
return self._compare_result(expected, result,
result_str)
except ValueError:
pass
raise NoMatch(
'Values do not match:\n'
'Template: %(expected)s\n%(result_str)s: '
'%(result)s' % {'expected': expected,
'result_str': result_str,
'result': result})
# int
elif isinstance(expected, (six.integer_types, float)):
if expected != result:
raise NoMatch(
'Values do not match:\n'
'Template: %(expected)s\n%(result_str)s: '
'%(result)s' % {'expected': expected,
'result_str': result_str,
'result': result})
else:
raise ValueError(
'Unexpected type %(expected_type)s'
% {'expected_type': type(expected)})
return matched_value
def generalize_subs(self, subs, vanilla_regexes):
"""Give the test a chance to modify subs after the server response
was verified, and before the on-disk doc/api_samples file is checked.
This may be needed by some tests to convert exact matches expected
from the server into pattern matches to verify what is in the
sample file.
If there are no changes to be made, subs is returned unharmed.
"""
return subs
def _update_links(self, sample_data):
"""Process sample data and update version specific links."""
# replace version urls
url_re = self._get_host() + "/v(2|2\.1)/" + PROJECT_ID
new_url = self._get_host() + "/" + self.api_major_version
if self._project_id:
new_url += "/" + PROJECT_ID
updated_data = re.sub(url_re, new_url, sample_data)
# replace unversioned urls
url_re = self._get_host() + "/" + PROJECT_ID
new_url = self._get_host()
if self._project_id:
new_url += "/" + PROJECT_ID
updated_data = re.sub(url_re, new_url, updated_data)
return updated_data
def _verify_response(self, name, subs, response, exp_code,
update_links=True):
# Always also include the laundry list of base regular
# expressions for possible key values in our templates. Test
# specific patterns (the value of ``subs``) can override
# these.
regexes = self._get_regexes()
regexes.update(subs)
subs = regexes
self.subs = subs
self.assertEqual(exp_code, response.status_code)
response_data = response.content
response_data = pretty_data(response_data)
if not os.path.exists(self._get_template(name,
self.microversion)):
self._write_template(name, response_data)
template_data = response_data
else:
template_data = self._read_template(name)
if (self.generate_samples and
not os.path.exists(self._get_sample(
name, self.microversion))):
self._write_sample(name, response_data)
sample_data = response_data
else:
with open(self._get_sample(name,
self.microversion)) as sample:
sample_data = sample.read()
if update_links:
sample_data = self._update_links(sample_data)
try:
template_data = objectify(template_data)
response_data = objectify(response_data)
response_result = self._compare_result(template_data,
response_data, "Response")
# NOTE(danms): replace some of the subs with patterns for the
# doc/api_samples check, which won't have things like the
# correct compute host name. Also let the test do some of its
# own generalization, if necessary
vanilla_regexes = self._get_regexes()
subs['compute_host'] = vanilla_regexes['host_name']
subs['id'] = vanilla_regexes['id']
subs['uuid'] = vanilla_regexes['uuid']
subs['image_id'] = vanilla_regexes['uuid']
subs = self.generalize_subs(subs, vanilla_regexes)
self.subs = subs
sample_data = objectify(sample_data)
self._compare_result(template_data, sample_data, "Sample")
return response_result
except NoMatch:
raise
def _get_host(self):
return 'http://openstack.example.com'
def _get_glance_host(self):
return 'http://glance.openstack.example.com'
def _get_regexes(self):
text = r'(\\"|[^"])*'
isotime_re = '\d{4}-[0,1]\d-[0-3]\dT\d{2}:\d{2}:\d{2}Z'
strtime_re = '\d{4}-[0,1]\d-[0-3]\dT\d{2}:\d{2}:\d{2}\.\d{6}'
xmltime_re = ('\d{4}-[0,1]\d-[0-3]\d '
'\d{2}:\d{2}:\d{2}'
'(\.\d{6})?(\+00:00)?')
# NOTE(claudiub): the x509 keypairs are different from the
# ssh keypairs. For example, the x509 fingerprint has 40 bytes.
return {
'isotime': isotime_re,
'strtime': strtime_re,
'strtime_or_none': r'None|%s' % strtime_re,
'xmltime': xmltime_re,
'password': '[0-9a-zA-Z]{1,12}',
'ip': '[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}',
'ip6': '([0-9a-zA-Z]{1,4}:){1,7}:?[0-9a-zA-Z]{1,4}',
'id': '(?P<id>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}'
'-[0-9a-f]{4}-[0-9a-f]{12})',
'uuid': '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}'
'-[0-9a-f]{4}-[0-9a-f]{12}',
'reservation_id': 'r-[0-9a-zA-Z]{8}',
'private_key': '(-----BEGIN RSA PRIVATE KEY-----|)'
'[a-zA-Z0-9\n/+=]*'
'(-----END RSA PRIVATE KEY-----|)',
'public_key': '(ssh-rsa|-----BEGIN CERTIFICATE-----)'
'[ a-zA-Z0-9\n/+=]*'
'(Generated-by-Nova|-----END CERTIFICATE-----)',
'fingerprint': '(([0-9a-f]{2}:){19}|([0-9a-f]{2}:){15})'
'[0-9a-f]{2}',
'keypair_type': 'ssh|x509',
'host': self._get_host(),
'host_name': r'\w+',
'glance_host': self._get_glance_host(),
'compute_host': self.compute.host,
'text': text,
'int': '[0-9]+',
'user_id': text,
'api_vers': self.api_major_version,
'compute_endpoint': self._get_compute_endpoint(),
'versioned_compute_endpoint': self._get_vers_compute_endpoint(),
}
def _get_compute_endpoint(self):
# NOTE(sdague): "openstack" is stand in for project_id, it
# should be more generic in future.
if self._project_id:
return '%s/%s' % (self._get_host(), PROJECT_ID)
else:
return self._get_host()
def _get_vers_compute_endpoint(self):
# NOTE(sdague): "openstack" is stand in for project_id, it
# should be more generic in future.
if self._project_id:
return '%s/%s/%s' % (self._get_host(), self.api_major_version,
PROJECT_ID)
else:
return '%s/%s' % (self._get_host(), self.api_major_version)
def _get_response(self, url, method, body=None, strip_version=False,
headers=None):
headers = headers or {}
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
return self.api.api_request(url, body=body, method=method,
headers=headers, strip_version=strip_version)
def _do_options(self, url, strip_version=False, headers=None):
return self._get_response(url, 'OPTIONS', strip_version=strip_version,
headers=headers)
def _do_get(self, url, strip_version=False, headers=None):
return self._get_response(url, 'GET', strip_version=strip_version,
headers=headers)
def _do_post(self, url, name, subs, method='POST', headers=None):
self.subs = subs
body = self._read_template(name) % self.subs
sample = self._get_sample(name, self.microversion)
if self.generate_samples and not os.path.exists(sample):
self._write_sample(name, body)
return self._get_response(url, method, body, headers=headers)
def _do_put(self, url, name, subs, headers=None):
return self._do_post(url, name, subs, method='PUT', headers=headers)
def _do_delete(self, url, headers=None):
return self._get_response(url, 'DELETE', headers=headers)
|
{
"content_hash": "fdf4370ba411b000b96a979e95616bc4",
"timestamp": "",
"source": "github",
"line_count": 458,
"max_line_length": 78,
"avg_line_length": 41.52183406113537,
"alnum_prop": 0.5139086080875006,
"repo_name": "CEG-FYP-OpenStack/scheduler",
"id": "f008d0cac7af454ea7983c2a18ae14b785a650a6",
"size": "19649",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "nova/tests/functional/api_samples_test_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17233936"
},
{
"name": "Shell",
"bytes": "36943"
},
{
"name": "Smarty",
"bytes": "291947"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
packages=['lg_lock'],
package_dir={'': 'src'},
scripts=[],
requires=[]
)
setup(**d)
|
{
"content_hash": "3e27f65f34ea5c1030ac443edbf0d047",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 60,
"avg_line_length": 20.545454545454547,
"alnum_prop": 0.668141592920354,
"repo_name": "EndPointCorp/lg_ros_nodes",
"id": "e42e7f8b862a425965e881438e513bd85bd3c4e1",
"size": "250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lg_lock/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "28157"
},
{
"name": "C++",
"bytes": "291289"
},
{
"name": "CMake",
"bytes": "26675"
},
{
"name": "Dockerfile",
"bytes": "15931"
},
{
"name": "HTML",
"bytes": "29662"
},
{
"name": "JavaScript",
"bytes": "430737"
},
{
"name": "Makefile",
"bytes": "4197"
},
{
"name": "Python",
"bytes": "1144931"
},
{
"name": "Shell",
"bytes": "17851"
}
],
"symlink_target": ""
}
|
__author__ = 'root'
pyg = 'ay'
original = raw_input('Enter a word:')
word = original.lower()
first = word[0]
new_word = word[1:len(word)] + first + pyg
if len(original) > 0 and original.isalpha():
print new_word
else:
print 'empty'
|
{
"content_hash": "16fd4317faf45604a051be11122ec33f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 44,
"avg_line_length": 21.90909090909091,
"alnum_prop": 0.6348547717842323,
"repo_name": "SpAiNiOr/mystudy",
"id": "babc167d17523c75b142289cb3b400297df183be",
"size": "241",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "learning/RegularExpression/Pig_Latin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "13"
},
{
"name": "Dockerfile",
"bytes": "447"
},
{
"name": "Java",
"bytes": "2439"
},
{
"name": "JavaScript",
"bytes": "1422"
},
{
"name": "Python",
"bytes": "12215"
}
],
"symlink_target": ""
}
|
"""
Tests for apyori.filter_ordered_statistics.
"""
from nose.tools import eq_
from apyori import OrderedStatistic
from apyori import filter_ordered_statistics
TEST_DATA = [
OrderedStatistic(frozenset(['A']), frozenset(['B']), 0.1, 0.7),
OrderedStatistic(frozenset(['A']), frozenset(['B']), 0.3, 0.5),
]
def test_normal():
"""
Test for normal data.
"""
result = list(filter_ordered_statistics(
TEST_DATA, min_confidence=0.1, min_lift=0.5))
eq_(result, TEST_DATA)
def test_min_confidence():
"""
Filter by minimum confidence.
"""
result = list(filter_ordered_statistics(
TEST_DATA, min_confidence=0.2, min_lift=0.1))
eq_(result, [TEST_DATA[1]])
def test_min_lift():
"""
Filter by minimum lift.
"""
result = list(filter_ordered_statistics(
TEST_DATA, min_confidence=0.0, min_lift=0.6))
eq_(result, [TEST_DATA[0]])
|
{
"content_hash": "62ffd6c2de7f997582b1ab016375a81f",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 67,
"avg_line_length": 22.26829268292683,
"alnum_prop": 0.624315443592552,
"repo_name": "VictorQuintana91/Thesis",
"id": "b7820edbb7c6646fbea1798bac337116459f673f",
"size": "913",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/apyori-1.1.1/test/test_filter_ordered_statistics.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "808"
},
{
"name": "C",
"bytes": "107599"
},
{
"name": "C++",
"bytes": "90179"
},
{
"name": "HTML",
"bytes": "224793"
},
{
"name": "Java",
"bytes": "6814"
},
{
"name": "Jupyter Notebook",
"bytes": "855103"
},
{
"name": "M4",
"bytes": "11398"
},
{
"name": "Makefile",
"bytes": "221870"
},
{
"name": "PostScript",
"bytes": "460967"
},
{
"name": "Python",
"bytes": "34586"
},
{
"name": "Roff",
"bytes": "157465"
},
{
"name": "Shell",
"bytes": "96249"
}
],
"symlink_target": ""
}
|
import sys
import yaml
import numpy as np
from astropy.table import Table, vstack
from astropy.io import fits
import argparse
from ..utils import create_mask
def main():
usage = "usage: %(prog)s [config files]"
description = "Merge FITS tables."
parser = argparse.ArgumentParser(usage=usage,description=description)
parser.add_argument('--output', default = None, required=True)
parser.add_argument('--filter', default = None, type=str)
parser.add_argument('--ts_threshold', default = None, type=float)
parser.add_argument('files', nargs='*', default = None,
help='One or more FITS files containing BINTABLEs.')
args = parser.parse_args()
h0 = fits.open(args.files[0])
hdu_names = [t.name for t in h0]
tables = {}
for f in sorted(args.files):
for t in hdu_names[1:]:
tables.setdefault(t,[])
tables[t] += [Table.read(f,t)]
for k, v in tables.items():
if k not in ['CATALOG','SED','LIKELIHOOD']:
tables[k] = v[0]
else:
tables[k] = vstack(v)
cat = tables['CATALOG']
m = np.ones(len(cat),dtype=bool)
if args.filter:
cuts = yaml.load(open(args.filter))
if 'inclusive' in cuts:
m &= create_mask(tables['CATALOG'],cuts['inclusive'])
if 'exclusive' in cuts:
m &= ~create_mask(tables['CATALOG'],cuts['exclusive'])
if args.ts_threshold is not None:
m &= (cat['ts'] > args.ts_threshold)
tab_hdus = []
for t in hdu_names[1:]:
tab = tables[t]
if t in ['CATALOG','SED','LIKELIHOOD']:
tab = tab[m]
tab_hdus += [fits.table_to_hdu(tab)]
hdulist = fits.HDUList([fits.PrimaryHDU()] + tab_hdus)
hdulist.writeto(args.output,clobber=True)
if __name__ == "__main__":
main()
|
{
"content_hash": "571b15692282d22f717db7656eadc74c",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 76,
"avg_line_length": 27.056338028169016,
"alnum_prop": 0.5616866215512754,
"repo_name": "woodmd/haloanalysis",
"id": "9e8db929851e4eb8647fa06b04cb11fbe74cfb51",
"size": "1921",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extpipe/scripts/merge_tables.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1222822"
},
{
"name": "Python",
"bytes": "344215"
},
{
"name": "Shell",
"bytes": "2323"
}
],
"symlink_target": ""
}
|
from starcluster.clustersetup import ClusterSetup
from starcluster.logger import log
class ControlFREECInstaller(ClusterSetup):
def run(self, nodes, master, user, user_shell, volumes):
for node in nodes:
log.info("Installing Control-FREEC 7.2 on %s" % (node.alias))
node.ssh.execute('wget -c -P /opt/software/controlfreec/7.2 http://bioinfo-out.curie.fr/projects/freec/src/FREEC_Linux64.tar.gz')
node.ssh.execute('tar -xzf /opt/software/controlfreec/7.2/FREEC_Linux64.tar.gz -C /opt/software/controlfreec/7.2')
node.ssh.execute('wget -c -P /opt/software/controlfreec/7.2 http://bioinfo-out.curie.fr/projects/freec/src/makeGraph.R')
node.ssh.execute('mkdir -p /usr/local/Modules/applications/controlfreec/;touch /usr/local/Modules/applications/controlfreec/7.2')
node.ssh.execute('echo "#%Module" >> /usr/local/Modules/applications/controlfreec/7.2')
node.ssh.execute('echo "set root /opt/software/controlfreec/7.2" >> /usr/local/Modules/applications/controlfreec/7.2')
node.ssh.execute('echo -e "prepend-path\tPATH\t\$root" >> /usr/local/Modules/applications/controlfreec/7.2')
|
{
"content_hash": "1e8e1bd207e09544e1b6f3daec9f0f82",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 132,
"avg_line_length": 73.8,
"alnum_prop": 0.7515808491418248,
"repo_name": "meissnert/StarCluster-Plugins",
"id": "667e74432e0438472038796e03e8ea6598a82e8b",
"size": "1107",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "controlfreec_7_2.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "152051"
}
],
"symlink_target": ""
}
|
"""
Publication test
"""
# standard modules
import os
import sys
# yael modules
# TODO find a better way to do this
PROJECT_DIRECTORY = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0]))))
sys.path.append(PROJECT_DIRECTORY)
from yael import DC
from yael import MediaType
from yael import OPFMeta3
from yael import Parsing
from yael import Publication
__author__ = "Alberto Pettarin"
__copyright__ = "Copyright 2015, Alberto Pettarin (www.albertopettarin.it)"
__license__ = "MIT"
__version__ = "0.0.9"
__email__ = "alberto@albertopettarin.it"
__status__ = "Development"
def usage():
print("")
print("$ ./%s path/to/dir [--no-mo]" % sys.argv[0])
print("$ ./%s path/to/file.epub [--no-mo]" % sys.argv[0])
print("")
def main():
if (len(sys.argv) > 2) and (sys.argv[2] == "--no-mo"):
p = Publication(
path=sys.argv[1],
parsing_options=[Parsing.NO_MEDIA_OVERLAY])
elif len(sys.argv) > 1:
p = Publication(
path=sys.argv[1],
parsing_options=[])
else:
usage()
return
print("")
print("Manifestation = %s" % p.manifestation)
print("Size = %s" % p.size)
print("Unique identifier = %s" % p.unique_identifier)
print("dcterms:modified = %s" % p.dcterms_modified)
print("Release identifier = %s" % p.release_identifier)
print("Version = %s" % p.version)
print("")
print("Renditions = %d" % (len(p.container.renditions)))
print("")
print("Internal path cover = %s" % p.internal_path_cover_image)
print("")
# print the JSON string of this publication
# pretty, 4-chars indented, keys not sorted, entries with empty/null values not removed
#print(p)
# print the JSON string of this publication, compressed
#print(p.json_string())
# print the JSON string of this publication, compressed and clean
#print(p.json_string(clean=True))
# print the JSON string of this publication
# pretty, 4-chars indented, sorted keys, entries with empty/null values removed
#print(p.json_string(pretty=True, indent=2, sort=True, clean=True))
# print toc (default rendition)
# nav toc (if EPUB 3) or ncx toc (if EPUB 2)
#print(p.container.default_rendition.toc)
# print ncx toc (default rendition)
#print(p.container.default_rendition.ncx_toc)
# print landmarks (default rendition)
#print(p.container.default_rendition.landmarks)
# metadatum with id="aut"
#print(p.container.default_rendition.pac_document.metadata.metadatum_by_id("aut"))
# dc:subject metadata
#for metadatum in p.container.default_rendition.pac_document.metadata.metadata_by_tag(DC.E_NS_SUBJECT):
# print(metadatum)
# get the value for the media:active-class metadatum
#metadata = p.container.default_rendition.pac_document.metadata.metadata_by_property(OPFMeta3.V_MEDIA_ACTIVE_CLASS)
#if len(metadata) > 0:
# print(metadata[0].v_text)
# get the values for the media:narrator metadata
#for metadatum in p.container.default_rendition.pac_document.metadata.metadata_by_property(OPFMeta3.V_MEDIA_NARRATOR):
# print(metadatum.v_text)
# get the values for the media:duration metadata
#for metadatum in p.container.default_rendition.pac_document.metadata.metadata_by_property(OPFMeta3.V_MEDIA_DURATION):
# if metadatum.v_refines == None:
# print("Total: %s" % metadatum.v_text)
# else:
# print("Ref %s: %s" % (metadatum.v_refines, metadatum.v_text))
# all PNG images
#for img in p.container.default_rendition.pac_document.manifest.items_by_media_type("image/png"):
# print(img)
# all JPEG images
#for img in p.container.default_rendition.pac_document.manifest.items_by_media_type(MediaType.JPEG):
# print(img)
# all images
#for img in p.container.default_rendition.pac_document.manifest.image_items:
# print(img)
# all manifest items with scripted property
#for item in p.container.default_rendition.pac_document.manifest.scripted_items:
# print(item)
# all Media Overlay documents
#for item in p.container.default_rendition.pac_document.manifest.mo_items:
# print(item)
# all files referenced in the manifest, in order
#for i_p_file in p.container.default_rendition.pac_document.files_referenced_manifest:
# print(i_p_file)
# all files referenced in the spine, in order
#for i_p_file in p.container.default_rendition.pac_document.files_referenced_spine:
# print(i_p_file)
# all files referenced in the spine, with linear="yes" or omitted, in order
#for i_p_file in p.container.default_rendition.pac_document.files_referenced_spine_linear:
# print(i_p_file)
# for all MO documents, print the referenced audio files
#for mo_doc in p.container.default_rendition.mo_documents:
# print(mo_doc.internal_path)
# for audio_file in mo_doc.referenced_audio_files:
# print(" + %s" % audio_file)
# print("")
# for all MO documents, print the referenced text fragment identifiers, in order
#for mo_doc in p.container.default_rendition.mo_documents:
# print(mo_doc.internal_path)
# for text_id in mo_doc.referenced_fragment_identifiers:
# print(" + %s" % text_id)
# print("")
# for all MO documents, print the referenced text fragment identifiers,
# grouped by text file
#for mo_doc in p.container.default_rendition.mo_documents:
# grouped = mo_doc.grouped_referenced_fragment_identifiers
# for key in grouped:
# print(" + %s" % key)
# for text_id in grouped[key]:
# print(" + %s" % text_id)
# print("")
# test Multiple Renditions
#if len(p.container.renditions) > 1:
# print("Publication Unique ID: %s" % p.unique_identifier)
# print("Publication Release ID: %s" % p.release_identifier)
# print("Rendition 1 Unique ID: %s" % p.container.renditions[0].pac_document.v_unique_identifier)
# print("Rendition 2 Unique ID: %s" % p.container.renditions[1].pac_document.v_unique_identifier)
# print("")
# print("Rendition Mapping Document\n%s" % p.container.rm_document.json_string(clean=True, pretty=True))
# extract the cover image and save it to /tmp/extracted_cover.png
#i_p_cover = p.internal_path_cover_image
#if (i_p_cover != None) and (i_p_cover in p.assets):
# print("Extracting '%s' ..." % i_p_cover)
# blob = p.assets[i_p_cover].contents
# output_file = open("/tmp/extracted_cover.png", mode="wb")
# output_file.write(blob)
# output_file.close()
# print("Extracted /tmp/extracted_cover.png")
# iterates over the spine, printing the contents
# of the referenced content documents
#for itemref in p.container.default_rendition.pac_document.spine.itemrefs:
# item = p.container.default_rendition.pac_document.manifest.item_by_id(itemref.v_idref)
# if item != None:
# print("")
# print("%s (linear? %s)" % (itemref.v_idref, not (itemref.v_linear == "no")))
# print("")
# print(item.contents)
# print("")
# copy all the assets into a single directory /tmp/foo/ ("flatten")
#if not os.path.exists("/tmp/foo"):
# os.mkdir("/tmp/foo")
#for i_p_asset in p.assets.keys():
# destination = os.path.join("/tmp/foo", os.path.basename(i_p_asset))
# print("Copying '%s' into '%s'..." % (i_p_asset, destination))
# fil = open(destination, "wb")
# fil.write(p.assets[i_p_asset].contents)
# fil.close()
# print("Done")
# print all the assets obfuscated with the IDPF algorithm
#if p.encryption != None:
# print("Assets obfuscated with the Adobe algorithm")
# for i_p_asset in p.encryption.adobe_obfuscated_assets:
# print(" + %s" % i_p_asset)
# print("")
# print("Assets obfuscated with the IDPF algorithm")
# for i_p_asset in p.encryption.idpf_obfuscated_assets:
# print(" + %s" % i_p_asset)
# print("")
# print the number of items in the manifest
#print("Manifest length = %d" % len(p.container.default_rendition.pac_document.manifest))
# print the number of itemrefs in the spine
#print("Spine length = %d" % len(p.container.default_rendition.pac_document.spine))
# print the spine index and linear spine index of the following assets
#i_p_assets = ["OEBPS/Text/cover.xhtml", "OEBPS/Text/p001.xhtml", "OEBPS/z.html", "doesnotexist.xhtml"]
#for i_p in i_p_assets:
# print("Spine index of '%s' = %d" % (i_p, p.container.default_rendition.pac_document.spine_index_by_internal_path(i_p)))
# print("Linear spine index of '%s' = %d" % (i_p, p.container.default_rendition.pac_document.spine_linear_index_by_internal_path(i_p)))
if __name__ == '__main__':
main()
|
{
"content_hash": "80b604df672745fea4b456cb56a36d73",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 142,
"avg_line_length": 39.064655172413794,
"alnum_prop": 0.6397440141233587,
"repo_name": "gabalese/yael",
"id": "94140ebe72428977fa2c4523b50230cfbb7cb488",
"size": "9082",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "yael/examples/publication_example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "263342"
}
],
"symlink_target": ""
}
|
from test.test_support import TestFailed, TESTFN, unlink
from types import ClassType
import warnings
import sys, traceback, os
print '5. Built-in exceptions'
# XXX This is not really enough, each *operation* should be tested!
# Reloading the built-in exceptions module failed prior to Py2.2, while it
# should act the same as reloading built-in sys.
#check here
#try:
# import exceptions
# reload(exceptions)
#except ImportError, e:
# raise TestFailed, e
def test_raise_catch(exc):
try:
raise exc, "spam"
except exc, err:
buf = str(err)
try:
raise exc("spam")
except exc, err:
buf = str(err)
print buf
def r(thing):
test_raise_catch(thing)
if isinstance(thing, ClassType):
print thing.__name__
else:
print thing
r(AttributeError)
import sys
try: x = sys.undefined_attribute
except AttributeError: pass
r(EOFError)
import sys
fp = open(TESTFN, 'w')
fp.close()
fp = open(TESTFN, 'r')
savestdin = sys.stdin
try:
try:
sys.stdin = fp
x = raw_input()
except EOFError:
pass
finally:
sys.stdin = savestdin
fp.close()
r(IOError)
try: open('this file does not exist', 'r')
except IOError: pass
r(ImportError)
try: import undefined_module
except ImportError: pass
r(IndexError)
x = []
try: a = x[10]
except IndexError: pass
r(KeyError)
x = {}
try: a = x['key']
except KeyError: pass
r(KeyboardInterrupt)
print '(not testable in a script)'
r(MemoryError)
print '(not safe to test)'
r(NameError)
try: x = undefined_variable
except NameError: pass
r(OverflowError)
# XXX
# Obscure: in 2.2 and 2.3, this test relied on changing OverflowWarning
# into an error, in order to trigger OverflowError. In 2.4, OverflowWarning
# should no longer be generated, so the focus of the test shifts to showing
# that OverflowError *isn't* generated. OverflowWarning should be gone
# in Python 2.5, and then the filterwarnings() call, and this comment,
# should go away.
warnings.filterwarnings("error", "", OverflowWarning, __name__)
x = 1
for dummy in range(128):
x += x # this simply shouldn't blow up
r(RuntimeError)
print '(not used any more?)'
r(SyntaxError)
try: exec '/\n'
except SyntaxError: pass
# make sure the right exception message is raised for each of these
# code fragments:
def ckmsg(src, msg):
try:
compile(src, '<fragment>', 'exec')
except SyntaxError, e:
print e.msg
if e.msg == msg:
print "ok"
else:
print "expected:", msg
else:
print "failed to get expected SyntaxError"
s = '''\
while 1:
try:
pass
finally:
continue
'''
if sys.platform.startswith('java'):
print "'continue' not supported inside 'finally' clause"
print "ok"
else:
ckmsg(s, "'continue' not supported inside 'finally' clause")
s = '''\
try:
continue
except:
pass
'''
ckmsg(s, "'continue' not properly in loop")
ckmsg("continue\n", "'continue' not properly in loop")
r(IndentationError)
r(TabError)
# can only be tested under -tt, and is the only test for -tt
#try: compile("try:\n\t1/0\n \t1/0\nfinally:\n pass\n", '<string>', 'exec')
#except TabError: pass
#else: raise TestFailed
r(SystemError)
print '(hard to reproduce)'
r(SystemExit)
import sys
try: sys.exit(0)
except SystemExit: pass
r(TypeError)
try: [] + ()
except TypeError: pass
r(ValueError)
try: x = chr(10000)
except ValueError: pass
r(ZeroDivisionError)
try: x = 1/0
except ZeroDivisionError: pass
r(Exception)
try: x = 1/0
except Exception, e: pass
# test that setting an exception at the C level works even if the
# exception object can't be constructed.
class BadException:
def __init__(self):
raise RuntimeError, "can't instantiate BadException"
def test_capi1():
import _testcapi
try:
_testcapi.raise_exception(BadException, 1)
except TypeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
assert co.co_name == "test_capi1"
assert co.co_filename.endswith('test_exceptions'+os.extsep+'py')
else:
print "Expected exception"
def test_capi2():
import _testcapi
try:
_testcapi.raise_exception(BadException, 0)
except RuntimeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
assert co.co_name == "__init__"
assert co.co_filename.endswith('test_exceptions'+os.extsep+'py')
co2 = tb.tb_frame.f_back.f_code
assert co2.co_name == "test_capi2"
else:
print "Expected exception"
if not sys.platform.startswith('java'):
test_capi1()
test_capi2()
unlink(TESTFN)
|
{
"content_hash": "b97df74588954d5f50a912315d5ded21",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 78,
"avg_line_length": 22.204761904761906,
"alnum_prop": 0.6594467081278147,
"repo_name": "loongson-community/EFI-MIPS",
"id": "f44b591690c58142c35d72fbac7204c3aaae75e7",
"size": "4713",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ToolKit/cmds/python/Lib/test/skipped/test_exceptions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "271282"
},
{
"name": "Batchfile",
"bytes": "318"
},
{
"name": "C",
"bytes": "32642014"
},
{
"name": "C++",
"bytes": "1058125"
},
{
"name": "CSS",
"bytes": "2547"
},
{
"name": "GAP",
"bytes": "111381"
},
{
"name": "Groff",
"bytes": "1245691"
},
{
"name": "HTML",
"bytes": "1328432"
},
{
"name": "Lex",
"bytes": "14559"
},
{
"name": "M",
"bytes": "748"
},
{
"name": "Makefile",
"bytes": "468567"
},
{
"name": "Mask",
"bytes": "3420"
},
{
"name": "NSIS",
"bytes": "8743"
},
{
"name": "Objective-C",
"bytes": "3415447"
},
{
"name": "Pascal",
"bytes": "3368"
},
{
"name": "Python",
"bytes": "7763565"
},
{
"name": "R",
"bytes": "546"
},
{
"name": "Shell",
"bytes": "10084"
},
{
"name": "Yacc",
"bytes": "30661"
}
],
"symlink_target": ""
}
|
from unittest import mock
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import fixtures as func_fixtures
from nova.tests.functional import integrated_helpers
class HypervisorError(Exception):
"""This is just used to make sure the exception type is in the fault."""
pass
class ServerFaultTestCase(test.TestCase,
integrated_helpers.InstanceHelperMixin):
"""Tests for the server faults reporting from the API."""
def setUp(self):
super(ServerFaultTestCase, self).setUp()
self.useFixture(nova_fixtures.NeutronFixture(self))
self.useFixture(func_fixtures.PlacementFixture())
self.useFixture(nova_fixtures.GlanceFixture(self))
self.useFixture(nova_fixtures.RealPolicyFixture())
# Start the compute services.
self.start_service('conductor')
self.start_service('scheduler')
self.compute = self.start_service('compute')
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
self.api = api_fixture.api
self.admin_api = api_fixture.admin_api
def test_server_fault_non_nova_exception(self):
"""Creates a server using the non-admin user, then reboots it which
will generate a non-NovaException fault and put the instance into
ERROR status. Then checks that fault details are only visible to the
admin user.
"""
# Create the server with the non-admin user.
server = self._build_server(
networks=[{'port': nova_fixtures.NeutronFixture.port_1['id']}])
server = self.api.post_server({'server': server})
server = self._wait_for_state_change(server, 'ACTIVE')
# Stop the server before rebooting it so that after the driver.reboot
# method raises an exception, the fake driver does not report the
# instance power state as running - that will make the compute manager
# set the instance vm_state to error.
self.api.post_server_action(server['id'], {'os-stop': None})
server = self._wait_for_state_change(server, 'SHUTOFF')
# Stub out the compute driver reboot method to raise a non-nova
# exception to simulate some error from the underlying hypervisor
# which in this case we are going to say has sensitive content.
error_msg = 'sensitive info'
with mock.patch.object(
self.compute.manager.driver, 'reboot',
side_effect=HypervisorError(error_msg)) as mock_reboot:
reboot_request = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], reboot_request)
# In this case we wait for the status to change to ERROR using
# the non-admin user so we can assert the fault details. We also
# wait for the task_state to be None since the wrap_instance_fault
# decorator runs before the reverts_task_state decorator so we will
# be sure the fault is set on the server.
server = self._wait_for_server_parameter(
server, {'status': 'ERROR', 'OS-EXT-STS:task_state': None},
api=self.api)
mock_reboot.assert_called_once()
# The server fault from the non-admin user API response should not
# have details in it.
self.assertIn('fault', server)
fault = server['fault']
self.assertNotIn('details', fault)
# And the sensitive details from the non-nova exception should not be
# in the message.
self.assertIn('message', fault)
self.assertNotIn(error_msg, fault['message'])
# The exception type class name should be in the message.
self.assertIn('HypervisorError', fault['message'])
# Get the server fault details for the admin user.
server = self.admin_api.get_server(server['id'])
fault = server['fault']
# The admin can see the fault details which includes the traceback.
self.assertIn('details', fault)
# The details also contain the exception message (which is not in the
# fault message).
self.assertIn(error_msg, fault['details'])
# Make sure the traceback is there by looking for part of it.
self.assertIn('in reboot_instance', fault['details'])
# The exception type class name should be in the message for the admin
# user as well since the fault handling code cannot distinguish who
# is going to see the message so it only sets class name.
self.assertIn('HypervisorError', fault['message'])
|
{
"content_hash": "979b824540873471e77278a44ceea3e0",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 79,
"avg_line_length": 48.5625,
"alnum_prop": 0.6527241527241527,
"repo_name": "mahak/nova",
"id": "edc3c3b377f9b69f0c71d4a97ac8e86226d101bd",
"size": "5208",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/functional/test_server_faults.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3545"
},
{
"name": "Mako",
"bytes": "1952"
},
{
"name": "Python",
"bytes": "23261880"
},
{
"name": "Shell",
"bytes": "28113"
},
{
"name": "Smarty",
"bytes": "507244"
}
],
"symlink_target": ""
}
|
import sys
from os.path import join
from setuptools import setup
import versioneer
# note that we don't explicitly version numpy,
# let menpo handle that
install_requires = [
'docopt>=0.6,<0.7',
'menpofit>=0.5,<0.6',
'menpodetect>=0.5,<0.6',
'numpy'
]
if sys.version_info.major == 2:
install_requires.append('pathlib==1.0')
setup(name='menpocli',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='A command line interface to the Menpo Project',
author='James Booth',
author_email='james.booth08@imperial.ac.uk',
packages=['menpocli'],
scripts=[join('bin', 'menpofit'),
join('bin', 'menpodetect')],
install_requires=install_requires
)
|
{
"content_hash": "e01c9bb0c5dc57c74771c21b404ec67a",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 66,
"avg_line_length": 27.107142857142858,
"alnum_prop": 0.6482213438735178,
"repo_name": "menpo/menpocli",
"id": "16efbebf1815c9fb4798fab498eb1b1fbc601612",
"size": "759",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "155"
},
{
"name": "Python",
"bytes": "89888"
},
{
"name": "Shell",
"bytes": "124"
}
],
"symlink_target": ""
}
|
import unittest
import pytest
import azure.cosmos.cosmos_client as cosmos_client
import azure.cosmos.documents as documents
import azure.cosmos.errors as errors
from requests.exceptions import ConnectionError
from azure.cosmos.http_constants import HttpHeaders, StatusCodes, SubStatusCodes
import azure.cosmos.retry_utility as retry_utility
import azure.cosmos.endpoint_discovery_retry_policy as endpoint_discovery_retry_policy
from azure.cosmos.request_object import _RequestObject
import azure.cosmos.global_endpoint_manager as global_endpoint_manager
import azure.cosmos.http_constants as http_constants
@pytest.mark.usefixtures("teardown")
class TestStreamingFailover(unittest.TestCase):
DEFAULT_ENDPOINT = "https://geotest.documents.azure.com:443/"
MASTER_KEY = "SomeKeyValue"
WRITE_ENDPOINT1 = "https://geotest-WestUS.documents.azure.com:443/"
WRITE_ENDPOINT2 = "https://geotest-CentralUS.documents.azure.com:443/"
READ_ENDPOINT1 = "https://geotest-SouthCentralUS.documents.azure.com:443/"
READ_ENDPOINT2 = "https://geotest-EastUS.documents.azure.com:443/"
WRITE_ENDPOINT_NAME1 = "West US"
WRITE_ENDPOINT_NAME2 = "Central US"
READ_ENDPOINT_NAME1 = "South Central US"
READ_ENDPOINT_NAME2 = "East US"
preferred_regional_endpoints = [READ_ENDPOINT_NAME1, READ_ENDPOINT_NAME2]
counter = 0
endpoint_sequence = []
def test_streaming_failover(self):
self.OriginalExecuteFunction = retry_utility._ExecuteFunction
retry_utility._ExecuteFunction = self._MockExecuteFunctionEndpointDiscover
connection_policy = documents.ConnectionPolicy()
connection_policy.PreferredLocations = self.preferred_regional_endpoints
connection_policy.DisableSSLVerification = True
self.original_get_database_account = cosmos_client.CosmosClient.GetDatabaseAccount
cosmos_client.CosmosClient.GetDatabaseAccount = self.mock_get_database_account
client = cosmos_client.CosmosClient(self.DEFAULT_ENDPOINT, {'masterKey': self.MASTER_KEY}, connection_policy, documents.ConsistencyLevel.Eventual)
document_definition = { 'id': 'doc',
'name': 'sample document',
'key': 'value'}
created_document = {}
created_document = client.CreateItem("dbs/mydb/colls/mycoll", document_definition)
self.assertDictEqual(created_document, {})
self.assertDictEqual(client.last_response_headers, {})
self.assertEqual(self.counter, 10)
# First request is an initial read collection.
# Next 8 requests hit forbidden write exceptions and the endpoint retry policy keeps
# flipping the resolved endpoint between the 2 write endpoints.
# The 10th request returns the actual read document.
for i in range(0,8):
if i % 2 == 0:
self.assertEqual(self.endpoint_sequence[i], self.WRITE_ENDPOINT1)
else:
self.assertEqual(self.endpoint_sequence[i], self.WRITE_ENDPOINT2)
cosmos_client.CosmosClient.GetDatabaseAccount = self.original_get_database_account
retry_utility._ExecuteFunction = self.OriginalExecuteFunction
def mock_get_database_account(self, url_connection = None):
database_account = documents.DatabaseAccount()
database_account._EnableMultipleWritableLocations = True
database_account._WritableLocations = [
{'name': self.WRITE_ENDPOINT_NAME1, 'databaseAccountEndpoint': self.WRITE_ENDPOINT1},
{'name': self.WRITE_ENDPOINT_NAME2, 'databaseAccountEndpoint': self.WRITE_ENDPOINT2}
]
database_account._ReadableLocations = [
{'name': self.READ_ENDPOINT_NAME1, 'databaseAccountEndpoint': self.READ_ENDPOINT1},
{'name': self.READ_ENDPOINT_NAME2, 'databaseAccountEndpoint': self.READ_ENDPOINT2}
]
return database_account
def _MockExecuteFunctionEndpointDiscover(self, function, *args, **kwargs):
self.counter += 1
if self.counter >= 10 or ( len(args) > 0 and args[1].operation_type == documents._OperationType.Read):
return ({}, {})
else:
self.endpoint_sequence.append(args[1].location_endpoint_to_route)
raise errors.HTTPFailure(StatusCodes.FORBIDDEN, "Request is not permitted in this region", {HttpHeaders.SubStatus: SubStatusCodes.WRITE_FORBIDDEN})
def test_retry_policy_does_not_mark_null_locations_unavailable(self):
self.original_get_database_account = cosmos_client.CosmosClient.GetDatabaseAccount
cosmos_client.CosmosClient.GetDatabaseAccount = self.mock_get_database_account
client = cosmos_client.CosmosClient(self.DEFAULT_ENDPOINT, {'masterKey': self.MASTER_KEY}, None, documents.ConsistencyLevel.Eventual)
endpoint_manager = global_endpoint_manager._GlobalEndpointManager(client)
self.original_mark_endpoint_unavailable_for_read_function = endpoint_manager.mark_endpoint_unavailable_for_read
endpoint_manager.mark_endpoint_unavailable_for_read = self._mock_mark_endpoint_unavailable_for_read
self.original_mark_endpoint_unavailable_for_write_function = endpoint_manager.mark_endpoint_unavailable_for_write
endpoint_manager.mark_endpoint_unavailable_for_write = self._mock_mark_endpoint_unavailable_for_write
self.original_resolve_service_endpoint = endpoint_manager.resolve_service_endpoint
endpoint_manager.resolve_service_endpoint = self._mock_resolve_service_endpoint
# Read and write counters count the number of times the endpoint manager's
# mark_endpoint_unavailable_for_read() and mark_endpoint_unavailable_for_read()
# functions were called. When a 'None' location is returned by resolve_service_endpoint(),
# these functions should not be called
self._read_counter = 0
self._write_counter = 0
request = _RequestObject(http_constants.ResourceType.Document, documents._OperationType.Read)
endpointDiscovery_retry_policy = endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, request)
endpointDiscovery_retry_policy.ShouldRetry(errors.HTTPFailure(http_constants.StatusCodes.FORBIDDEN))
self.assertEqual(self._read_counter, 0)
self.assertEqual(self._write_counter, 0)
self._read_counter = 0
self._write_counter = 0
request = _RequestObject(http_constants.ResourceType.Document, documents._OperationType.Create)
endpointDiscovery_retry_policy = endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, request)
endpointDiscovery_retry_policy.ShouldRetry(errors.HTTPFailure(http_constants.StatusCodes.FORBIDDEN))
self.assertEqual(self._read_counter, 0)
self.assertEqual(self._write_counter, 0)
endpoint_manager.mark_endpoint_unavailable_for_read = self.original_mark_endpoint_unavailable_for_read_function
endpoint_manager.mark_endpoint_unavailable_for_write = self.original_mark_endpoint_unavailable_for_write_function
cosmos_client.CosmosClient.GetDatabaseAccount = self.original_get_database_account
def _mock_mark_endpoint_unavailable_for_read(self, endpoint):
self._read_counter += 1
self.original_mark_endpoint_unavailable_for_read_function(endpoint)
def _mock_mark_endpoint_unavailable_for_write(self, endpoint):
self._write_counter += 1
self.original_mark_endpoint_unavailable_for_write_function(endpoint)
def _mock_resolve_service_endpoint(self, request):
return None
|
{
"content_hash": "994ec157b44c67c05f2c685035fa0ac3",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 159,
"avg_line_length": 57.42962962962963,
"alnum_prop": 0.7190764865213466,
"repo_name": "Azure/azure-documentdb-python",
"id": "65b1c45fff86618a4a2808227af7b6d7a99d2347",
"size": "7753",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/streaming_failover_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "663705"
}
],
"symlink_target": ""
}
|
import argparse
import collections
import time
import ipaddr
from maas_common import get_auth_ref
from maas_common import get_keystone_client
from maas_common import get_nova_client
from maas_common import metric
from maas_common import metric_bool
from maas_common import print_output
from maas_common import status_err
from maas_common import status_ok
from novaclient.client import exceptions as exc
SERVER_STATUSES = ['ACTIVE', 'STOPPED', 'ERROR']
def check(auth_ref, args):
keystone = get_keystone_client(auth_ref)
tenant_id = keystone.tenant_id
compute_endpoint = (
'{protocol}://{ip}:{port}/v2.1/{tenant_id}'.format(
ip=args.ip,
tenant_id=tenant_id,
protocol=args.protocol,
port=args.port
)
)
try:
if args.ip:
nova = get_nova_client(bypass_url=compute_endpoint)
else:
nova = get_nova_client()
is_up = True
except exc.ClientException:
is_up = False
metric_bool('client_success', False, m_name='maas_nova')
# Any other exception presumably isn't an API error
except Exception as e:
metric_bool('client_success', False, m_name='maas_nova')
status_err(str(e), m_name='maas_nova')
else:
metric_bool('client_success', True, m_name='maas_nova')
# time something arbitrary
start = time.time()
nova.services.list()
end = time.time()
milliseconds = (end - start) * 1000
servers = nova.servers.list(search_opts={'all_tenants': 1})
# gather some metrics
status_count = collections.Counter([s.status for s in servers])
status_ok(m_name='maas_nova')
metric_bool('nova_api_local_status', is_up, m_name='maas_nova')
# only want to send other metrics if api is up
if is_up:
metric('nova_api_local_response_time',
'double',
'%.3f' % milliseconds,
'ms')
for status in SERVER_STATUSES:
metric('nova_instances_in_state_%s' % status,
'uint32',
status_count[status], 'instances')
def main(args):
auth_ref = get_auth_ref()
check(auth_ref, args)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Check Nova API against local or remote address')
parser.add_argument('ip', nargs='?',
type=ipaddr.IPv4Address,
help='Optional Nova API server address')
parser.add_argument('--telegraf-output',
action='store_true',
default=False,
help='Set the output format to telegraf')
parser.add_argument('--port',
default='8774',
help='Port for the nova API service')
parser.add_argument('--protocol',
default='http',
help='Protocol used to contact the nova API service')
args = parser.parse_args()
with print_output(print_telegraf=args.telegraf_output):
main(args)
|
{
"content_hash": "c54388d688e5e894f96e199948434b9a",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 77,
"avg_line_length": 32.78947368421053,
"alnum_prop": 0.5861958266452648,
"repo_name": "cfarquhar/rpc-maas",
"id": "79e9f4105c58c3257d0f392aa5e03ba971860d61",
"size": "3722",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "playbooks/files/rax-maas/plugins/nova_api_local_check.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "36"
},
{
"name": "Python",
"bytes": "335336"
},
{
"name": "Shell",
"bytes": "26493"
}
],
"symlink_target": ""
}
|
"""
2016-10-21:
Modified version of balbuzard application for AL, original code found here:
https://github.com/decalage2/balbuzard
"""
"""
balbuzard - v0.20 2014-06-29 Philippe Lagadec
Balbuzard is a tool to quickly extract patterns from suspicious files for
malware analysis (IP addresses, domain names, known file headers and strings,
etc).
For more info and updates: http://www.decalage.info/balbuzard
"""
# LICENSE:
#
# balbuzard is copyright (c) 2007-2014, Philippe Lagadec (http://www.decalage.info)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__version__ = '0.20'
#------------------------------------------------------------------------------
# CHANGELOG:
# 2007-07-11 v0.01 PL: - 1st version
# 2007-07-30 v0.02 PL: - added list of patterns
# 2007-07-31 v0.03 PL: - added patterns
# - added hexadecimal dump
# 2007-08-09 v0.04 PL: - improved some regexs, added Petite detection
# 2008-06-06 v0.05 PL: - escape non-printable characters with '\xNN' when
# displaying matches
# - optional custom pattern list in reScan_custom.py
# - optional call to magic.py to guess filetype
# 2011-05-06 v0.06 PL: - added bruteforce functions
# 2013-02-24 v0.07 PL: - renamed rescan to balbuzard
# - changed license from CeCILL v2 to BSD
# - added patterns for URL, e-mail, Flash
# - new Pattern class to add patterns
# - pattern can now be a regex or a string, with weigth
# - moved bruteforce functions to balbucrack
# 2013-03-18 v0.08 PL: - a few more/improved patterns
# - optionparser with option -s for short display
# 2013-03-21 v0.09 PL: - open file from password-protected zip (inspired from
# Didier Steven's pdfid, thanks Didier! :-)
# - improved plugin system
# 2013-03-26 v0.10 PL: - improved Pattern and Pattern_re classes
# 2013-07-31 v0.11 PL: - added support for Yara plugins
# 2013-08-28 v0.12 PL: - plugins can now be in subfolders
# - improved OLE2 pattern
# 2013-12-03 v0.13 PL: - moved patterns to separate file patterns.py
# - fixed issue when balbuzard launched from another dir
# - added CSV output
# 2013-12-04 v0.14 PL: - can now scan several files from command line args
# - now short display is default, -v for hex view
# 2013-12-09 v0.15 PL: - Pattern_re: added filter function to ignore false
# positives
# 2014-01-14 v0.16 PL: - added riglob, ziglob
# - new option -r to find files recursively in subdirs
# - new option -f to find files within zips with wildcards
# 2014-01-23 v0.17 PL: - Pattern: added partial support for filter function
# 2014-02-24 v0.18 PL: - fixed bug with main_dir when balbuzard is imported
# 2014-03-21 v0.19 PL: - fixed bug when Yara-python is not installed
# 2014-06-29 v0.20 PL: - simplified bbcrack transforms, added Yara signatures
#------------------------------------------------------------------------------
# TODO:
# + add yara plugins support to Balbuzard.count and scan_profiling
# + merge Balbuzard.scan_hexdump and short
# + option to choose which plugins to load: all (default), none, python or yara
# only
# + option to use the Yara-python engine for searching (translating balbuzard
# patterns to yara at runtime)
# - Yara plugins: keep track of the filename containing each set of Yara rules
# - option to support Unicode strings? (need to check 2 alignments and 2 byte
# orders, or simply insert \x00 between all chars, e.g. 'T\x00E\x00S\x00T')
# + improve patterns to avoid some false positives: maybe use pefile or magic.py ?
# - HTML report with color highlighting
# - GUI ?
# - optional use of other magic libs (TrIDscan, pymagic, python-magic, etc: see PyPI)
# - provide samples
# - RTF hex object decoder?
# - option to decode stream before searching: unicode, hex, base64, etc
# - options for XML outputs
# - export to OpenIOC?
# ? zip file: open all files instead of only the 1st one, or add an option to
# specify the filename(s) to open within the zip, with wildcards?
# ISSUES:
# - BUG: it seems that re ignores null bytes in patterns, despite what the doc says?
# - BUG: the URL pattern is not fully correct, need to find a better one
# - BUG: the e-mail pattern catches a lot of false positives.
#--- IMPORTS ------------------------------------------------------------------
import sys, re, os, os.path, optparse, glob, zipfile, time, string, fnmatch, imp
#import csv
# try to import yara-python:
# try:
# import yara
# YARA = True
# except:
# YARA = False
#--- CLASSES ------------------------------------------------------------------
class Pattern (object):
"""
a Pattern object is a string or a list of strings to be searched in data.
Attributes:
- name: str, description of the pattern for display
- pat: str or list/tuple of strings to be searched
- nocase: bool, if True, search is case-insensitive
- single: bool, if True search will stop at the first occurence
- weight: int, weight used by balbucrack
- filt: function to filter out false positives, should be a function
with arguments (value, index, pattern), returning True when acceptable
or False when it is a false positive.
"""
def __init__(self, name, pat=None, nocase=False, single=False, weight=1,
filt=None):
self.name = name
# self.pat should always be a list of strings:
if isinstance(pat, str):
self.pat = [pat]
else:
# else we assume it's a sequence:
self.pat = pat
self.nocase = nocase
if nocase:
# transform pat to lowercase
self.pat_lower = map(string.lower, self.pat)
self.single = single
self.weight = weight
# for profiling:
self.total_time = 0
self.filter = filt
def find_all (self, data, data_lower=None):
"""
find all occurences of pattern in data.
data_lower should be set to data.lower(), if there are case-insensitive
patterns (it's better to do it only once)
return a list of tuples (index, string)
"""
found = []
if self.nocase:
d = data_lower
pat = self.pat_lower
else:
d = data
pat = self.pat
for s in pat:
l = len(s)
for i in str_find_all(d, s):
# the matched string is not always s, case can differ:
match = data[i:i+len(s)]
valid = True
if self.filter is not None:
valid = self.filter(value=match, index=i, pattern=self)
if valid: found.append((i, match))
# debug message:
else: print 'Filtered out %s: %s' % (self.name, repr(match))
return found
def count (self, data, data_lower=None):
"""
count all occurences of pattern in data.
Except for those with single=True, only the first occurence of any
string is counted.
data_lower should be set to data.lower(), if there are case-insensitive
patterns (it's better to do it only once)
return an integer
"""
#TODO: add support for filter? (will be much slower...)
count = 0
if self.nocase:
d = data_lower
pat = self.pat_lower
else:
d = data
pat = self.pat
if not self.single:
for s in pat:
count += d.count(s)
return count
else:
for s in pat:
if s in d:
return 1
return 0
class Pattern_re (Pattern):
"""
a Pattern_re object is a regular expression to be searched in data.
Attributes:
- name: str, description of the pattern for display
- pat: str, regular expression to be searched
- trigger: str or list/tuple of strings to be searched before pat
- nocase: bool, if True, search is case-insensitive
- single: bool, if True search will stop at the first occurence
- weight: int, weight used by balbucrack
- filt: function to filter out false positives, should be a function
with arguments (value, index, pattern), returning True when acceptable
or False when it is a false positive.
"""
def __init__(self, name, pat=None, trigger=None, nocase=False, single=False,
weight=1, filt=None):
# first call the Pattern constructor:
Pattern.__init__(self, name, pat, nocase, single, weight)
# compile regex
flags = 0
if nocase:
flags = re.IGNORECASE
self.pat = re.compile(pat, flags)
self.trigger = trigger
if trigger is not None:
# create second pattern for trigger, for single search:
self.trigger_pat = Pattern(name, pat=trigger, nocase=nocase, single=True)
self.filter = filt
#print 'pattern %s: filter=%s' % (self.name, self.filter)
def find_all (self, data, data_lower=None):
"""
find all occurences of pattern in data.
data_lower should be set to data.lower(), if there are case-insensitive
patterns (it's better to do it only once)
return a list of tuples (index, string)
"""
found = []
if self.trigger is not None:
# when trigger is specified, search trigger first and stop if not
# found:
if self.trigger_pat.count(data, data_lower) == 0:
return found
for m in self.pat.finditer(data):
valid = True
if self.filter is not None:
valid = self.filter(value=m.group(), index=m.start(), pattern=self)
if valid: found.append((m.start(), m.group()))
# debug message:
#else: print 'Filtered out %s: %s' % (self.name, repr(m.group()))
return found
def count (self, data, data_lower=None):
"""
count all occurences of pattern in data.
data_lower should be set to data.lower(), if there are case-insensitive
patterns (it's better to do it only once)
return an integer
"""
if self.trigger is not None:
# when trigger is specified, search trigger first and stop if not
# found:
if self.trigger_pat.count(data, data_lower) == 0:
return 0
# when no filter is defined, quickest way to count:
if self.filter is None:
return len(self.pat.findall(data))
# otherwise, need to call filter for each match:
c = 0
for m in self.pat.finditer(data):
valid = self.filter(value=m.group(), index=m.start(), pattern=self)
if valid: c += 1
return c
#------------------------------------------------------------------------------
class Balbuzard (object):
"""
class to scan a string of data, searching for a set of patterns (strings
and regular expressions)
"""
def __init__(self, patterns=None, yara_rules=None):
self.patterns = patterns
if patterns == None:
self.patterns = []
# self.yara_rules = yara_rules
## def add_pattern(self, name, regex=None, string=None, weight=1):
## self.patterns.append(Pattern(name, regex, string, weight))
def list_patterns(self):
"""
Adding function for FrankenStrings to get regex patterns when needed
"""
return self.patterns
def scan (self, data):
"""
Scans data for all patterns. This is an iterator: for each pattern
found, yields the Pattern object and a list of matches as tuples
(index in data, matched string).
"""
# prep lowercase version of data for case-insensitive patterns
data_lower = data.lower()
for pattern in self.patterns:
matches = pattern.find_all(data, data_lower)
if len(matches)>0:
yield pattern, matches
# if YARA and self.yara_rules is not None:
# for rules in self.yara_rules:
# yara_matches = rules.match(data=data)
# for match in yara_matches:
# # create a fake pattern object, with a single match:
# pattern = Pattern(match.rule)
# matches = []
# for s in match.strings:
# offset, id, d = s
# matches.append((offset, d))
# yield pattern, matches
def scan_profiling (self, data):
"""
Scans data for all patterns. This is an iterator: for each pattern
found, yields the Pattern object and a list of matches as tuples
(index in data, matched string).
Version with profiling, to check which patterns take time.
"""
start = time.clock()
# prep lowercase version of data for case-insensitive patterns
data_lower = data.lower()
for pattern in self.patterns:
start_pattern = time.clock()
matches = pattern.find_all(data, data_lower)
pattern.time = time.clock()-start_pattern
pattern.total_time += pattern.time
if len(matches)>0:
yield pattern, matches
self.time = time.clock()-start
def count (self, data):
"""
Scans data for all patterns. This is an iterator: for each pattern
found, yields the Pattern object and the count as int.
"""
# prep lowercase version of data for case-insensitive patterns
data_lower = data.lower()
for pattern in self.patterns:
count = pattern.count(data, data_lower)
if count:
yield pattern, count
def scan_display (self, data, filename, hexdump=False, csv_writer=None):
"""
Scans data for all patterns, displaying an hexadecimal dump for each
match on the console (if hexdump=True), or one line for each
match (if hexdump=False).
"""
for pattern, matches in self.scan(data):
if hexdump:
print "-"*79
print "%s:" % pattern.name
for index, match in matches:
# limit matched string display to 50 chars:
m = repr(match)
if len(m)> 50:
m = m[:24]+'...'+m[-23:]
if hexdump:
print "at %08X: %s" % (index, m)
# 5 lines of hexadecimal dump around the pattern: 2 lines = 32 bytes
start = max(index-32, 0) & 0xFFFFFFF0
index_end = index + len(match)
end = min(index_end+32+15, len(data)) & 0xFFFFFFF0
length = end-start
#print start, end, length
print hexdump3(data[start:end], length=16, startindex=start)
print ""
else:
print "at %08X: %s - %s" % (index, pattern.name, m)
if csv_writer is not None:
#['Filename', 'Index', 'Pattern name', 'Found string', 'Length']
csv_writer.writerow([filename, '0x%08X' % index, pattern.name,
m, len(match)])
# blank line between each file:
print ''
## if item == "EXE MZ headers" and MAGIC:
## # Check if it's really a EXE header
## print "Magic: %s\n" % magic.whatis(data[m.start():])
#--- GLOBALS ------------------------------------------------------------------
patterns = []
#--- FUNCTIONS ----------------------------------------------------------------
##def add_pattern(name, regex=None, string=None, weight=1):
## patterns.append(Pattern(name, regex, string, weight))
# HEXDUMP from http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/142812
FILTER=''.join([(len(repr(chr(x)))==3) and chr(x) or '.' for x in range(256)])
##def hexdump(src, length=8):
## N=0; result=''
## while src:
## s,src = src[:length],src[length:]
## hexa = ' '.join(["%02X"%ord(x) for x in s])
## s = s.translate(FILTER)
## result += "%04X %-*s %s\n" % (N, length*3, hexa, s)
## N+=length
## return result
##
##def hexdump2(src, length=8):
## result=[]
## for i in xrange(0, len(src), length):
## s = src[i:i+length]
## hexa = ' '.join(["%02X"%ord(x) for x in s])
## printable = s.translate(FILTER)
## result.append("%04X %-*s %s\n" % (i, length*3, hexa, printable))
## return ''.join(result)
# my improved hexdump, to add a start index:
def hexdump3(src, length=8, startindex=0):
"""
Returns a hexadecimal dump of a binary string.
length: number of bytes per row.
startindex: index of 1st byte.
"""
result=[]
for i in xrange(0, len(src), length):
s = src[i:i+length]
hexa = ' '.join(["%02X"%ord(x) for x in s])
printable = s.translate(FILTER)
result.append("%04X %-*s %s\n" % (i+startindex, length*3, hexa, printable))
return ''.join(result)
def str_find_all(a_str, sub):
start = 0
while True:
start = a_str.find(sub, start)
if start == -1: return
yield start
start += len(sub)
# recursive glob function to find plugin files in any subfolder:
# inspired by http://stackoverflow.com/questions/14798220/how-can-i-search-sub-folders-using-glob-glob-module-in-python
def rglob (path, pattern='*.*'):
"""
Recursive glob:
similar to glob.glob, but finds files recursively in all subfolders of path.
path: root directory where to search files
pattern: pattern for filenames, using wildcards, e.g. *.txt
"""
#TODO: more compatible API with glob: use single param, split path from pattern
return [os.path.join(dirpath, f)
for dirpath, dirnames, files in os.walk(path)
for f in fnmatch.filter(files, pattern)]
def riglob (pathname):
"""
Recursive iglob:
similar to glob.iglob, but finds files recursively in all subfolders of path.
pathname: root directory where to search files followed by pattern for
filenames, using wildcards, e.g. *.txt
"""
path, filespec = os.path.split(pathname)
for dirpath, dirnames, files in os.walk(path):
for f in fnmatch.filter(files, filespec):
yield os.path.join(dirpath, f)
def ziglob (zipfileobj, pathname):
"""
iglob in a zip:
similar to glob.iglob, but finds files within a zip archive.
- zipfileobj: zipfile.ZipFile object
- pathname: root directory where to search files followed by pattern for
filenames, using wildcards, e.g. *.txt
"""
files = zipfileobj.namelist()
for f in files: print f
for f in fnmatch.filter(files, pathname):
yield f
def iter_files(files, recursive=False, zip_password=None, zip_fname='*'):
"""
Open each file provided as argument:
- files is a list of arguments
- if zip_password is None, each file is opened and read as-is. Wilcards are
supported.
- if not, then each file is opened as a zip archive with the provided password
- then files matching zip_fname are opened from the zip archive
Iterator: yields (filename, data) for each file
"""
# choose recursive or non-recursive iglob:
if recursive:
iglob = riglob
else:
iglob = glob.iglob
for filespec in files:
for filename in iglob(filespec):
if zip_password is not None:
# Each file is a zip archive:
print 'Opening zip archive %s with provided password' % filename
z = zipfile.ZipFile(filename, 'r')
print 'Looking for file(s) matching "%s"' % zip_fname
for filename in ziglob(z, zip_fname):
print 'Opening file in zip archive:', filename
data = z.read(filename, zip_password)
yield filename, data
else:
# normal file
print 'Opening file', filename
data = open(filename, 'rb').read()
yield filename, data
def relpath(path, start='.'):
"""
convert a path to a relative path, using os.path.relpath on Python 2.6+
On Python 2.5 or older, the path is not changed, but no exception is raised.
(this function is just for backward compatibility)
"""
# with python 2.6+, make it a relative path:
try:
return os.path.relpath(path, start)
except:
return path
#=== INITALIZATION ============================================================
# get main directory where this script is located:
main_dir = os.path.dirname(__file__)
#print 'main dir:', main_dir
#plugins_dir = os.path.join(main_dir, 'plugins')
#print 'plugins dir:', plugins_dir
# load patterns
patfile = os.path.join(main_dir, 'patterns.py')
# save __doc__, else it seems to be overwritten:
d = __doc__
#print 'patfile:', patfile
execfile(patfile)
__doc__ = d
del d
#=== MAIN =====================================================================
if __name__ == '__main__':
usage = 'usage: %prog [options] <filename> [filename2 ...]'
parser = optparse.OptionParser(usage=usage)
## parser.add_option('-o', '--outfile', dest='outfile',
## help='output file')
parser.add_option('-c', '--csv', dest='csv',
help='export results to a CSV file')
parser.add_option("-v", action="store_true", dest="verbose",
help='verbose display, with hex view.')
parser.add_option("-r", action="store_true", dest="recursive",
help='find files recursively in subdirectories.')
parser.add_option("-z", "--zip", dest='zip_password', type='str', default=None,
help='if the file is a zip archive, open first file from it, using the provided password (requires Python 2.6+)')
parser.add_option("-f", "--zipfname", dest='zip_fname', type='str', default='*',
help='if the file is a zip archive, file(s) to be opened within the zip. Wildcards * and ? are supported. (default:*)')
(options, args) = parser.parse_args()
# Print help if no argurments are passed
if len(args) == 0:
print __doc__
parser.print_help()
sys.exit()
# load plugins
#for f in rglob(plugins_dir, 'bbz*.py'): # glob.iglob('plugins/bbz*.py'):
# print 'Loading plugin from', relpath(f, plugins_dir)
# execfile(f)
# load yara plugins
# if YARA:
# yara_rules = []
# for f in rglob(plugins_dir, '*.yara'): #glob.iglob('plugins/*.yara'): # or bbz*.yara?
# print 'Loading yara plugin from', relpath(f, plugins_dir)
# yara_rules.append(yara.compile(f))
# else:
# yara_rules = None
# open CSV file
# if options.csv:
# print 'Writing output to CSV file: %s' % options.csv
# csvfile = open(options.csv, 'wb')
# csv_writer = csv.writer(csvfile)
# csv_writer.writerow(['Filename', 'Index', 'Pattern name',
# 'Found string', 'Length'])
# else:
# csv_writer = None
#
# # close CSV file
# if options.csv:
# csvfile.close()
# This was coded while listening to The National "Boxer".
|
{
"content_hash": "b165bb9db5da95c64434bf0c1088c6fe",
"timestamp": "",
"source": "github",
"line_count": 639,
"max_line_length": 127,
"avg_line_length": 39.06103286384977,
"alnum_prop": 0.5842548076923076,
"repo_name": "maliceio/malice-pdf",
"id": "65e29ad60f0ac8684e68d336be112864867c0222",
"size": "24984",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pdfparser/balbuzard/balbuzard.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "4713"
},
{
"name": "Makefile",
"bytes": "442"
},
{
"name": "Python",
"bytes": "108816"
}
],
"symlink_target": ""
}
|
from __future__ import division
import collections
import copy
import time
import os
import sys
from heapq import heappush, heappop
from itertools import count
import pandas as pd
import numpy as np
import networkx as nx
import geopandas as gp
from matplotlib import pyplot as plt
import matplotlib.colors as colors
from matplotlib.pylab import *
from shapely.geometry import LineString, Point, Polygon, MultiPolygon
from scipy.stats.stats import pearsonr
from scipy.stats import spearmanr, ks_2samp
from astropy.stats import median_absolute_deviation
from numpy import std
import ema_workbench
try:
from transport_network_modeling.od_prep import od_aggregation
except:
from od_prep import od_aggregation
__all__ = ['aon_assignment',
'probit_assignment',
'edge_betweenness_centrality',
'edge_betweenness_subset_od',
'betweenness_to_df',
'edge_betweenness_subset_od_ema',
'ema_betweenness',
'k_shortest_paths',
'ksp_edge_betweenness_subset_od',
'sp_dict_graph_creation',
'interdiction_single_edge',
'min_edge_cut',
'interdiction_m1',
'interdiction_m2',
'interdiction_m6',
'interdiction_user_exposure',
'interdiction_m3_02',
'metric_m5_01',
'correlate_metrics_pearson',
'correlate_metrics_spearman',
'correlate_metrics_ks']
def aon_assignment(G, sources, targets, weight, od):
'''
Function to do All-or-Nothing assignment on transport network
Parameters
------------
G: Graph
Transport network Graph Networkx object that will be analyzed
sources: list
List of nodes (integer) that will be used as sources. The integer should correspond to
node id in G Graph
targets: list
List of nodes (integer) that will be used as targets. The integer should correspond to
node id in G Graph
weight: str
String which corresponds to attribute of G Graph's edges that will be used as penalty for each
edge. In most cases this is defined as 'length' of the edge.
od: DataFrame
OD matrix dataframe
Returns
------------
d: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and flow value as values
'''
#create empty dict
d = collections.defaultdict(float)
#iterate over all sources
for i in range(len(sources)):
source = sources[i]
#iterate over all edges
for j in range(len(targets)):
target = targets[j]
#it is assumed that there is no self-loop on the node
#e.g. there is no flow from node A to node A
if source != target :
#determine shortest path between the OD pair
sp_dijk_all = nx.dijkstra_path(G, source=source, target=target, weight=weight)
#update the betweenness value of all edges in the shortest path
flow = od[source][target]
for j in range(len(sp_dijk_all)-1):
lst = [sp_dijk_all[j],sp_dijk_all[j+1]]
lst = [min(lst), max(lst)]
tup = tuple(lst)
d[tup] += flow
#if tup in d.keys():
# d[tup]+=1*flow
#else:
# d.update({tup:1*flow})
#assign 0 to all edges which don't belong to any shortest path
#at the same time, record all the correct order of edges name
edges_list = []
for u,v in G.edges():
elst = [u,v]
elst = [min(elst), max(elst)]
etup = tuple(elst)
if not etup in d.keys():
d.update({etup:0})
tup = tuple([u,v])
edges_list.append(tup)
#alter the tuple(u,v) to tuple(v,u) if the order is inconsistent with the original graph's order
d1 = {}
for key, val in d.items():
if not key in edges_list:
tup = tuple([key[1], key[0]])
d1.update({tup:val})
else:
d1.update({key:val})
return d1
def probit_assignment(G, sources, targets, weight, od, N=5, sd=10, penalty=0):
'''
Function to do stochastic probit assignment on transport network. The weight of the transport network
is sampled by normal distribution with the original link weight as the mean.
Parameters
------------
G: Graph
Transport network Graph Networkx object that will be analyzed
sources: list
List of nodes (integer) that will be used as sources. The integer should correspond to
node id in G Graph
targets: list
List of nodes (integer) that will be used as targets. The integer should correspond to
node id in G Graph
weight: str
String which corresponds to attribute of G Graph's edges that will be used as penalty for each
edge. In most cases this is defined as 'length' of the edge.
od: DataFrame
OD matrix dataframe
N: int
Number of probit iterations that want to be performed
sd: int
Percentage of the link's weight that will be used as standard deviation of the normal distribution (e.g.
if 10 is inputted, then the standard deviation is 10% of the link's weight). If you don't want to sample
over the normal distribution, set sd to 0.
penalty: double
Penalty that is given to links which have been part of shortest paths set. If set, the value should be higher
than 1. The intention is to force the code to find distinguished shortest paths between each probit iteration
by increasing the weight of links that have been part of shortest paths in previous iterations.
Returns
------------
d: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and flow value as values
'''
#create empty dict
d = collections.defaultdict(float)
#create copy of original network to avoid changing the attributes of the original network
G1 = G.copy()
#iterate N times
#in each iteration, sample the link's weight using a normal distribution
for i in np.arange(N):
length_dict = {}
for u, v, data in G1.edges(data=True):
tup = tuple([u,v])
if sd > 0:
length_mean = data[weight]
stdev = sd/100
length_sd = length_mean * stdev
try:
length = np.random.normal(length_mean, length_sd)
except:
length = length_mean
if length < 0:
length = 0
else:
length = data[weight]
length_dict[tup] = length
#create a copy of G1 since we want to work the penalty on G1 later
G2 = G1.copy()
#set the attribute of G2, we'll work the assignment based on G2's weight information
nx.set_edge_attributes(G2, name=weight, values=length_dict)
#iterate over all sources
penalty_set = set()
for i, source in enumerate(sources):
#iterate over all edges
for j, target in enumerate(targets):
#it is assumed that there is no self-loop on the node
#e.g. there is no link from node A to node A
if source == target:
continue
#determine shortest path between the OD pair
sp_dijk_all = nx.dijkstra_path(G2, source=source, target=target,
weight=weight)
#update the betweenness value of all edges in the shortest path
flow = od.iloc[source, target] #change iloc instead of ix
#divide the flow over the number of iteration
flow = flow/N
for j in range(len(sp_dijk_all)-1):
lst = [sp_dijk_all[j], sp_dijk_all[j+1]]
lst = [min(lst), max(lst)]
tup = tuple(lst)
d[tup] += flow
#if we want to work with penalty, record the shortest paths
if penalty:
penalty_set.add(tup)
tup = tup[::-1]
penalty_set.add(tup)
#if work with penalty, update the weight of the links which belong to the shortest paths
if penalty:
for uv in penalty_set:
G1[uv[0]][uv[1]][weight] *= penalty
penalty_dict = {}
#for u, v,data in G1.edges(data=True):
# if tuple([u,v]) in penalty_set:
# length = data[weight] * penalty
# else:
# length = data[weight]
# penalty_dict[tuple([u,v])] = length
#nx.set_edge_attributes(G1, name=weight, values=penalty_dict)
#assign 0 to all edges which don't belong to any shortest path
#at the same time, record all the correct order of edges name
edges_set = set()
for u,v in G.edges():
elst = [u,v]
elst = [min(elst), max(elst)]
etup = tuple(elst)
if not etup in d:
d[etup] = 0.0
tup = tuple([u,v])
edges_set.add(tup)
#alter the tuple(u,v) to tuple(v,u) if the order is inconsistent with the original graph's order
d1 = {}
for key, val in d.items():
if not key in edges_set:
tup = tuple([key[1], key[0]])
d1[tup] = val
else:
d1[key] = val
return d1
def correlate_metric_spearman(df, m_a, m_b):
df2 = df[df[m_a] + df[m_b] != 0]
r, p = spearmanr(df2[m_a], df2[m_b])
return r, p, len(df2)
def edge_betweenness_centrality(flow, od):
'''
Function to do stochastic probit assignment on transport network. The weight of the transport network
is sampled by normal distribution with the original link weight as the mean
Parameters
------------
flow: dict
Flow dictionary obtained from assignment function (e.g. from aon_assignment or probit_assignment)
od: DataFrame
OD matrix dataframe
Returns
------------
d: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and betweenness value as values
'''
#record the total flow in the network
totalval = (sum(od.sum()))
#copy the flow to avoid changing the original flow dictionary
flow2 = flow.copy()
#normalize the flow
for key, val in flow2.items():
flow2[key] = val / totalval
return flow2
def edge_betweenness_subset_od(G, sources, targets, weight, od):
'''
Old function before betweenness centrality and flow assignment were separated.
Calculating edge betweenness centrality between only subset of nodes in the network (e.g. between districts)
Parameters
------------
G: Graph
Transport network Graph Networkx object that will be analyzed
sources: list
List of nodes (integer) that will be used as sources. The integer should correspond to
node id in G Graph
targets: list
List of nodes (integer) that will be used as targets. The integer should correspond to
node id in G Graph
weight: str
String which corresponds to attribute of G Graph's edges that will be used as penalty for each
edge. In most cases this is defined as 'length' of the edge.
od: DataFrame
OD matrix dataframe
Returns
------------
d: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and betweenness value as values
'''
#create empty dict
d={}
#iterate over all sources
for i in range(len(sources)):
source = sources[i]
#iterate over all edges
for j in range(len(targets)):
target = targets[j]
#it is assumed that there is no self-loop on the node
#e.g. there is no flow from node A to node A
if source != target :
#determine shortest path between the OD pair
sp_dijk_all = nx.dijkstra_path(G, source=source, target=target, weight=weight)
#update the betweenness value of all edges in the shortest path
flow = od[source][target]
for j in range(len(sp_dijk_all)-1):
lst = [sp_dijk_all[j],sp_dijk_all[j+1]]
lst = [min(lst), max(lst)]
tup = tuple(lst)
if tup in d.keys():
d[tup]+=1*flow
else:
d.update({tup:1*flow})
#normalize the betweenness value
totalval = (sum(od.sum()))
for key, val in d.items():
d[key] = val / totalval
#assign 0 to all edges which don't belong to any shortest path
for u,v in G.edges():
elst = [u,v]
elst = [min(elst), max(elst)]
etup = tuple(elst)
if not etup in d.keys():
d.update({etup:0})
return d
def betweenness_to_df(gdf,betweenness,betweenness_string):
'''
Append betweenness centrality result to the transport network's GeoDataFrame.
For visualization purpose later.
Parameters
------------
gdf: GeoDataFrame
GeoDataFrame (Linestring) of the original transport network
betweenness: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and betweenness value as values
betweenness_string: str
String of betweenness dictionary's object name
Returns
------------
gdf_final: GeoDataFrame
Updated gdf with additional column of betweenness centrality
betweenness_df: DataFrame
Betweenness dictionary transformed into dataframe
'''
betweenness_df = pd.DataFrame(list(betweenness.items()), columns=['FromTo_tuple', betweenness_string])
FromTo_tuple = betweenness_df['FromTo_tuple'].tolist()
FromTo_tolist = []
for i in FromTo_tuple:
odlist = list(i)
minval = min(odlist)
maxval = max(odlist)
val = str(minval) + str(maxval)
FromTo_tolist.append(val)
betweenness_df['FromTo'] = FromTo_tolist
c = []
for i in range(len(gdf)):
minval = min([gdf['TNODE_'][i],gdf['FNODE_'][i]])
maxval = max([gdf['TNODE_'][i],gdf['FNODE_'][i]])
val = str(minval) + str(maxval)
c.append(val)
gdf['FromTo'] = c
gdf_final = pd.merge(gdf,betweenness_df,on='FromTo',how='outer')
del gdf_final['FromTo_tuple']
return gdf_final, betweenness_df
def _shortest_path_record(G, sources, targets, weight):
'''
Record links in shortest pats of all od pairs
'''
d={}
for i in range(len(sources)):
source = sources[i]
for j in range(len(targets)):
target = targets[j]
if source != target :
sp_dijk_all = nx.dijkstra_path(G, source=source, target=target, weight=weight)
od_pair = str(source)+str(target)
d[od_pair] = (sp_dijk_all, source, target)
return d
def edge_betweenness_subset_od_ema(G, sp_dict, od):
d={}
for key, val in sp_dict.items():
source = val[1]
target = val[2]
sp = val[0]
flow = od[source][target]
for j in range(len(sp)-1):
lst = [sp[j],sp[j+1]]
lst = [min(lst), max(lst)]
tup = tuple(lst)
#the codes below take almost one minute
if tup in d.keys():
d[tup]+=1*flow
else:
d.update({tup:1*flow})
totalval = (sum(od.sum()))
for key, val in d.items():
d[key] = val / totalval
for u,v in G.edges():
elst = [u,v]
elst = [min(elst), max(elst)]
etup = tuple(elst)
if not etup in d.keys():
d.update({etup:0})
return d
def ema_betweenness(prod_lists, OD_all_dict, G, sp_dict, **factors_dict):
OD_final_df = od_aggregation(OD_all_dict, **factors_dict)
betweenness = edge_betweenness_subset_od_ema(G=G, sp_dict=sp_dict, od=OD_final_df)
new_d = {}
for key, val in betweenness.items():
new_key = str(key[0])+str(key[1])
new_d[new_key] = val
return new_d
def k_shortest_paths(G, source, target, k=1, weight='weight'):
#MAY NOT BE USED ANYMORE
if source == target:
return ([0], [[source]])
length, path = nx.single_source_dijkstra(G, source, target, weight=weight)
if target not in length:
raise nx.NetworkXNoPath("node %s not reachable from %s" % (source, target))
lengths = [length[target]]
paths = [path[target]]
c = count()
B = []
G_original = G.copy()
for i in range(1, k):
for j in range(len(paths[-1]) - 1):
spur_node = paths[-1][j]
root_path = paths[-1][:j + 1]
edges_removed = []
for c_path in paths:
if len(c_path) > j and root_path == c_path[:j + 1]:
u = c_path[j]
v = c_path[j + 1]
if G.has_edge(u, v):
edge_attr = G.edge[u][v]
G.remove_edge(u, v)
edges_removed.append((u, v, edge_attr))
for n in range(len(root_path) - 1):
node = root_path[n]
# out-edges
for u, v, edge_attr in G.edges(node, data=True):
G.remove_edge(u, v)
edges_removed.append((u, v, edge_attr))
if G.is_directed():
# in-edges
for u, v, edge_attr in G.in_edges_iter(node, data=True):
G.remove_edge(u, v)
edges_removed.append((u, v, edge_attr))
spur_path_length, spur_path = nx.single_source_dijkstra(G, spur_node, target, weight=weight)
if target in spur_path and spur_path[target]:
total_path = root_path[:-1] + spur_path[target]
total_path_length = _get_path_length(G_original, root_path, weight) + spur_path_length[target]
heappush(B, (total_path_length, next(c), total_path))
for e in edges_removed:
u, v, edge_attr = e
G.add_edge(u, v, **edge_attr)
if B:
(l, _, p) = heappop(B)
lengths.append(l)
paths.append(p)
else:
break
return (lengths, paths)
def _get_path_length(G, path, weight='weight'):
#MAY NOT BE USED ANYMORE
length = 0
if len(path) > 1:
for i in range(len(path) - 1):
u = path[i]
v = path[i + 1]
length += G.edge[u][v].get(weight, 1)
return length
def _total_cost_sp(G, sources, targets, weight, od, weighted=True):
'''
Input:
G : Graph Networkx object
sources, targets : List of nodes sources IDs and nodes targets IDs (e.g. the centroid nodes)
weight : Edge data key corresponding to the edge weight
od : OD matrix for calculating weighted total cost
weighted : if True, the total cost is weighted by the OD matrix
Output:
d : Dict with centroid pairs tuple as keys (e.g. (2,3) ) and total cost between them as values
'''
d={}
total_cost = 0
for i in range(len(sources)):
source = sources[i]
for j in range(len(targets)):
target = targets[j]
if source != target :
sp_dijk_distance = nx.dijkstra_path_length(G, source=source, target=target, weight=weight)
flow = od[source][target]
if weighted:
cost = sp_dijk_distance * flow
else:
cost = sp_dijk_distance
total_cost += cost
tup=tuple([source,target])
d.update({tup:cost})
return total_cost, d
def sp_dict_graph_creation(G, sources, targets, weight):
'''
Function to store shortest path of each OD pair into a dictionary
Parameters
------------
G: Graph
Transport network Graph Networkx object that will be analyzed
sources: list
List of nodes (integer) that will be used as sources. The integer should correspond to
node id in G Graph
targets: list
List of nodes (integer) that will be used as targets. The integer should correspond to
node id in G Graph
weight: str
String which corresponds to attribute of G Graph's edges that will be used as penalty for each
edge. In most cases this is defined as 'length' of the edge.
Returns
------------
sp_dict_graph: dict
Dictionary with OD pair nodes' id tuple as keys (e.g. (2,3) ) and links in shortest path as values
'''
sp_dict = _shortest_path_record(G=G, sources = sources, targets = targets, weight=weight)
edgelist = []
for edge in list(G.edges()):
edgelist.append(edge)
sp_dict_graph = {}
for key, val in sp_dict.items():
source = val[1]
target = val[2]
tup = tuple([source, target])
sp_dict_graph.update({tup:[]})
for j in range(len(val[0])-1):
test1 = tuple([val[0][j], val[0][j+1]])
test2 = tuple([val[0][j+1], val[0][j]])
if test1 in edgelist:
sp_dict_graph[tup].append(test1)
if test2 in edgelist:
sp_dict_graph[tup].append(test2)
return sp_dict_graph
def interdiction_single_edge(G2, od, weight, sp_dict_graph, sources, targets):
interdiction_costs = {}
interdiction_disconnected = {}
interdiction_unsatisfieddemand = {}
total_cost_base, od_cost_dict = _total_cost_sp(G=G2, sources=sources, targets=targets,
weight='length', od=od)
edges_odmap = collections.defaultdict(list)
for key, value in sp_dict_graph.items():
for entry in value:
edges_odmap[entry].append(key)
for edge, ods in sorted(list(edges_odmap.items()), key=lambda x:x[0]):
od_cost = od_cost_dict.copy()
G = G2.copy()
G.remove_edge(edge[0], edge[1])
disconnected = 0
unsatisfied_demand = 0
for source, target in ods:
demand = od.loc[source, target]
try:
distance = nx.dijkstra_path_length(G, source=source, target=target,
weight=weight)
except nx.NetworkXNoPath:
disconnected += 1
unsatisfied_demand += demand
else:
costs = distance * demand
od_cost[(source, target)] = costs
total_costs = sum(list(od_cost.values()))
cost_increase = (total_costs - total_cost_base)/total_cost_base
unsatisfied_demand = unsatisfied_demand/total_cost_base
if cost_increase < 0:
cost_increase = 0
interdiction_costs[edge] = cost_increase
interdiction_disconnected[edge] = disconnected
interdiction_unsatisfieddemand[edge] = unsatisfied_demand
# interdiction_costs = {'{}{}'.format(*sorted(k)):v for k,v in interdiction_costs.items()}
# disconnected = {'{}{}'.format(*sorted(k)):v for k,v in interdiction_disconnected.items()}
# unsatisfied_demand = {'{}{}'.format(*sorted(k)):v for k,v in interdiction_unsatisfieddemand.items()}
return interdiction_costs, interdiction_disconnected, interdiction_unsatisfieddemand
def ksp_edge_betweenness_subset_od(G, sources, targets, weight, od, k):
'''
MAY NOT BE USED ANYMORE
Input:
G : Graph Networkx object
sources, targets : List of nodes sources IDs and nodes targets IDs (e.g. the centroid nodes)
weight : Edge data key corresponding to the edge weight
Output:
d : Dict with edge tuple as keys (e.g. (2,3) ) and betweenness value as values
'''
d={}
number=0
for i in range(len(sources)):
source = sources[i]
for j in range(len(targets)):
target = targets[j]
if source != target :
#calculate k-shortest path
ksp = k_shortest_paths(G = G, source = source, target = target, k = k, weight = weight)
#store the length of the k-shortest paths
path_length = ksp[0]
path_length_set = set(path_length)
#store total flow between od pair
flow = od[source][target]
#calculate logit model for route choice
#firstly calculate the denominator
sum_exp = 0
for i in path_length_set:
exp_val = np.exp(-0.05*i)
sum_exp += exp_val
#secondly create list which contains probability of each route
probability = []
for i in path_length_set:
exp_val = np.exp(-0.05*i)
prob = exp_val/sum_exp
probability.append(prob)
#now append the flow*probability to each route
#iterate for each route
counter = 0
for path in path_length_set:
index = path_length.index(path)
sp = ksp[1][index]
for j in range(len(sp)-1):
lst = [sp[j],sp[j+1]]
lst = [min(lst), max(lst)]
tup = tuple(lst)
if tup in d.keys():
d[tup]+=1*flow*probability[counter]
else:
d.update({tup:1*flow*probability[counter]})
counter += 1
totalval = (sum(od.sum()))
for key, val in d.items():
d[key] = val / totalval
for u,v in G.edges():
elst = [u,v]
elst = [min(elst), max(elst)]
etup = tuple(elst)
if not etup in d.keys():
d.update({etup:0})
return d
def min_edge_cut(G, centroid_nodes):
'''
Function to calculate minimum edge cut criticality of all links in a transport network
Parameters
------------
G: Graph
Transport network Graph Networkx object that will be analyzed
centroid_nodes: list
List of nodes id (integer) that will be used as sources and targets. The integer should correspond to
node id in G Graph
Returns
------------
edgecut_dict: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and min edge cut criticality value as values
'''
# Save the links cut set for each OD pair
# Links cut set are set of links which if simultaneously removed will disconnect a specific OD pair
testcut = []
for i in range(len(centroid_nodes)):
for j in range(len(centroid_nodes)):
if i != j:
testcut2 = nx.minimum_edge_cut(G=G, s=centroid_nodes[i], t=centroid_nodes[j])
testcut += testcut2
# Create dictionary which stores the occurence of each edge in all cut sets
# Save the edge IDs as dictionary key
edgecut_dict = {}
for edge in list(G.edges()):
edgecut_dict.update({edge: 0})
#increment the value of the dictionary's key if an edge belongs in a cut set
for i in testcut:
try:
edgecut_dict[i] += 1
except:
new_tup = tuple([i[1], i[0]])
edgecut_dict[new_tup] += 1
return edgecut_dict
# Metrics M1: Change in unewighted daily accessibility and
# change in number of nodes accessible within daily reach
def _daily_accessibility(centroid, G, theta, weight='length', beta=0.5):
'''
Helper function for function interdiction_m1
return:
a : daily accessibility index
len(sp_length) : number of nodes accessible within daily travel threshold (theta)
'''
total_sp_length = 0
#calculate shortest path length to all other centroids
# for target in G.nodes(): #why need this?
sp_length = nx.single_source_dijkstra_path_length(G=G, source=centroid, cutoff=theta, weight=weight)
count_node = 0
for item in sp_length:
try:
total_sp_length += 1 / (item[1]**beta)
except:
pass
count_node += 1
#calculate the accessibility
try:
a = total_sp_length
except:
a = 0
return a, count_node
def _dict_daily_accessibility(centroids, G, theta, weight='length', beta=0.5):
'''
Helper function for function interdiction_m1
return:
a_dict : dictionary of daily accessibility, keyed by centroids id
a_n_dict : dictionary of number of nodes accessible within daily travel threshold, keyed by centroids id
'''
a_dict = {}
a_n_dict = {}
for centroid in centroids:
a, a_n = _daily_accessibility(centroid=centroid, G=G, theta=theta, weight=weight, beta=beta)
a_dict.update({centroid:a})
a_n_dict.update({centroid:a_n})
return a_dict, a_n_dict
def _sum_daily_accessibility(a_dict, a_n_dict):
'''
Helper function for function interdiction_m1
'''
sum_a = 0
for key, val in a_dict.items():
sum_a += val
sum_a_n = 0
for key, val in a_n_dict.items():
sum_a_n += val
return sum_a, sum_a_n
def _all_daily_sp_record(G, sources, cutoff, weight):
'''
Helper function for function interdiction_m1
return:
all_daily_sp_list : list of all sources' daily shortest paths (shortest paths to node which
are accessible on daily basis)
all_daily_sp_dict : dictionary of all shortest paths for each source, keyed by source id
'''
edgelist = []
for edge in list(G.edges()):
edgelist.append(edge)
all_daily_sp_list = []
all_daily_sp_dict = {}
for source in sources:
sp_dict = nx.single_source_dijkstra_path(G=G, source=source, cutoff=cutoff, weight=weight)
source_sp_list = []
for key, val in sp_dict.items():
for n in np.arange(0, len(val)-1, 1):
start = val[n]
end = val[n+1]
if tuple([start, end]) in edgelist:
all_daily_sp_list.append(tuple([start, end]))
source_sp_list.append(tuple([start, end]))
elif tuple([end, start]) in edgelist:
all_daily_sp_list.append(tuple([end, start]))
source_sp_list.append(tuple([end, start]))
else:
print('edge not found', start, end)
source_sp_list = list(set(source_sp_list))
all_daily_sp_dict.update({source:source_sp_list})
all_daily_sp_list = list(set(all_daily_sp_list))
return all_daily_sp_list, all_daily_sp_dict
def interdiction_m1(G2, centroids, theta, weight, beta=0.5):
'''
Function to calculate metric M1_01 (change in unweighted daily accessibility)
and metric M1_02 (change in number of nodes accessible within daily reach)
Parameters
------------
G2: Graph
Transport network Graph Networkx object that will be analyzed
centroids: list
List of nodes id (integer) that will be used as sources and targets. The integer should correspond to
node id in G Graph
theta: float
Threshold of daily travel distance (in kilometer)
weight: str
String which corresponds to attribute of G2 Graph's edges that will be used as penalty for each
edge. In most cases this is defined as 'length' of the edge
beta: float
Distance sensitivity parameter for accessibility calculation
Returns
------------
m1_01_dict: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and change in unweighted daily accessibility as values
m1_02_dict: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and change in number of nodes accessible within daily
reach as values
'''
ff=0
m1_01_dict = {}
m1_02_dict = {}
#record the daily accessibility of each centroid
a_dict_base, a_n_dict_base = _dict_daily_accessibility(centroids=centroids, G=G2, theta=theta, weight=weight, beta=beta)
#record the business as usual total accessibility
sum_a_base, sum_a_n_base = _sum_daily_accessibility(a_dict_base, a_n_dict_base)
#record the daily shortest paths list for each centroid
all_daily_sp_list, all_daily_sp_dict = _all_daily_sp_record(G=G2, sources=centroids, cutoff=theta, weight='length')
#iterate for all edges in daily shortest path list
print('start')
for edge in all_daily_sp_list:
ff += 1
if ff%50 == 0:
print(str(ff)+' edges have been interdicted')
u = edge[0]
v = edge[1]
tup = tuple([u,v])
#make a copy of the daily accessibility dictionary
a_dict_base2 = a_dict_base.copy()
a_n_dict_base2 = a_n_dict_base.copy()
#make a copy of the original graph
G = G2.copy()
#remove that edge
G.remove_edge(u,v)
#iterate over all centroids
for key, val in all_daily_sp_dict.items():
#if the removed edge is part of that centroid's daily shortest path
#recalculate the daily accessibility of that centroid
#and update the daily accessibility dictionary
if tup in val:
a_new, a_n_new = _daily_accessibility(centroid=key, G=G, theta=theta, weight=weight, beta=beta)
a_dict_base2.update({key:a_new})
a_n_dict_base2.update({key:a_n_new})
sum_a_new, sum_a_n_new = _sum_daily_accessibility(a_dict_base2, a_n_dict_base2)
m1_01 = sum_a_base / sum_a_new
m1_01_dict.update({edge:m1_01})
m1_02 = sum_a_n_base / sum_a_n_new
m1_02_dict.update({edge:m1_02})
edgelist = []
for edge in list(G2.edges()):
edgelist.append(edge)
#if an edge does not have value yet
#assign 0 to it
for edge in edgelist:
if not edge in m1_01_dict.keys():
m1_01_dict.update({edge:0})
if not edge in m1_02_dict.keys():
m1_02_dict.update({edge:0})
return m1_01_dict, m1_02_dict
# Metrics M2: Change in unweighted total travel cost and
# change in network average efficiency
def _total_cost_sp_inversed(G, sources, targets, weight):
'''
Helper function for function interdiction_m2
Input:
G : Graph Networkx object
sources, targets : List of nodes sources IDs and nodes targets IDs (e.g. the centroid nodes)
weight : Edge data key corresponding to the edge weight
Output:
total_cost : Sum of inversed total cost
d : Dict with centroid pairs tuple as keys (e.g. (2,3) ) and inversed unweighted total
cost as values
'''
d={}
total_cost = 0
for i in range(len(sources)):
source = sources[i]
for j in range(len(targets)):
target = targets[j]
if source != target :
sp_dijk_distance = nx.dijkstra_path_length(G, source=source, target=target, weight=weight)
cost = 1/sp_dijk_distance
total_cost += cost
tup=tuple([source,target])
d.update({tup:cost})
return total_cost, d
def _network_efficiency_calc(G, total_cost_inversed):
'''
Helper function for function interdiction_m2
return:
eff: Network efficiency
'''
node_calc = 2 / (G.number_of_nodes() * (G.number_of_nodes() - 1))
eff = node_calc * total_cost_inversed
return eff
def interdiction_m2(G2, od, weight, sources, targets):
'''
Function to calculate metric m2_01 (change in total unweighted travel cost) and
m2_02 (change in network average efficiency)
Parameters
------------
G2: Graph
Transport network Graph Networkx object that will be analyzed
sources, targets: list
List of nodes (integer) that will be used as centroids. The integer should correspond to
node id in G Graph
weight: str
String which corresponds to attribute of G2 Graph's edges that will be used as penalty for each
edge. In most cases this is defined as 'length' of the edge.
od: DataFrame
OD matrix dataframe
Returns
------------
m2_01_dict: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and change in total unweighted travel cost as values
m2_02_dict: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and change in network average efficiency as values
'''
ff=0
m2_01_dict = {}
m2_02_dict = {}
sp_dict_graph = sp_dict_graph_creation(G=G2, sources=sources,
targets=targets, weight=weight)
#record unweighted total cost, and shortest path cost for each OD pair
#for calculating metrics M2_01
total_cost_base, od_cost_dict = _total_cost_sp(G=G2, sources=sources, targets=targets,
weight=weight, od=od, weighted=False)
#record inversed unweighted total cost, and inversed shortest path cost for each OD pair
#for calculating metrics M2_02
total_cost_sp_inversed, od_cost_inversed_dict = _total_cost_sp_inversed(G=G2, sources=sources,
targets=targets, weight=weight)
#record the business as usual network efficiency for calculating metrics M2_02
efficiency_base = _network_efficiency_calc(G=G2, total_cost_inversed=total_cost_sp_inversed)
#record all shortest paths of all OD pairs
path_in_sp_list = []
for i in sp_dict_graph.items():
path_in_sp_list += i[1]
path_in_sp_list = list(set(path_in_sp_list))
#iterate for each shortest path
print('start')
for i in path_in_sp_list:
ff += 1
if ff%50 == 0:
print(str(ff)+' edges have been interdicted')
u = i[0]
v = i[1]
tup = tuple([u,v])
#make a copy of the unweighted (inversed) cost dictionary
od_cost_dict2 = od_cost_dict.copy()
od_cost_inversed_dict2 = od_cost_inversed_dict.copy()
#make a copy of the original graph
G = G2.copy()
#remove that edge
G.remove_edge(u,v)
#iterate over all OD pairs
#very time consuming. gimana kalau untuk setiap edge, di store dia melalui shortest path od pair mana aja?
#jadi gak perlu iterate 4000 kali untuk setiap edge
for key, val in sp_dict_graph.items():
#if the removed edge is part of that OD pair's shortest path
#recalculate the unweighted (inversed) cost
#and update the corresponding dictionaries
if tup in val:
try:
print(tup, key)
sp_dijk_distance = nx.dijkstra_path_length(G, source=key[0], target=key[1], weight=weight)
cost = sp_dijk_distance
od_cost_dict2[key] = cost
od_cost_inversed_dict2[key] = 1 / cost
except:
sp_dijk_distance = 9999
#update (inversed) total cost
total_cost_new = sum(od_cost_dict2.values())
total_cost_inversed_new = sum(od_cost_inversed_dict2.values())
#update efficiency
efficiency_new = _network_efficiency_calc(G, total_cost_inversed_new)
#calculate metrics
m2_01 = total_cost_new/total_cost_base
if m2_01 < 0:
m2_01 = 0
m2_01_dict.update({tup:m2_01})
m2_02 = (efficiency_base - efficiency_new)/efficiency_base
m2_02_dict.update({tup:m2_02})
edgelist = []
for edge in list(G2.edges()):
edgelist.append(edge)
#if an edge does not have value yet
#assign 0 to it
for edge in edgelist:
if not edge in m2_01_dict.keys():
m2_01_dict.update({edge:0})
if not edge in m2_02_dict.keys():
m2_02_dict.update({edge:0})
return m2_01_dict, m2_02_dict
# Metric M6: Change in weighted daily accessibility
def _weighted_accessibility(G, centroid, targets, flow, weight, beta=0.5):
a = 0
acc_dict = {}
if type(targets)==list:
for target in targets:
if target != centroid:
dist = nx.dijkstra_path_length(G=G, source=centroid, target=target, weight=weight)
a_val = (flow.iloc[centroid] * flow.iloc[target])/(dist**beta) #change iloc instead of ix
a = a_val
acc = a.flow
acc_dict.update({target:acc})
else:
dist = nx.dijkstra_path_length(G=G, source=centroid, target=targets, weight=weight)
a_val = (flow.iloc[centroid] * flow.iloc[targets])/(dist**beta) #change iloc instead of ix
a = a_val
acc_dict = a.flow
# print(acc_dict)
return acc_dict
def _sum_weighted_accessibility(G, centroids, flow, weight, beta=0.5):
a_sum_dict = {}
a_master_dict = {}
for centroid in centroids:
a_dict = _weighted_accessibility(G=G, centroid=centroid, targets=centroids,
flow=flow, weight=weight, beta=beta)
a_val = sum(a_dict.values())
a_sum_dict.update({centroid:a_val})
a_master_dict.update({centroid:a_dict})
a_sum = sum(a_sum_dict.values())
return a_sum, a_master_dict
def interdiction_m6(G2, weight, centroids, od, beta=0.5):
'''
Function to calculate metric m6_01 (change in weighted accessibility)
Parameters
------------
G2: Graph
Transport network Graph Networkx object that will be analyzed
centroids: list
List of nodes (integer) that will be used as centroids. The integer should correspond to
node id in G Graph
weight: str
String which corresponds to attribute of G2 Graph's edges that will be used as penalty for each
edge. In most cases this is defined as 'length' of the edge.
od: DataFrame
OD matrix dataframe
beta: float
Distance sensitivity parameter for accessibility calculation
Returns
------------
m6_01_dict: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and change in weighted accessibility as values
'''
ff=0
m6_01_dict = {}
#record total traffic going in and going out of each district
flow = pd.DataFrame(od.sum(axis=0) + od.sum(axis=1))
#record shortest path of each OD pair
sp_dict_graph = sp_dict_graph_creation(G=G2, sources=centroids, targets=centroids, weight=weight)
#record total weighted accessibility, and weighted accessibility for each OD pair
a_sum_base, a_sum_dict = _sum_weighted_accessibility(G=G2, centroids=centroids,
flow=flow, weight=weight, beta=beta)
#record all shortest paths of all OD pairs
path_in_sp_list = []
for i in sp_dict_graph.items():
path_in_sp_list += i[1]
path_in_sp_list = list(set(path_in_sp_list))
#iterate for each shortest path
# print('start')
for i in path_in_sp_list:
ff += 1
# if ff%100 == 0:
# print(str(ff)+' edges have been interdicted')
u = i[0]
v = i[1]
tup = tuple([u,v])
#make a copy of the weighted accessibility dictionary
a_sum_dict2 = a_sum_dict.copy()
#make a copy of the original graph
G = G2.copy()
#remove that edge
G.remove_edge(u,v)
# create empty list to store centroid that has been updated
updated_centroid = []
#iterate over all OD pairs
#very time consuming. gimana kalau untuk setiap edge, di store dia melalui shortest path od pair mana aja?
#jadi gak perlu iterate 4000 kali untuk setiap edge
for key, val in sp_dict_graph.items():
#if the removed edge is part of that OD pair's shortest path
#recalculate the weighted accessibility for the source
#and update the corresponding dictionaries
if not key[0] in updated_centroid:
if tup in val:
updated_centroid.append(key[0])
try:
a_new = _weighted_accessibility(G=G, centroid=key[0], targets=centroids,
flow=flow, weight=weight, beta=beta)
a_sum_dict2.update({key[0]:a_new})
except:
sp_dijk_distance = 9999
#update weighted accessibility
a_sum_new = sum(a_sum_dict2.values())
#calculate metrics
m6_01 = a_sum_base/a_sum_new
if m6_01 < 0:
m6_01 = 0
m6_01_dict.update({tup:m6_01})
edgelist = []
for edge in list(G2.edges()):
edgelist.append(edge)
#if an edge does not have value yet
#assign 0 to it
for edge in edgelist:
if not edge in m6_01_dict.keys():
m6_01_dict.update({edge:0})
return m6_01_dict
# Metrics M7: Change in expected user exposure and
# change in worst case user exposure
def _shortest_path_cost(G, centroids, weight):
'''
Helper function for function interdiction_user_exposure
return:
sp_cost_dict: dictionary with od pairs centroid id tuple as keys (e.g (2,3)) and shortest path cost between
them as values
'''
sp_cost_dict = {}
for centroid in centroids:
for target in centroids:
if centroid != target:
cost = nx.dijkstra_path_length(G=G, source=centroid, target=target, weight=weight)
sp_cost_dict.update({tuple([centroid, target]):cost})
return sp_cost_dict
def _user_exposure(G_new, centroid, target, weight, od, sp_cost_dict):
'''
Helper function for function interdiction_user_exposure
return:
exposure: exposure of a centroid due to link disruption
'''
old_cost = sp_cost_dict[tuple([centroid, target])]
try:
new_cost = nx.dijkstra_path_length(G=G_new, source=centroid, target=target, weight=weight)
except:
new_cost = old_cost
flow = od[centroid][target]
exposure = flow * (new_cost - old_cost)
return exposure
# Metrics M7_02 and M7_03
def interdiction_user_exposure(G2, centroids, weight, od):
'''
Function to calculate metric m7_02 (Change in expected user exposure) and
m7_03 (change in worst case user exposure)
Parameters
------------
G2: Graph
Transport network Graph Networkx object that will be analyzed
centroids: list
List of nodes (integer) that will be used as centroids. The integer should correspond to
node id in G Graph
weight: str
String which corresponds to attribute of G2 Graph's edges that will be used as penalty for each
edge. In most cases this is defined as 'length' of the edge.
od: DataFrame
OD matrix dataframe
Returns
------------
m7_02_dict: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and Change in expected user exposure as values
m7_03_dict: dict
Dictionary with edge tuple as keys (e.g. (2,3) ) and change in worst case user exposure as values
'''
ff = 0
m7_02_dict = {}
m7_03_dict = {}
# record business as usual shortest path costs for all OD pairs
sp_cost_dict = _shortest_path_cost(G=G2, centroids=centroids, weight=weight)
# records business as usual shortest path edges for all OD pairs
sp_dict_graph = sp_dict_graph_creation(G=G2, sources=centroids, targets=centroids, weight=weight)
#record all shortest paths of all OD pairs
path_in_sp_list = []
for i in sp_dict_graph.items():
path_in_sp_list += i[1]
path_in_sp_list = list(set(path_in_sp_list))
#iterate for each edges in shortest paths
print('start')
for i in path_in_sp_list:
ff += 1
if ff%50 == 0:
print(str(ff)+' edges have been interdicted')
u = i[0]
v = i[1]
#create dictionary for user exposure of each centroids
user_exposure_dict = {}
for centroid in centroids:
user_exposure_dict.update({centroid:[]})
#make a copy of the original graph
G = G2.copy()
#remove that edge
G.remove_edge(u,v)
sum_exposure = 0
#iterate over all OD pairs
for key, val in sp_dict_graph.items():
if i in val:
exposure = _user_exposure(G_new=G, centroid=key[0], target=key[1],
weight=weight, od=od, sp_cost_dict=sp_cost_dict)
user_exposure_dict[key[0]].append(exposure)
#calculate expected and worst user exposure
expected_ue_dict = {}
worst_ue_dict = {}
for key, val in user_exposure_dict.items():
if len(val) > 0:
average_val = average(val)
worst_val = max(val)
else:
average_val = 0
worst_val = 0
expected_ue_dict.update({key:average_val})
worst_ue_dict.update({key:worst_val})
#calculate metrics
m7_02 = sum(expected_ue_dict.values())/len(centroids)
m7_03 = sum(worst_ue_dict.values())/len(centroids)
m7_02_dict.update({i:m7_02})
m7_03_dict.update({i:m7_03})
edgelist = []
for edge in list(G2.edges()):
edgelist.append(edge)
#if an edge does not have value yet
#assign 0 to it
for edge in edgelist:
if not edge in m7_02_dict.keys():
m7_02_dict.update({edge:0})
if not edge in m7_03_dict.keys():
m7_03_dict.update({edge:0})
return m7_02_dict, m7_03_dict
def _average_shortest_path_length2(G, weight='length'):
try:
dist = nx.average_shortest_path_length(G=G, weight=weight)
except:
cc = 0
if nx.number_connected_components(G) > 1:
for subgraph in nx.connected_component_subgraphs(G):
if len(subgraph) > cc:
cc = len(subgraph)
graph = subgraph
dist = nx.average_shortest_path_length(G=graph, weight=weight)
return dist
# Metric M3_02
def interdiction_m3_02(row, div_graph_dict, div_init_avrgdist_dict, weight='length'):
#get the division
division = row['division']
try:
G = div_graph_dict[division].copy()
init_average_sp_distance = div_init_avrgdist_dict[division]
#remove the path
G.remove_edge(row['FNODE_'], row['TNODE_'])
#recalculate average shortest path distance
new_average_sp_distance = _average_shortest_path_length2(G=G, weight=weight)
m3_02 = new_average_sp_distance / init_average_sp_distance
except:
m3_02 = 0
return m3_02
def _find_distinct_sp(G, source, target, weight, cutoff=3):
#record all edges
edgelist = []
for edge in G.edges():
edgelist.append(edge)
#create empty shortest path list
sp_list = []
#record the number of shortest path
n_sp = 0
#record maximum allowed distance, which is 'cutoff' times the initial shortest path distance
init_len = nx.dijkstra_path_length(G=G, source=source, target=target, weight=weight)
max_len = init_len * cutoff
current_len = 0
#copy the graph
G2 = G.copy()
num = 0
# iterate as long as the new shortest path distance is smaller than the maximum
while current_len < max_len:
num += 1
if num%10 == 0:
print('same od pair has been iterated ' + str(num) + ' times')
print('it is od pair ' + str([source, target]))
try:
#compute the current shortest path's distance
pathlength = nx.single_source_dijkstra(G=G2, source=source, target=target, weight=weight)
new_len = pathlength[0][target]
current_len = new_len
#if it is still lower than the threshold, do the algorithm
if new_len < max_len:
n_sp += 1
#record the new shortest path
new_sp = pathlength[1][target]
tup = []
for i in arange(0, len(new_sp)-1, 1):
new_path = tuple([new_sp[i], new_sp[i+1]])
if new_path in edgelist:
sp_list.append(new_path)
#TODO: make sure that the links where node source/target resides are not removed
tup.append(new_path)
else:
new_path = tuple([new_sp[i+1], new_sp[i]])
sp_list.append(new_path)
tup.append(new_path)
#remove all paths in the new shortest path
G2.remove_edges_from(tup)
# for path in tup:
# G2.remove_edge(path[0], path[1])
#if there is no more path available between the source and the target, break the loop
except:
break
sp_list = list(set(sp_list))
return sp_list, n_sp
def _distinct_path_all_pairs(G, centroids, weight, cutoff=3):
ksp = {}
centroid_nodes2 = copy.deepcopy(centroids)
for node in centroids:
centroid_nodes2.remove(node)
for node2 in centroid_nodes2:
sp_list, n_sp = _find_distinct_sp(G=G, source=node, target=node2, weight=weight, cutoff=cutoff)
ksp.update({(node, node2):[sp_list, n_sp]})
if len(ksp)%50==0:
print(str(len(ksp)) + ' od pairs has been calculated')
return ksp
# Metric M5_01
def metric_m5_01(gdf, line, cutoff):
gdf2 = gdf.copy()
area = line.buffer(cutoff)
gdf2 = gdf2.loc[gdf2['geometry'].intersects(area)]
gdf2['endpoint1'] = gdf2.geometry.apply(lambda geom: Point(geom.coords[0]))
gdf2['endpoint2'] = gdf2.geometry.apply(lambda geom: Point(geom.coords[-1]))
gdf2['geometry'] = gdf2['endpoint1']
gdf2 = gdf2.loc[gdf2['geometry'].within(area)]
gdf2['geometry'] = gdf2['endpoint2']
gdf2 = gdf2.loc[gdf2['geometry'].within(area)]
redundancy = len(gdf2) - 3
if redundancy < 0:
redundancy = 0
return redundancy
# Correlation coefficient analysis for metrics comparison
def correlate_metrics_pearson(df, m_a, m_b):
df2 = df[df[m_a] + df[m_b] != 0]
df2 = df.copy()
r, p = pearsonr(df2[m_a], df2[m_b])
return r, p, len(df2)
def correlate_metrics_spearman(df, m_a, m_b):
df2 = df[df[m_a] + df[m_b] != 0]
df2 = df.copy()
r, p = spearmanr(df2[m_a], df2[m_b])
return r, p, len(df2)
def correlate_metrics_ks(df, m_a, m_b):
df2 = df[df[m_a] + df[m_b] != 0]
df2 = df.copy()
D, p = ks_2samp(df2[m_a], df2[m_b])
return D, p
# Functions for metrics robustness analysis
def rank_robustness(df, all_metrics):
#create dataframe per replication
all_df = []
for i, num in enumerate(list(set(list(df['rep'])))):
exec("rep_{}_df = df.loc[df['rep']==i]".format(i))
exec("all_df.append(rep_{}_df)".format(i))
#calculate Spearman rank correlation coefficient between each replication
spearman_names = []
spearman_dfs = []
for metric in all_metrics:
c = metric
exec("{}_spearman_df = _spearman_ks_all_rep(all_df, rep_0_df, metric)".format(metric))
exec("spearman_dfs.append({}_spearman_df)".format(metric))
spearman_names.append(metric)
print("{}_spearman_df has been created".format(metric))
spearmans_dict = dict(zip(spearman_names, spearman_dfs))
return spearmans_dict
def dist_robustness(df, all_metrics):
#create dataframe per replication
all_df = []
for i, num in enumerate(list(set(list(df['rep'])))):
exec("rep_{}_df = df.loc[df['rep']==i]".format(i))
exec("all_df.append(rep_{}_df)".format(i))
#calculate Kolmogorov-Smirnov distance between each replication
ks_names = []
ks_dfs = []
for metric in all_metrics:
c = metric
exec("{}_ks_df = _spearman_ks_all_rep(all_df, rep_0_df, metric, type='ks')".format(metric))
exec("ks_dfs.append({}_ks_df)".format(metric))
ks_names.append(metric)
print("{}_ks_df has been created".format(metric))
ks_dict = dict(zip(ks_names, ks_dfs))
return ks_dict
def _spearman_ks_all_rep(all_df, rep_0_df, metric, type='Spearman'):
# create dataframe of spearman rank correlation coefficient
n = 100
top_link = []
rep_0_new_df = rep_0_df.loc[rep_0_df[metric]!=0]
top_link.extend(list(rep_0_new_df.sort_values(metric, ascending=False).osmid[:n]))
r_df = pd.DataFrame(np.nan, index=list(np.arange(0,len(all_df),1)), columns=list(np.arange(0,len(all_df),1)))
all_rep = list(r_df.columns)
all_rep2 = copy.deepcopy(all_rep)
for i in all_rep:
current_df1 = all_df[i][[metric,'osmid']]
current_df1 = current_df1.loc[current_df1['osmid'].isin(top_link)]
current_df1 = current_df1.sort_values('osmid')
for j in all_rep2:
current_df2 = all_df[j][[metric,'osmid']]
current_df2 = current_df2.loc[current_df2['osmid'].isin(top_link)]
current_df2 = current_df2.sort_values('osmid')
if type == 'Spearman':
r, p = spearmanr(current_df1[metric], current_df2[metric])
r_df.set_value(i, j, r)
else:
r, p = ks_2samp(current_df1[metric], current_df2[metric])
r_df.set_value(i, j, r)
all_rep2.remove(i)
r_df = r_df.transpose()
r_df.fillna(0)
return r_df
def value_sensitivity(df, all_metrics):
#TODO: still use quick fix here
df2 = df.copy()
df2['m03_02'] = df2['m03_02'].apply(lambda val: 0 if val <= 1 else val-1)
df2['m01_02'] = df2['m01_02'].apply(lambda val: 0 if val <= 1 else val-1)
df2['m01_01'] = df2['m01_01'].apply(lambda val: 0 if val <= 1 else val-1)
df2['m02_01'] = df2['m02_01'].apply(lambda val: 0 if val <= 1 else val-1)
df2['m06_01'] = df2['m06_01'].apply(lambda val: 0 if val <= 1 else val-1)
all_df = []
for i, num in enumerate(list(set(list(df2['rep'])))):
exec("rep_{}_df = df2.loc[df2['rep']==i]".format(i))
exec("all_df.append(rep_{}_df)".format(i))
mad_all_list = []
std_all_list = []
for metric in all_metrics:
mad, stdev = _mad_std_all_rep(all_df, rep_0_df, metric)
mad_all_list.append(mad)
std_all_list.append(stdev)
allvalue_df = pd.DataFrame({'metric': all_metrics, 'mad': mad_all_list, 'std': std_all_list})
allvalue_df2 = allvalue_df.sort_values('metric')
#remove m04_02 since there is supposed to be no robustness analysis for that metric
allvalue_df2 = allvalue_df2.loc[allvalue_df2['metric']!='m04_02']
return allvalue_df2
def _mad_std_all_rep(all_df, rep_0_df, metric):
n = 100
top_link = []
rep_0_new_df = rep_0_df.loc[rep_0_df[metric]!=0]
top_link.extend(list(rep_0_new_df.sort_values(metric, ascending=False).osmid[:n]))
mad_list = []
std_list = []
for link in top_link:
crit_list = []
init_val = all_df[0].loc[all_df[0]['osmid']==link][metric].iloc[0]
for dataset in all_df:
crit_score = dataset.loc[dataset['osmid']==link][metric].iloc[0]
crit_list.append(crit_score)
crit_list = [x / init_val for x in crit_list]
mad = median_absolute_deviation(crit_list)
data_std = std(crit_list)
mad_list.append(mad)
std_list.append(data_std)
mad_list = [x if x >= 0 else 0 for x in mad_list]
std_list = [x if x >= 0 else 0 for x in std_list]
return mean(mad_list), mean(std_list)
def node_assignment(flow, nodes_gdf):
'''
Put results of flow assignment to nodes
Parameters
------------
flow: dict
Flow dictionary obtained from assignment function (e.g. from aon_assignment or probit_assignment)
nodes_gdf: GeoDataFrame
Geodataframe of all nodes in the original network
Returns
------------
nodes_gdf: GeoDataFrame
Geodataframe of all nodes in the simplified network, with 'flow' information added
'''
#take all points in the simplified graph
#take all points in the simplified graph
all_points = set()
for tup in flow.keys():
for entry in tup:
all_points.add(entry)
#new gdf of points that exist in the simplified graph
nodes_gdf2 = nodes_gdf.copy()
nodes_gdf2.loc[:, 'bool'] = nodes_gdf2.loc[:, 'Node'].apply(lambda n: True if n in all_points else False)
nodes_gdf2 = nodes_gdf2[nodes_gdf2.loc[:,'bool']==True]
newflow = {}
for keys, val in flow.items():
for key in keys:
try:
newflow[key] += val
except:
newflow[key] = val
def lab(x):
return newflow[x]
nodes_gdf2.loc[:, 'flow'] = nodes_gdf2.loc[:, 'Node'].apply(lab)
return nodes_gdf2
def node_betweenness_centrality(flow, nodes_gdf, od):
'''
Calculate weighted betweenness centrality on nodes level
Parameters
------------
flow: dict
Flow dictionary obtained from assignment function (e.g. from aon_assignment or probit_assignment)
nodes_gdf: GeoDataFrame
Geodataframe of all nodes in the original network
od: DataFrame
OD matrix dataframe
Returns
------------
nodes_gdf: GeoDataFrame
Geodataframe of all nodes in the simplified network, with 'centrality' and 'flow' information added
'''
#assign flow onto nodes
nodes_gdf2 = node_assignment(flow, nodes_gdf)
#record the total flow in the network
totalval = (sum(od.sum())) * 2
#normalize the flow
nodes_gdf2['centrality'] = nodes_gdf2['flow']/totalval
return nodes_gdf2
|
{
"content_hash": "619c7090ce1203e8a6914b5d2bfcc994",
"timestamp": "",
"source": "github",
"line_count": 1828,
"max_line_length": 124,
"avg_line_length": 35.21498905908096,
"alnum_prop": 0.5607319839062961,
"repo_name": "bramkaarga/transcrit",
"id": "fc1cb0fd560d4d898cef10124f9b2bedca137ea0",
"size": "64778",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "transport_network_modeling/criticality.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "9971810"
},
{
"name": "Python",
"bytes": "225619"
}
],
"symlink_target": ""
}
|
from .amqp_exchange import PulsarExchange
from .util import filter_destination_params
def get_exchange(url, manager_name, params):
connect_ssl = parse_amqp_connect_ssl_params(params)
exchange_kwds = dict(
manager_name=manager_name,
connect_ssl=connect_ssl,
publish_kwds=parse_amqp_publish_kwds(params)
)
timeout = params.get('amqp_consumer_timeout', False)
if timeout is not False:
exchange_kwds['timeout'] = timeout
exchange = PulsarExchange(url, **exchange_kwds)
return exchange
def parse_amqp_connect_ssl_params(params):
ssl_params = filter_destination_params(params, "amqp_connect_ssl_")
if not ssl_params:
return
ssl = __import__('ssl')
if 'cert_reqs' in ssl_params:
value = ssl_params['cert_reqs']
ssl_params['cert_reqs'] = getattr(ssl, value.upper())
return ssl_params
def parse_amqp_publish_kwds(params):
all_publish_params = filter_destination_params(params, "amqp_publish_")
retry_policy_params = {}
for key in all_publish_params.keys():
if key.startswith("retry_"):
value = all_publish_params[key]
retry_policy_params[key[len("retry_"):]] = value
del all_publish_params[key]
if retry_policy_params:
all_publish_params["retry_policy"] = retry_policy_params
return all_publish_params
|
{
"content_hash": "6d9fccdcf1712f6251025cca15e739f8",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 75,
"avg_line_length": 33.5609756097561,
"alnum_prop": 0.6584302325581395,
"repo_name": "jmchilton/pulsar",
"id": "df26f8f58536bb405bfa862ab1b1fbb8c205645f",
"size": "1376",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pulsar/client/amqp_exchange_factory.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "3639"
},
{
"name": "Python",
"bytes": "753626"
},
{
"name": "Shell",
"bytes": "12474"
}
],
"symlink_target": ""
}
|
import json
import logging
import urllib2
class SpectatorClient(object):
"""Helper class for pulling data from Spectator servers."""
SERVICE_PORT_MAP = {
'clouddriver': 7002,
'echo': 8089,
'fiat': 7003,
'front50': 8080,
'gate': 8084,
'igor': 8088,
'orca': 8083,
'rosco': 8087,
}
def __init__(self, options):
self.__host = options.host
self.__prototype = None
self.__options = options
self.__default_scan_params = {}
if options.prototype_path:
with open(options.prototype_path) as fd:
self.__prototype = json.JSONDecoder().decode(fd.read())
def collect_metrics(self, host, port, params=None):
"""Return JSON metrics from the given server."""
sep = '?'
query = ''
query_params = dict(self.__default_scan_params)
query_params.update(params or {})
for key, value in query_params.items():
query += sep + key + "=" + urllib2.quote(value)
sep = "&"
url = 'http://{host}:{port}/spectator/metrics{query}'.format(
host=host, port=port, query=query)
response = urllib2.urlopen(url)
all_metrics = json.JSONDecoder(encoding='utf-8').decode(response.read())
return (self.filter_metrics(all_metrics, self.__prototype)
if self.__prototype else all_metrics)
def filter_metrics(self, instance, prototype):
"""Filter metrics entries in |instance| to those that match |prototype|.
Only the names and tags are checked. The instance must contain a
tag binding found in the prototype, but may also contain additional tags.
The prototype is the same format as the json of the metrics returned.
"""
filtered = {}
metrics = instance.get('metrics') or {}
for key, expect in prototype.get('metrics', {}).items():
got = metrics.get(key)
if not got:
continue
expect_values = expect.get('values')
if not expect_values:
filtered[key] = got
continue
expect_tags = [elem.get('tags') for elem in expect_values]
# Clone the dict because we are going to modify it to remove values
# we dont care about
keep_values = []
def have_tags(expect_tags, got_tags):
for wanted_set in expect_tags:
ok = True
for want in wanted_set:
if want not in got_tags:
ok = False
break
if ok:
return True
return expect_tags == []
for got_value in got.get('values', []):
got_tags = got_value.get('tags')
if have_tags(expect_tags, got_tags):
keep_values.append(got_value)
if not keep_values:
continue
keep = dict(got)
keep['values'] = keep_values
filtered[key] = keep
result = dict(instance)
result['metrics'] = filtered
return result
def scan_by_service(self, service_list, params=None):
result = {}
if service_list == ['all']:
service_list = self.SERVICE_PORT_MAP.keys()
for service in service_list:
port = self.SERVICE_PORT_MAP[service]
try:
result[service] = self.collect_metrics(self.__host, port, params=params)
except IOError as ioex:
logging.getLogger(__name__).error('%s failed: %s', service, ioex)
return result
def scan_by_type(self, service_list, params=None):
service_map = self.scan_by_service(service_list, params=params)
return self.service_map_to_type_map(service_map)
@staticmethod
def ingest_metrics(service, service_response, type_map):
"""Add JSON metrics |response| from |service| name and add them into |type_map|"""
for key, value in service_response['metrics'].items():
if key in type_map:
type_map[key][service] = value
else:
type_map[key] = {service: value}
@staticmethod
def service_map_to_type_map(service_map):
type_map = {}
for service, got in service_map.items():
SpectatorClient.ingest_metrics(service, got, type_map)
return type_map
|
{
"content_hash": "255bfd5b823a7f546435638c03204ab9",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 86,
"avg_line_length": 31.171875,
"alnum_prop": 0.6210526315789474,
"repo_name": "imosquera/spinnaker",
"id": "61089cfa4e1952f62247190ecf594c7696745b21",
"size": "4027",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "google/stackdriver_monitoring/spectator_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7684"
},
{
"name": "Python",
"bytes": "734576"
},
{
"name": "Shell",
"bytes": "116446"
}
],
"symlink_target": ""
}
|
from django.db import models
from models import MODERATION_STATUS_APPROVED
class MetaManager(type(models.Manager)):
def __new__(cls, name, bases, attrs):
return super(MetaManager, cls).__new__(cls, name, bases, attrs)
class ModeratorManagerFactory(object):
@staticmethod
def get(bases):
if not isinstance(bases, tuple):
bases = (bases,)
bases = (ModeratorManager,) + bases
return MetaManager(ModeratorManager.__name__, bases,
{'use_for_related_fields': True})
class ModeratorManager(models.Manager):
def get_queryset(self):
return super(ModeratorManager, self).get_queryset()\
.filter(moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)
def unmoderated(self):
return super(ModeratorManager, self).get_queryset()
|
{
"content_hash": "868ea71e7b01ce4fd297deddd7f5700d",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 82,
"avg_line_length": 29.379310344827587,
"alnum_prop": 0.6596244131455399,
"repo_name": "mpyatishev/djmoderator",
"id": "84abdb516deea652e8adcc52057468fa4bfaba4c",
"size": "878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "moderator/managers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4397"
},
{
"name": "Python",
"bytes": "44649"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/ship/attachment/weapon/shared_blacksun_light_weapon_s06.iff"
result.attribute_template_id = 8
result.stfName("item_n","ship_attachment")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "f0c5b8c3a3d6cc6d3b3a9e84a153810a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 96,
"avg_line_length": 25.23076923076923,
"alnum_prop": 0.7103658536585366,
"repo_name": "obi-two/Rebelion",
"id": "eb62d6f07ce4f22d0cfbf9cc3b8797a0445ff3fb",
"size": "473",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/tangible/ship/attachment/weapon/shared_blacksun_light_weapon_s06.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
import OutputStream
import InputStream
import SequenceFile
import ArrayFile
import MapFile
import SetFile
from Writable import *
from IntWritable import *
from BytesWritable import *
from Text import *
import WritableUtils
import compress
from hadoop.typedbytes import *
|
{
"content_hash": "326d5ee2284e8a8c9203563249eaae84",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 31,
"avg_line_length": 16.176470588235293,
"alnum_prop": 0.8327272727272728,
"repo_name": "igorgatis/hadoop-tools",
"id": "699a48f3aaa524ca37736c9fac1700dcfa1a19dc",
"size": "1157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/hadoop/io/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "153993"
},
{
"name": "C++",
"bytes": "1251563"
},
{
"name": "Java",
"bytes": "417380"
},
{
"name": "Python",
"bytes": "84234"
},
{
"name": "Shell",
"bytes": "19665"
}
],
"symlink_target": ""
}
|
import pandas as pd
df=pd.read_hdf('bigDataFrame.H5')
|
{
"content_hash": "ac443a0a4ea4e52933bec7e1299a52b1",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 33,
"avg_line_length": 18,
"alnum_prop": 0.7592592592592593,
"repo_name": "uqyge/combustionML",
"id": "91e5cdc7efa536aeba71f3c7e4a4fb23781175dd",
"size": "54",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ildm/of_tables.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4525"
},
{
"name": "C++",
"bytes": "144009"
},
{
"name": "Dockerfile",
"bytes": "816"
},
{
"name": "Jupyter Notebook",
"bytes": "40959474"
},
{
"name": "Makefile",
"bytes": "1310"
},
{
"name": "Python",
"bytes": "276493"
},
{
"name": "Shell",
"bytes": "285"
}
],
"symlink_target": ""
}
|
import redis
from redis.exceptions import ConnectionError
class Redis(object):
def __init__(self, agentConfig, checksLogger, rawConfig):
self.agentConfig = agentConfig
self.checksLogger = checksLogger
self.rawConfig = rawConfig
def run(self):
info = {'running': 0}
try:
host = self.rawConfig['Main']['redis_host']
except (KeyError, TypeError):
host = 'localhost'
try:
port = int(self.rawConfig['Main']['redis_port'])
except (KeyError, TypeError):
port = 6379
try:
password = self.rawConfig['Main']['redis_password']
except (KeyError, TypeError):
password = ''
r = redis.StrictRedis(host=host, port=port, password=password)
try:
info = r.info()
info['running'] = 1
info['keys_on_db0'] = r.info()['db0']['keys']
except ConnectionError as e:
self.checksLogger.error('Failed to collect data: {}'.format(e))
return info
if __name__ == '__main__':
rs = Redis(None, None, None)
print rs.run()
|
{
"content_hash": "ad3240113cc2153e44e55a9768adb342",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 75,
"avg_line_length": 25.488888888888887,
"alnum_prop": 0.5527462946817786,
"repo_name": "MWers/sd-redis-plugin",
"id": "1ff9fd16cd72edc46451ee564883db72d9cd3b9a",
"size": "1147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Redis.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1147"
}
],
"symlink_target": ""
}
|
"""
Verifies *_wrapper in environment.
"""
import os
import sys
import TestGyp
test_format = ['ninja']
os.environ['CC_wrapper'] = 'distcc'
os.environ['LINK_wrapper'] = 'distlink'
os.environ['CC.host_wrapper'] = 'ccache'
test = TestGyp.TestGyp(formats=test_format)
old_env = dict(os.environ)
os.environ['GYP_CROSSCOMPILE'] = '1'
test.run_gyp('wrapper.gyp')
os.environ.clear()
os.environ.update(old_env)
if test.format == 'ninja':
cc_expected = ('cc = ' + os.path.join('..', '..', 'distcc') + ' ' +
os.path.join('..', '..', 'clang'))
cc_host_expected = ('cc_host = ' + os.path.join('..', '..', 'ccache') + ' ' +
os.path.join('..', '..', 'clang'))
ld_expected = 'ld = ../../distlink $cxx'
if sys.platform == 'win32':
ld_expected = 'link.exe'
test.must_contain('out/Default/build.ninja', cc_expected)
test.must_contain('out/Default/build.ninja', cc_host_expected)
test.must_contain('out/Default/build.ninja', ld_expected)
test.pass_test()
|
{
"content_hash": "2e09c6aaba6cf37167513b605733c571",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 79,
"avg_line_length": 28.571428571428573,
"alnum_prop": 0.607,
"repo_name": "brson/gyp",
"id": "09470e1ff59023dc7d1b49b4cb60150a10d77ba0",
"size": "1180",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "test/make_global_settings/env-wrapper/gyptest-wrapper.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "803"
},
{
"name": "Batchfile",
"bytes": "1115"
},
{
"name": "C",
"bytes": "33743"
},
{
"name": "C++",
"bytes": "28576"
},
{
"name": "Objective-C",
"bytes": "3180"
},
{
"name": "Objective-C++",
"bytes": "1857"
},
{
"name": "Python",
"bytes": "1711154"
},
{
"name": "Shell",
"bytes": "10659"
}
],
"symlink_target": ""
}
|
from gerencianet import Gerencianet
from credentials import CREDENTIALS
gn = Gerencianet(CREDENTIALS)
params = {
'id': 1
}
body = {
'description': 'This carnet is about a service'
}
response = gn.create_carnet_history(params=params, body=body)
print(response)
|
{
"content_hash": "f9bc5e568e1180104d23bced3bb0d969",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 62,
"avg_line_length": 18.2,
"alnum_prop": 0.7289377289377289,
"repo_name": "dannielhugo/gn-api-sdk-python-dev",
"id": "72ccf9e51af941c850a47b29c7d80ac5ebf7bae1",
"size": "292",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/examples/create_carnet_history.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16306"
}
],
"symlink_target": ""
}
|
"""This module contains the general information for ComputeServerNode ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class ComputeServerNodeConsts:
ADMIN_POWER_BMC_RESET_DEFAULT = "bmc-reset-default"
ADMIN_POWER_BMC_RESET_IMMEDIATE = "bmc-reset-immediate"
ADMIN_POWER_CMOS_RESET_IMMEDIATE = "cmos-reset-immediate"
ADMIN_POWER_CYCLE_IMMEDIATE = "cycle-immediate"
ADMIN_POWER_DIAGNOSTIC_INTERRUPT = "diagnostic-interrupt"
ADMIN_POWER_DOWN = "down"
ADMIN_POWER_HARD_RESET_IMMEDIATE = "hard-reset-immediate"
ADMIN_POWER_POLICY = "policy"
ADMIN_POWER_SOFT_SHUT_DOWN = "soft-shut-down"
ADMIN_POWER_UP = "up"
AVAILABLE_MEMORY_ = ""
MEMORY_SPEED_ = ""
MEMORY_SPEED_UNSPECIFIED = "unspecified"
OPER_POWER_DEGRADED = "degraded"
OPER_POWER_ERROR = "error"
OPER_POWER_NOT_SUPPORTED = "not-supported"
OPER_POWER_OFF = "off"
OPER_POWER_OFFDUTY = "offduty"
OPER_POWER_OFFLINE = "offline"
OPER_POWER_ON = "on"
OPER_POWER_ONLINE = "online"
OPER_POWER_POWER_SAVE = "power-save"
OPER_POWER_TEST = "test"
OPER_POWER_UNKNOWN = "unknown"
PRESENCE_EMPTY = "empty"
PRESENCE_EQUIPPED = "equipped"
PRESENCE_EQUIPPED_IDENTITY_UNESTABLISHABLE = "equipped-identity-unestablishable"
PRESENCE_EQUIPPED_NOT_PRIMARY = "equipped-not-primary"
PRESENCE_EQUIPPED_WITH_MALFORMED_FRU = "equipped-with-malformed-fru"
PRESENCE_INACCESSIBLE = "inaccessible"
PRESENCE_MISMATCH = "mismatch"
PRESENCE_MISMATCH_IDENTITY_UNESTABLISHABLE = "mismatch-identity-unestablishable"
PRESENCE_MISSING = "missing"
PRESENCE_UNAUTHORIZED = "unauthorized"
PRESENCE_UNKNOWN = "unknown"
TOTAL_MEMORY_ = ""
class ComputeServerNode(ManagedObject):
"""This is ComputeServerNode class."""
consts = ComputeServerNodeConsts()
naming_props = set([u'serverId'])
mo_meta = {
"modular": MoMeta("ComputeServerNode", "computeServerNode", "server-[server_id]", VersionMeta.Version2013e, "InputOutput", 0x3f, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [u'adaptorUnit', u'biosUnit', u'bmcResetReason', u'commSvcRack', u'computeBoard', u'equipmentFanModule', u'equipmentIndicatorLed', u'equipmentLocatorLed', u'equipmentPsu', u'faultInst', u'huuController', u'ioExpander', u'iodController', u'kmipManagement', u'lsbootDef', u'lsbootDevPrecision', u'mgmtBackupServer', u'mgmtController', u'mgmtImporterServer', u'networkAdapterUnit', u'oneTimeBootDevice', u'oneTimePrecisionBootDevice', u'osiController', u'pciEquipSlot', u'powerBudget', u'powerMonitor', u'serverUtilization', u'solIf'], ["Get", "Set"])
}
prop_meta = {
"modular": {
"admin_power": MoPropertyMeta("admin_power", "adminPower", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["bmc-reset-default", "bmc-reset-immediate", "cmos-reset-immediate", "cycle-immediate", "diagnostic-interrupt", "down", "hard-reset-immediate", "policy", "soft-shut-down", "up"], []),
"available_memory": MoPropertyMeta("available_memory", "availableMemory", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [""], ["0-4294967295"]),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"memory_speed": MoPropertyMeta("memory_speed", "memorySpeed", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, ["", "unspecified"], ["0-4294967295"]),
"model": MoPropertyMeta("model", "model", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"num_of_adaptors": MoPropertyMeta("num_of_adaptors", "numOfAdaptors", "byte", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"num_of_cores": MoPropertyMeta("num_of_cores", "numOfCores", "ulong", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"num_of_cores_enabled": MoPropertyMeta("num_of_cores_enabled", "numOfCoresEnabled", "ulong", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"num_of_cpus": MoPropertyMeta("num_of_cpus", "numOfCpus", "byte", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"num_of_eth_host_ifs": MoPropertyMeta("num_of_eth_host_ifs", "numOfEthHostIfs", "ushort", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"num_of_fc_host_ifs": MoPropertyMeta("num_of_fc_host_ifs", "numOfFcHostIfs", "ushort", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"num_of_threads": MoPropertyMeta("num_of_threads", "numOfThreads", "ulong", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"oper_power": MoPropertyMeta("oper_power", "operPower", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, ["degraded", "error", "not-supported", "off", "offduty", "offline", "on", "online", "power-save", "test", "unknown"], []),
"original_uuid": MoPropertyMeta("original_uuid", "originalUuid", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, r"""(([0-9a-fA-F]){8}\-([0-9a-fA-F]){4}\-([0-9a-fA-F]){4}\-([0-9a-fA-F]){4}\-([0-9a-fA-F]){12})|0""", [], []),
"presence": MoPropertyMeta("presence", "presence", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, ["empty", "equipped", "equipped-identity-unestablishable", "equipped-not-primary", "equipped-with-malformed-fru", "inaccessible", "mismatch", "mismatch-identity-unestablishable", "missing", "unauthorized", "unknown"], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8, 0, 255, None, [], []),
"serial": MoPropertyMeta("serial", "serial", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"server_id": MoPropertyMeta("server_id", "serverId", "string", VersionMeta.Version2013e, MoPropertyMeta.NAMING, None, 0, 510, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"total_memory": MoPropertyMeta("total_memory", "totalMemory", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [""], ["0-4294967295"]),
"usr_lbl": MoPropertyMeta("usr_lbl", "usrLbl", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x20, 0, 64, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,64}""", [], []),
"uuid": MoPropertyMeta("uuid", "uuid", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, r"""(([0-9a-fA-F]){8}\-([0-9a-fA-F]){4}\-([0-9a-fA-F]){4}\-([0-9a-fA-F]){4}\-([0-9a-fA-F]){12})|0""", [], []),
"vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
},
}
prop_map = {
"modular": {
"adminPower": "admin_power",
"availableMemory": "available_memory",
"childAction": "child_action",
"dn": "dn",
"memorySpeed": "memory_speed",
"model": "model",
"name": "name",
"numOfAdaptors": "num_of_adaptors",
"numOfCores": "num_of_cores",
"numOfCoresEnabled": "num_of_cores_enabled",
"numOfCpus": "num_of_cpus",
"numOfEthHostIfs": "num_of_eth_host_ifs",
"numOfFcHostIfs": "num_of_fc_host_ifs",
"numOfThreads": "num_of_threads",
"operPower": "oper_power",
"originalUuid": "original_uuid",
"presence": "presence",
"rn": "rn",
"serial": "serial",
"serverId": "server_id",
"status": "status",
"totalMemory": "total_memory",
"usrLbl": "usr_lbl",
"uuid": "uuid",
"vendor": "vendor",
},
}
def __init__(self, parent_mo_or_dn, server_id, **kwargs):
self._dirty_mask = 0
self.server_id = server_id
self.admin_power = None
self.available_memory = None
self.child_action = None
self.memory_speed = None
self.model = None
self.name = None
self.num_of_adaptors = None
self.num_of_cores = None
self.num_of_cores_enabled = None
self.num_of_cpus = None
self.num_of_eth_host_ifs = None
self.num_of_fc_host_ifs = None
self.num_of_threads = None
self.oper_power = None
self.original_uuid = None
self.presence = None
self.serial = None
self.status = None
self.total_memory = None
self.usr_lbl = None
self.uuid = None
self.vendor = None
ManagedObject.__init__(self, "ComputeServerNode", parent_mo_or_dn, **kwargs)
|
{
"content_hash": "d32bc2b30ba284336126eeb4f5af2e99",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 744,
"avg_line_length": 65.00671140939598,
"alnum_prop": 0.6317365269461078,
"repo_name": "ragupta-git/ImcSdk",
"id": "3fec7a0c3ca48a2a1fd3f42df9f184f33397c452",
"size": "9686",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "imcsdk/mometa/compute/ComputeServerNode.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1042023"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding index on 'CronJobLog', fields ['end_time']
db.create_index(u'django_cronium_cronjoblog', ['end_time'])
# Adding index on 'CronJobLog', fields ['ran_at_time', 'is_success', 'code']
db.create_index(u'django_cronium_cronjoblog', ['ran_at_time', 'is_success', 'code'])
# Adding index on 'CronJobLog', fields ['ran_at_time', 'start_time', 'code']
db.create_index(u'django_cronium_cronjoblog', ['ran_at_time', 'start_time', 'code'])
# Adding index on 'CronJobLog', fields ['start_time', 'code']
db.create_index(u'django_cronium_cronjoblog', ['start_time', 'code'])
def backwards(self, orm):
# Removing index on 'CronJobLog', fields ['start_time', 'code']
db.delete_index(u'django_cronium_cronjoblog', ['start_time', 'code'])
# Removing index on 'CronJobLog', fields ['ran_at_time', 'start_time', 'code']
db.delete_index(u'django_cronium_cronjoblog', ['ran_at_time', 'start_time', 'code'])
# Removing index on 'CronJobLog', fields ['ran_at_time', 'is_success', 'code']
db.delete_index(u'django_cronium_cronjoblog', ['ran_at_time', 'is_success', 'code'])
# Removing index on 'CronJobLog', fields ['end_time']
db.delete_index(u'django_cronium_cronjoblog', ['end_time'])
models = {
u'django_cronium.cronjoblog': {
'Meta': {'object_name': 'CronJobLog', 'index_together': "[('code', 'is_success', 'ran_at_time'), ('code', 'start_time', 'ran_at_time'), ('code', 'start_time')]"},
'code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'end_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_success': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'message': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'blank': 'True'}),
'ran_at_time': ('django.db.models.fields.TimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
}
}
complete_apps = ['django_cronium']
|
{
"content_hash": "ba6426f25e347e8a01b852803fab1cbf",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 174,
"avg_line_length": 49.48,
"alnum_prop": 0.597413096200485,
"repo_name": "philippeowagner/django-cronium",
"id": "e04d504cbdc588cff4ebaaa353d69e11b0ea32fd",
"size": "2498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_cronium/migrations/0003_auto__add_index_cronjoblog_end_time__add_index_cronjoblog_ran_at_time_.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17782"
}
],
"symlink_target": ""
}
|
from apertures_additions_lib import Add_quad_apertures_to_lattice
from apertures_additions_lib import Add_bend_apertures_to_lattice
from apertures_additions_lib import Add_rfgap_apertures_to_lattice
from apertures_additions_lib import GetLostDistributionArr
from apertures_additions_lib import AddScrapersAperturesToLattice
from apertures_additions_lib import Add_drift_apertures_to_lattice
from sns_aperture_additions import AddMEBTChopperPlatesAperturesToSNS_Lattice
from rf_models_modifications_lib import Replace_BaseRF_Gap_to_AxisField_Nodes
from rf_quad_overlap_modifications_lib import Replace_BaseRF_Gap_and_Quads_to_Overlapping_Nodes
from quad_overlap_modifications_lib import Replace_Quads_to_OverlappingQuads_Nodes
#---- modification with errors
from errors_modifications_lib import ErrorForNodesModification
from errors_modifications_lib import CoordinateDisplacementNodesModification
from errors_modifications_lib import BendFieldNodesModification
from errors_modifications_lib import LongitudinalDisplacementNodesModification
from errors_modifications_lib import StraightRotationZ_NodesModification
from errors_modifications_lib import StraightRotationX_NodesModification
from errors_modifications_lib import StraightRotationY_NodesModification
from errors_modifications_lib import QuadFieldsErrorsDeployment
__all__ = []
__all__.append("Add_quad_apertures_to_lattice")
__all__.append("Add_bend_apertures_to_lattice")
__all__.append("Add_rfgap_apertures_to_lattice")
__all__.append("GetLostDistributionArr")
__all__.append("AddScrapersAperturesToLattice")
__all__.append("Add_drift_apertures_to_lattice")
__all__.append("AddMEBTChopperPlatesAperturesToSNS_Lattice")
__all__.append("Replace_BaseRF_Gap_to_AxisField_Nodes")
__all__.append("Replace_BaseRF_Gap_and_Quads_to_Overlapping_Nodes")
__all__.append("Replace_Quads_to_OverlappingQuads_Nodes")
__all__.append("ErrorForNodesModification")
__all__.append("CoordinateDisplacementNodesModification")
__all__.append("BendFieldNodesModification")
__all__.append("LongitudinalDisplacementNodesModification")
__all__.append("StraightRotationZ_NodesModification")
__all__.append("StraightRotationX_NodesModification")
__all__.append("StraightRotationY_NodesModification")
__all__.append("QuadFieldsErrorsDeployment")
|
{
"content_hash": "55806294b6d6bd7674ae2e3232f3ce28",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 95,
"avg_line_length": 55.58536585365854,
"alnum_prop": 0.8306274681878016,
"repo_name": "PyORBIT-Collaboration/py-orbit",
"id": "6d0fde621a7ae2a072a36c90efcc06f4292c951c",
"size": "2377",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "py/orbit/py_linac/lattice_modifications/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "1859113"
},
{
"name": "Dockerfile",
"bytes": "232"
},
{
"name": "Makefile",
"bytes": "13194"
},
{
"name": "Python",
"bytes": "990624"
},
{
"name": "Shell",
"bytes": "2850"
}
],
"symlink_target": ""
}
|
"""
WSGI config for shoreline project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shoreline.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
{
"content_hash": "d88bac63e80413c4abbd0c948e8f0eb8",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 78,
"avg_line_length": 28.071428571428573,
"alnum_prop": 0.7786259541984732,
"repo_name": "Eylrid/shoreline",
"id": "f9b32d2a2a4b7c14318b8aaf3f2fbaf3d66f5b37",
"size": "393",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "shoreline/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2970"
}
],
"symlink_target": ""
}
|
import os
import shutil
import StringIO
import subprocess
import tempfile
import testtools
from os_testr.tests import base
DEVNULL = open(os.devnull, 'wb')
class TestReturnCodes(base.TestCase):
def setUp(self):
super(TestReturnCodes, self).setUp()
# Setup test dirs
self.directory = tempfile.mkdtemp(prefix='ostestr-unit')
self.addCleanup(shutil.rmtree, self.directory)
self.test_dir = os.path.join(self.directory, 'tests')
os.mkdir(self.test_dir)
# Setup Test files
self.testr_conf_file = os.path.join(self.directory, '.testr.conf')
self.setup_cfg_file = os.path.join(self.directory, 'setup.cfg')
self.passing_file = os.path.join(self.test_dir, 'test_passing.py')
self.failing_file = os.path.join(self.test_dir, 'test_failing.py')
self.init_file = os.path.join(self.test_dir, '__init__.py')
self.setup_py = os.path.join(self.directory, 'setup.py')
shutil.copy('os_testr/tests/files/testr-conf', self.testr_conf_file)
shutil.copy('os_testr/tests/files/passing-tests', self.passing_file)
shutil.copy('os_testr/tests/files/failing-tests', self.failing_file)
shutil.copy('setup.py', self.setup_py)
shutil.copy('os_testr/tests/files/setup.cfg', self.setup_cfg_file)
shutil.copy('os_testr/tests/files/__init__.py', self.init_file)
self.stdout = StringIO.StringIO()
self.stderr = StringIO.StringIO()
# Change directory, run wrapper and check result
self.addCleanup(os.chdir, os.path.abspath(os.curdir))
os.chdir(self.directory)
def assertRunExit(self, cmd, expected, subunit=False):
p = subprocess.Popen(
"%s" % cmd, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
if not subunit:
self.assertEqual(
p.returncode, expected,
"Stdout: %s; Stderr: %s" % (out, err))
else:
self.assertEqual(p.returncode, expected,
"Expected return code: %s doesn't match actual "
"return code of: %s" % (expected, p.returncode))
def test_default_passing(self):
self.assertRunExit('ostestr --regex passing', 0)
def test_default_fails(self):
self.assertRunExit('ostestr', 1)
def test_default_passing_no_slowest(self):
self.assertRunExit('ostestr --no-slowest --regex passing', 0)
def test_default_fails_no_slowest(self):
self.assertRunExit('ostestr --no-slowest', 1)
def test_default_serial_passing(self):
self.assertRunExit('ostestr --serial --regex passing', 0)
def test_default_serial_fails(self):
self.assertRunExit('ostestr --serial', 1)
def test_testr_subunit_passing(self):
self.assertRunExit('ostestr --no-pretty --subunit --regex passing', 0,
subunit=True)
@testtools.skip('Skipped because of testrepository lp bug #1411804')
def test_testr_subunit_fails(self):
self.assertRunExit('ostestr --no-pretty --subunit', 1, subunit=True)
def test_testr_no_pretty_passing(self):
self.assertRunExit('ostestr --no-pretty --regex passing', 0)
def test_testr_no_pretty_fails(self):
self.assertRunExit('ostestr --no-pretty', 1)
def test_list(self):
self.assertRunExit('ostestr --list', 0)
|
{
"content_hash": "d23c01af34de545713eea95685a7e929",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 78,
"avg_line_length": 38.28888888888889,
"alnum_prop": 0.6329077190946024,
"repo_name": "mtreinish/os-testr",
"id": "591e4dd9d54a3a4cb2640f5838101bdff61859bd",
"size": "4080",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "os_testr/tests/test_return_codes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "50334"
}
],
"symlink_target": ""
}
|
import barrister
from bottle import run, post, request
from store import Store, RecordNotFound, UserDataInvalid, MaxTodosExceeded
from functools import wraps
import sys
class TodoManager(object):
def __init__(self, store):
self.store = store
def readTodos(self):
return self.store.get_all()
def createTodo(self, properties):
return self.store.save(properties)
def updateTodo(self, todo):
return self.store.update(todo['id'], todo)
def deleteTodo(self, todo):
return self.store.delete(todo['id'])
store = Store()
todo_manager = TodoManager(store)
contract = barrister.contract_from_file('../../todo_manager.v1.json')
server = barrister.Server(contract)
server.add_handler('TodoManager', todo_manager)
@post('/v1/todos')
def todos():
return server.call_json(request.body.read())
run(host='localhost', port=3000)
|
{
"content_hash": "737b1f31e393a67f4817cdf8322b0351",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 74,
"avg_line_length": 24.4,
"alnum_prop": 0.7259953161592506,
"repo_name": "laser/polyglot-distributed-systems",
"id": "41b66f6808265a2e520bc6b9ba65ed4088a15ba3",
"size": "877",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "servers/python/v1/server.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "9545"
},
{
"name": "HTML",
"bytes": "21989"
},
{
"name": "JavaScript",
"bytes": "5603"
},
{
"name": "PHP",
"bytes": "31602"
},
{
"name": "Python",
"bytes": "7231"
},
{
"name": "Ruby",
"bytes": "4073"
},
{
"name": "Shell",
"bytes": "378"
}
],
"symlink_target": ""
}
|
class Time(object):
def __init__(self, hour, minute, second):
self.hour = hour
self.minute = minute
self.second = second
def __str__(self):
return str(self.hour) + ":"+str(self.minute)+":"+str(self.second)
def __add__(self, other):
return Time(str(self.hour+other.hour) + ":" + str(self.minute+other.minute) + ":" + str(self.second+other.second)
time1 = Time(5, 32, 0)
time2 = Time(23, 11, 11)
print time1
print time2
print time1+time2
print str(time1.hour) + ":" + str(time1.minute) + ":" + str(time1.minute)
|
{
"content_hash": "bc3df86e2737c13d7069fed93ebc7cb2",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 121,
"avg_line_length": 29.736842105263158,
"alnum_prop": 0.6017699115044248,
"repo_name": "bensk/CS9",
"id": "cf4ed55d2a3a6c2ed64adbd5d2d970a4a6919203",
"size": "565",
"binary": false,
"copies": "3",
"ref": "refs/heads/gh-pages",
"path": "Code Examples/Methods.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "223049"
},
{
"name": "HTML",
"bytes": "42810"
},
{
"name": "JavaScript",
"bytes": "3384"
},
{
"name": "Jupyter Notebook",
"bytes": "3824"
},
{
"name": "Python",
"bytes": "65943"
},
{
"name": "Ruby",
"bytes": "1728"
}
],
"symlink_target": ""
}
|
"""Module for baked spitfire utility functions."""
try:
from spitfire.runtime import _baked # pylint: disable=g-import-not-at-top
except ImportError:
_baked = None
class _SanitizedPlaceholder(str):
"""Class that represents an already sanitized string.
SanitizedPlaceholder wraps another value to let the runtime know that this
does not need to filtered again.
"""
def __add__(self, other):
value = str.__add__(self, other)
if type(other) == _SanitizedPlaceholder:
value = _SanitizedPlaceholder(value)
return value
def __mod__(self, other):
value = str.__mod__(self, other)
if type(other) == _SanitizedPlaceholder:
value = _SanitizedPlaceholder(value)
return value
def _runtime_mark_as_sanitized(value, function):
"""Wrap a function's return value in a SanitizedPlaceholder.
This function is called often so it needs to be fast. This
function checks the skip_filter annotation on the function passed
in to determine if the value should be wrapped.
Args:
value: The value to be marked
function: The function to check for skip_filter
Returns:
Either a SanitizedPlaceholder object or the value passed in.
"""
if getattr(function, 'skip_filter', False):
if type(value) == str:
return _SanitizedPlaceholder(value)
return value
def _mark_as_sanitized(value):
"""Wrap a value in a SanitizedPlaceholder.
This function is called often so it needs to be fast.
Args:
value: The value to be marked
Returns:
Either a SanitizedPlaceholder object or the value passed in.
"""
# The if branch is going to be taken in most cases.
if type(value) == str:
return _SanitizedPlaceholder(value)
return value
if _baked is None:
SanitizedPlaceholder = _SanitizedPlaceholder
runtime_mark_as_sanitized = _runtime_mark_as_sanitized
mark_as_sanitized = _mark_as_sanitized
else:
# Use C.
SanitizedPlaceholder = _baked._SanitizedPlaceholder
runtime_mark_as_sanitized = _baked._runtime_mark_as_sanitized
mark_as_sanitized = _baked._mark_as_sanitized
|
{
"content_hash": "7c7a301b96f9ffde12ba7bf535d15826",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 78,
"avg_line_length": 30.027397260273972,
"alnum_prop": 0.6747262773722628,
"repo_name": "nicksay/spitfire",
"id": "bfc99482efdecf92c38c9ab240181ca21a6449ad",
"size": "2356",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "spitfire/runtime/baked.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "17906"
},
{
"name": "GAP",
"bytes": "22862"
},
{
"name": "HTML",
"bytes": "3348"
},
{
"name": "Makefile",
"bytes": "4260"
},
{
"name": "PHP",
"bytes": "1076"
},
{
"name": "Python",
"bytes": "371723"
},
{
"name": "Smarty",
"bytes": "124"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, url
urlpatterns = patterns('',
# users API
url(r'^download/$', 'django_irods.views.download'),
url(r'^list/$', 'django_irods.views.list'),
)
|
{
"content_hash": "febe5b81be88758c887d633bb129d941",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 55,
"avg_line_length": 21.555555555555557,
"alnum_prop": 0.6494845360824743,
"repo_name": "hydroshare/hydroshare_temp",
"id": "5ff59bb749c15c7282d2589e18746eabbffcd8ac",
"size": "194",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_irods/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "173515"
},
{
"name": "C++",
"bytes": "4136"
},
{
"name": "CSS",
"bytes": "228598"
},
{
"name": "CoffeeScript",
"bytes": "34267"
},
{
"name": "JavaScript",
"bytes": "736373"
},
{
"name": "Python",
"bytes": "1870088"
},
{
"name": "Shell",
"bytes": "5335"
},
{
"name": "XSLT",
"bytes": "790987"
}
],
"symlink_target": ""
}
|
from tempest.api.compute import base
from tempest import config
from tempest import test
CONF = config.CONF
class FixedIPsTestJson(base.BaseV2ComputeAdminTest):
@classmethod
def skip_checks(cls):
super(FixedIPsTestJson, cls).skip_checks()
if CONF.service_available.neutron:
msg = ("%s skipped as neutron is available" % cls.__name__)
raise cls.skipException(msg)
@classmethod
def setup_clients(cls):
super(FixedIPsTestJson, cls).setup_clients()
cls.client = cls.os_adm.fixed_ips_client
@classmethod
def resource_setup(cls):
super(FixedIPsTestJson, cls).resource_setup()
server = cls.create_test_server(wait_until='ACTIVE')
server = cls.servers_client.get_server(server['id'])
for ip_set in server['addresses']:
for ip in server['addresses'][ip_set]:
if ip['OS-EXT-IPS:type'] == 'fixed':
cls.ip = ip['addr']
break
if cls.ip:
break
@test.idempotent_id('16b7d848-2f7c-4709-85a3-2dfb4576cc52')
@test.services('network')
def test_list_fixed_ip_details(self):
fixed_ip = self.client.show_fixed_ip(self.ip)
self.assertEqual(fixed_ip['address'], self.ip)
@test.idempotent_id('5485077b-7e46-4cec-b402-91dc3173433b')
@test.services('network')
def test_set_reserve(self):
body = {"reserve": "None"}
self.client.reserve_fixed_ip(self.ip, body)
@test.idempotent_id('7476e322-b9ff-4710-bf82-49d51bac6e2e')
@test.services('network')
def test_set_unreserve(self):
body = {"unreserve": "None"}
self.client.reserve_fixed_ip(self.ip, body)
|
{
"content_hash": "7476a519394d4d3dbdbf8461cb5274b4",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 71,
"avg_line_length": 33.76470588235294,
"alnum_prop": 0.6236933797909407,
"repo_name": "yamt/tempest",
"id": "a65fda61fc38017f215b5f503da5224ab8f428ad",
"size": "2346",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tempest/api/compute/admin/test_fixed_ips.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2739641"
},
{
"name": "Shell",
"bytes": "8560"
}
],
"symlink_target": ""
}
|
error = {
'not_implemented': {
'err_code': 'not_implemented',
'err_description': 'Method is not implemented',
'err_resolution': 'Check your request.'
},
'not_json': {
'err_code': 'not_json',
'err_description': 'Request contains data in other than JSON format.',
'err_resolution': 'Check your request, or contact the developer.'
},
'no_request_data': {
'err_code': 'no_request_data',
'err_description': 'Request data is empty.',
'err_resolution': 'Check your request, or contact the developer.'
},
'trex_not_start': {
'err_code': 'trex_not_start',
'err_description': 'TRex could not start to generate traffic.',
'err_resolution': 'Check with developer team.'
},
'ascii_error': {
'err_code': 'ascii_error',
'err_description': 'TRex supports ASCII characters only.',
'err_resolution': 'Please verify input data and make sure it contains ASCII-compatible symbols only.'
},
'trex_already_running': {
'err_code': 'trex_already_running',
'err_description': 'TRex is running already.',
'err_resolution': 'Stop traffic, then try to start it again.'
},
'pps_must_be_positive': {
'err_code': 'pps_must_be_positive',
'err_description': 'PPS must have a positive value (>0). Traffic stopped.',
'err_resolution': 'Choose a positive value to start server.'
}
}
# Get an error details by its code
def get_error_message(code):
return error[code]
|
{
"content_hash": "67ed400aff823a2b4a9232d5bba5539c",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 109,
"avg_line_length": 37.11904761904762,
"alnum_prop": 0.6016677357280308,
"repo_name": "zverevalexei/trex-http-proxy",
"id": "001c1173d78d3d284b0bf298879b2a7b226790a6",
"size": "1559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "error_messages.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "110136"
},
{
"name": "Python",
"bytes": "7385638"
}
],
"symlink_target": ""
}
|
"""Tests for the diagnostics data provided by the PVOutput integration."""
from aiohttp import ClientSession
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
from tests.components.diagnostics import get_diagnostics_for_config_entry
async def test_diagnostics(
hass: HomeAssistant,
hass_client: ClientSession,
init_integration: MockConfigEntry,
):
"""Test diagnostics."""
assert await get_diagnostics_for_config_entry(
hass, hass_client, init_integration
) == {
"energy_consumption": 1000,
"energy_generation": 500,
"normalized_output": 0.5,
"power_consumption": 2500,
"power_generation": 1500,
"reported_date": "2021-12-29",
"reported_time": "22:37:00",
"temperature": 20.2,
"voltage": 220.5,
}
|
{
"content_hash": "a237666d2c55ec926951ecf70f9c41e6",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 74,
"avg_line_length": 30.142857142857142,
"alnum_prop": 0.6682464454976303,
"repo_name": "GenericStudent/home-assistant",
"id": "529e9b4e575b2c5ee77490caeffe894910c5f97d",
"size": "844",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "tests/components/pvoutput/test_diagnostics.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
}
|
import math
import imath
import IECore
import Gaffer
import GafferDispatch
class Wedge( GafferDispatch.TaskContextProcessor ) :
Mode = IECore.Enum.create( "FloatRange", "IntRange", "ColorRange", "FloatList", "IntList", "StringList" )
def __init__( self, name = "Wedge" ) :
GafferDispatch.TaskContextProcessor.__init__( self, name )
self["variable"] = Gaffer.StringPlug( defaultValue = "wedge:value" )
self["indexVariable"] = Gaffer.StringPlug( defaultValue = "wedge:index" )
self["mode"] = Gaffer.IntPlug(
defaultValue = int( self.Mode.FloatRange ),
minValue = int( self.Mode.FloatRange ),
maxValue = int( self.Mode.StringList ),
)
# float range
self["floatMin"] = Gaffer.FloatPlug( defaultValue = 0 )
self["floatMax"] = Gaffer.FloatPlug( defaultValue = 1 )
self["floatSteps"] = Gaffer.IntPlug( minValue = 2, defaultValue = 11 )
# int range
self["intMin"] = Gaffer.IntPlug( defaultValue = 0 )
self["intMax"] = Gaffer.IntPlug( defaultValue = 5 )
self["intStep"] = Gaffer.IntPlug( minValue = 1, defaultValue = 1 )
# color range
self["ramp"] = Gaffer.SplinefColor3fPlug(
defaultValue = IECore.SplinefColor3f(
IECore.CubicBasisf.catmullRom(),
(
( 0, imath.Color3f( 0 ) ),
( 0, imath.Color3f( 0 ) ),
( 1, imath.Color3f( 1 ) ),
( 1, imath.Color3f( 1 ) ),
)
)
)
self["colorSteps"] = Gaffer.IntPlug( defaultValue = 5, minValue = 2 )
# lists
self["floats"] = Gaffer.FloatVectorDataPlug( defaultValue = IECore.FloatVectorData() )
self["ints"] = Gaffer.IntVectorDataPlug( defaultValue = IECore.IntVectorData() )
self["strings"] = Gaffer.StringVectorDataPlug( defaultValue = IECore.StringVectorData() )
def values( self ) :
mode = self.Mode( self["mode"].getValue() )
if mode == self.Mode.FloatRange :
min = self["floatMin"].getValue()
max = self["floatMax"].getValue()
steps = self["floatSteps"].getValue()
values = []
for i in range( 0, steps ) :
t = float( i ) / ( steps - 1 )
values.append( min + t * ( max - min ) )
elif mode == self.Mode.IntRange :
min = self["intMin"].getValue()
max = self["intMax"].getValue()
step = self["intStep"].getValue()
if max < min :
min, max = max, min
if step == 0 :
raise RuntimeError( "Invalid step - step must not be 0" )
elif step < 0 :
step = -step
values = []
while True :
value = min + len( values ) * step
if value > max :
break
values.append( value )
elif mode == self.Mode.ColorRange :
spline = self["ramp"].getValue().spline()
steps = self["colorSteps"].getValue()
values = [ spline( i / float( steps - 1 ) ) for i in range( 0, steps ) ]
elif mode == self.Mode.FloatList :
values = self["floats"].getValue()
elif mode == self.Mode.IntList :
values = self["ints"].getValue()
elif mode == self.Mode.StringList :
values = self["strings"].getValue()
return values
def _processedContexts( self, context ) :
# make a context for each of the wedge values
variable = self["variable"].getValue()
indexVariable = self["indexVariable"].getValue()
contexts = []
for index, value in enumerate( self.values() ) :
contexts.append( Gaffer.Context( context ) )
contexts[-1][variable] = value
contexts[-1][indexVariable] = index
return contexts
IECore.registerRunTimeTyped( Wedge, typeName = "GafferDispatch::Wedge" )
|
{
"content_hash": "a48ca9ae69d68722af15c16f0ac3ee51",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 106,
"avg_line_length": 26.153846153846153,
"alnum_prop": 0.6444117647058824,
"repo_name": "ivanimanishi/gaffer",
"id": "b01e9fdae339c560bf340abc5d655463f713dc02",
"size": "5203",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/GafferDispatch/Wedge.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "39753"
},
{
"name": "C++",
"bytes": "6086015"
},
{
"name": "CMake",
"bytes": "83446"
},
{
"name": "CSS",
"bytes": "28027"
},
{
"name": "GLSL",
"bytes": "6236"
},
{
"name": "Python",
"bytes": "6120483"
},
{
"name": "Shell",
"bytes": "13049"
},
{
"name": "Slash",
"bytes": "2870"
}
],
"symlink_target": ""
}
|
from collections import defaultdict
from proboscis.asserts import Check
from proboscis.asserts import assert_equal
from proboscis.asserts import assert_not_equal
import unittest
from trove.common import utils
from trove.tests.config import CONFIG
from proboscis.dependencies import SkipTest
MESSAGE_QUEUE = defaultdict(list)
def create_usage_verifier():
return utils.import_object(CONFIG.usage_endpoint)
class UsageVerifier(object):
def clear_events(self):
"""Hook that is called to allow endpoints to clean up."""
pass
def check_message(self, resource_id, event_type, **attrs):
messages = self.get_messages(resource_id)
print("%s %s" % (messages, resource_id))
found = None
for message in messages:
if message['event_type'] == event_type:
found = message
assert_not_equal(found, None)
with Check() as check:
for key, value in attrs.iteritems():
check.equal(found[key], value)
def get_messages(self, resource_id, expected_messages=None):
global MESSAGE_QUEUE
import pprint
pprint.pprint(MESSAGE_QUEUE.items())
msgs = MESSAGE_QUEUE.get(resource_id, [])
if expected_messages is not None:
assert_equal(len(msgs), expected_messages)
return msgs
class FakeVerifier(object):
"""This is the default handler in fake mode, it is basically a no-op."""
def clear_event(self):
pass
def check_message(self, *args, **kwargs):
raise SkipTest("Notifications not available")
def get_messages(self, *args, **kwargs):
pass
def notify(context, message):
"""Simple test notify function which saves the messages to global list."""
print('Received Usage Notification: %s' % message)
payload = message.get('payload', None)
payload['event_type'] = message['event_type']
resource_id = payload['instance_id']
global MESSAGE_QUEUE
MESSAGE_QUEUE[resource_id].append(payload)
|
{
"content_hash": "1d5cc7ec00bda40f7de2b94c53031512",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 78,
"avg_line_length": 30.696969696969695,
"alnum_prop": 0.6643632773938796,
"repo_name": "citrix-openstack-build/trove",
"id": "4a784e28ea780b5b619036da36f2fff926851122",
"size": "2708",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "trove/tests/util/usage.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19900"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "1725275"
},
{
"name": "Shell",
"bytes": "5512"
}
],
"symlink_target": ""
}
|
"""storage plugin."""
import gettext
from otopi import plugin, util
from ovirt_engine_setup import constants as osetupcons
from ovirt_engine_setup import dialog
from ovirt_engine_setup.engine import constants as oenginecons
from ovirt_engine_setup.engine import vdcoption
from ovirt_engine_setup.engine_common import constants as oengcommcons
def _(m):
return gettext.dgettext(message=m, domain='ovirt-engine-setup')
SAN_WIPE_AFTER_DELETE = osetupcons.ConfigEnv.SAN_WIPE_AFTER_DELETE
@util.export
class Plugin(plugin.PluginBase):
"""storage plugin."""
@plugin.event(
stage=plugin.Stages.STAGE_INIT,
)
def _init(self):
self.environment.setdefault(SAN_WIPE_AFTER_DELETE, None)
@plugin.event(
stage=plugin.Stages.STAGE_CUSTOMIZATION,
before=(
oengcommcons.Stages.DIALOG_TITLES_E_STORAGE,
),
after=(
oengcommcons.Stages.DIALOG_TITLES_S_STORAGE,
),
condition=lambda self: self._enableCondition(),
)
def _configureSANWipeAfterDelete(self):
if self.environment[SAN_WIPE_AFTER_DELETE] is None:
# Value for SAN_WIPE_AFTER_DELETE is not forced.
sanWipeAfterDelete = dialog.queryBoolean(
dialog=self.dialog,
name='OVESETUP_CONFIG_SAN_WIPE_AFTER_DELETE',
note=_(
'Default SAN wipe after delete '
'(@VALUES@) [@DEFAULT@]: '
),
prompt=True,
default=False,
)
self.environment[SAN_WIPE_AFTER_DELETE] = sanWipeAfterDelete
@plugin.event(
stage=plugin.Stages.STAGE_MISC,
after=(
oengcommcons.Stages.DB_CONNECTION_AVAILABLE,
),
condition=lambda self: self._enableCondition(),
)
def _updateSANWipeAfterDelete(self):
option = vdcoption.VdcOption(
statement=self.environment[oenginecons.EngineDBEnv.STATEMENT]
)
options = (
{
'name': 'SANWipeAfterDelete',
'value': self.environment[SAN_WIPE_AFTER_DELETE],
},
)
option.updateVdcOptions(options=options,)
def _enableCondition(self):
# Returns a condition that validates
# we are installing the engine and we are running the setup
# on a new database.
return (
self.environment[oenginecons.CoreEnv.ENABLE] and
self.environment[oenginecons.EngineDBEnv.NEW_DATABASE]
)
# vim: expandtab tabstop=4 shiftwidth=4
|
{
"content_hash": "0d070bcf389b7c6f288241461d1a411e",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 73,
"avg_line_length": 30.127906976744185,
"alnum_prop": 0.6171362408336549,
"repo_name": "yapengsong/ovirt-engine",
"id": "1eb509dbc5561151372ec808fa19c08401be7f59",
"size": "3221",
"binary": false,
"copies": "5",
"ref": "refs/heads/eayunos-4.2",
"path": "packaging/setup/plugins/ovirt-engine-setup/ovirt-engine/config/storage.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "68312"
},
{
"name": "HTML",
"bytes": "16218"
},
{
"name": "Java",
"bytes": "35067557"
},
{
"name": "JavaScript",
"bytes": "69948"
},
{
"name": "Makefile",
"bytes": "24723"
},
{
"name": "PLSQL",
"bytes": "533"
},
{
"name": "PLpgSQL",
"bytes": "796728"
},
{
"name": "Python",
"bytes": "970860"
},
{
"name": "Roff",
"bytes": "10764"
},
{
"name": "Shell",
"bytes": "163853"
},
{
"name": "XSLT",
"bytes": "54683"
}
],
"symlink_target": ""
}
|
from absl import logging
import tensorflow as tf
import tensorflow_datasets as tfds
from cold_posterior_flax.cifar10 import train
from cold_posterior_flax.cifar10.configs import default
class TrainTest(tf.test.TestCase):
"""Test cases for CIFAR10."""
def test_train_sgmcmc(self):
config = default.get_config()
config.algorithm = 'sgmcmc'
config.optimizer = 'sym_euler'
config.arch = 'wrn8_1'
config.batch_size = 2
config.num_epochs = 1
# TODO(basv): include evaluation in testing (mock_data is preventing this).
config.do_eval = False
workdir = self.create_tempdir().full_path
with tfds.testing.mock_data(num_examples=1):
train.train_and_evaluate(config, workdir)
logging.info('workdir content: %s', tf.io.gfile.listdir(workdir))
if __name__ == '__main__':
tf.test.main()
|
{
"content_hash": "23fefa5a31ac693058a6603caf8348b6",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 79,
"avg_line_length": 29.785714285714285,
"alnum_prop": 0.7014388489208633,
"repo_name": "google-research/google-research",
"id": "ff1f5520cda2f4c3f2cc065bcc89cc0a588e80b3",
"size": "1442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cold_posterior_flax/cifar10/train_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9817"
},
{
"name": "C++",
"bytes": "4166670"
},
{
"name": "CMake",
"bytes": "6412"
},
{
"name": "CSS",
"bytes": "27092"
},
{
"name": "Cuda",
"bytes": "1431"
},
{
"name": "Dockerfile",
"bytes": "7145"
},
{
"name": "Gnuplot",
"bytes": "11125"
},
{
"name": "HTML",
"bytes": "77599"
},
{
"name": "ImageJ Macro",
"bytes": "50488"
},
{
"name": "Java",
"bytes": "487585"
},
{
"name": "JavaScript",
"bytes": "896512"
},
{
"name": "Julia",
"bytes": "67986"
},
{
"name": "Jupyter Notebook",
"bytes": "71290299"
},
{
"name": "Lua",
"bytes": "29905"
},
{
"name": "MATLAB",
"bytes": "103813"
},
{
"name": "Makefile",
"bytes": "5636"
},
{
"name": "NASL",
"bytes": "63883"
},
{
"name": "Perl",
"bytes": "8590"
},
{
"name": "Python",
"bytes": "53790200"
},
{
"name": "R",
"bytes": "101058"
},
{
"name": "Roff",
"bytes": "1208"
},
{
"name": "Rust",
"bytes": "2389"
},
{
"name": "Shell",
"bytes": "730444"
},
{
"name": "Smarty",
"bytes": "5966"
},
{
"name": "Starlark",
"bytes": "245038"
}
],
"symlink_target": ""
}
|
import os
import json
from datetime import datetime
from urlparse import urlparse
from urllib2 import unquote
from peewee import Field, Model, Proxy, PrimaryKeyField, CharField,\
ForeignKeyField, BooleanField, IntegerField, SelectQuery, DateTimeField
from tornado.web import HTTPError
from ..extension import task_scheduler
database_proxy = Proxy()
class JSONField(Field):
db_field = 'text'
_json_type_ = None
def db_value(self, value):
if value:
if not isinstance(value, self._json_type_):
raise RuntimeError()
else:
value = self._json_type_()
return json.dumps(value)
def python_value(self, value):
if not value:
return self._json_type_()
value = json.loads(value)
if not isinstance(value, self._json_type_):
value = self._json_type_()
return value
class JSONListField(JSONField):
_json_type_ = list
class JSONDictField(JSONField):
_json_type_ = dict
def property_to_json(property_value, is_detail=False,
properties=None):
if isinstance(property_value, datetime):
property_value = property_value.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
elif isinstance(property_value, ResourceModel):
property_value = property_value.get_json(is_detail, properties)
elif isinstance(property_value, Exception):
property_value = str(property_value)
if isinstance(property_value, list) or isinstance(property_value, set) or\
isinstance(property_value, SelectQuery):
property_value = list_property_to_json(property_value, is_detail,
properties)
elif isinstance(property_value, dict):
property_value = dict_property_to_json(property_value, is_detail,
properties)
return property_value
def list_property_to_json(list_property_value, is_detail=False,
properties=None):
json_value = []
for value in list_property_value:
json_value.append(property_to_json(value, is_detail, properties))
return json_value
def dict_property_to_json(dict_property_value, is_detail=False,
properties=None):
json_value = {}
for key, value in dict_property_value.items():
json_value[key] = property_to_json(value, is_detail, properties)
return json_value
class ResourceModel(Model):
id = PrimaryKeyField()
_properties_ = []
_detail_properties_ = []
class Meta:
database = database_proxy
order_by = ['-id']
@classmethod
def get_with_validate(cls, *query, **kwargs):
try:
resource = super(ResourceModel, cls).get(*query, **kwargs)
except cls.DoesNotExist:
raise HTTPError(404)
return resource
def get_json(self, is_detail, properties=None):
if not properties:
if is_detail:
properties = self._detail_properties_
else:
properties = self._properties_
json_value = {}
for property_name in properties:
if not hasattr(self, property_name):
continue
property_value = getattr(self, property_name)
json_value[property_name] = property_to_json(property_value,
is_detail)
return json_value
@property
def json(self):
return self.get_json(False)
@property
def detail_json(self):
return self.get_json(True)
_xunlei_lixian_path_ = os.path.join(os.path.dirname(os.path.dirname(
os.path.dirname(__file__))),
'xunlei-lixian', 'lixian_cli.py')
class Setting(ResourceModel):
_properties_ = ['xunlei_username', 'xunlei_password', 'aria2_rpc_path',
'aria2_opts', 'output_dir', 'auto_download']
xunlei_username = CharField(default="")
xunlei_password = CharField(default="")
aria2_rpc_path = CharField(default='http://localhost:6800/rpc')
aria2_opts = CharField(null=True)
output_dir = CharField(null=True)
auto_download = BooleanField(default=True)
def reconfig(self):
os.system('python {0} config username "{1}"'.
format(_xunlei_lixian_path_, self.xunlei_username))
os.system('python {0} config password "{1}"'.
format(_xunlei_lixian_path_, self.xunlei_password))
os.system('python {0} config tool aria2-rpc'.
format(_xunlei_lixian_path_))
os.system('python {0} config -- aria2-rpc "{1}"'.
format(_xunlei_lixian_path_, self.aria2_rpc_path))
if self.aria2_opts:
os.system('python {0} config -- aria2c-rpc-opts "{1}"'.
format(_xunlei_lixian_path_, self.aria2_opts))
if self.output_dir:
os.system('python {0} config output-dir "{1}"'.
format(_xunlei_lixian_path_, self.output_dir))
os.system('python {0} config continue'.
format(_xunlei_lixian_path_))
os.system('python {0} config delete'.
format(_xunlei_lixian_path_))
class TVShow(ResourceModel):
_properties_ = ['id', 'title', 'episodes_count']
_detail_properties_ = ['id', 'title', 'url', 'blob', 'allow_repeat',
'episodes', 'chinese_only', 'season',
'refresh_interval', 'paused']
_chinese_indicators_ = [u'中', u'双语']
class Meta:
order_by = ['-updated', '-id']
url = CharField(max_length=2048, unique=True, index=True)
season = IntegerField(null=True)
refresh_interval = IntegerField(null=True)
title = CharField(index=True)
blob = CharField()
chinese_only = BooleanField(default=True)
allow_repeat = BooleanField(default=False)
plugins = JSONListField(null=True)
updated = DateTimeField(null=True, index=True)
def __setattr__(self, key, value):
if key == 'url':
if value.find('http://') != 0 and value.find('https://') != 0:
value = 'http://' + value
parts = urlparse(value)
host = ''.join([x for x in parts.hostname.split('.')[0:-1]
if x != 'www'])
resource = parts.path.split('/')[-1]
if not self.title:
self.title = ':'.join([host, resource])
super(TVShow, self).__setattr__(key, value)
def is_valid_episode(self, episode_title):
if self.chinese_only:
has_chinese = False
for chinese_indicator in self._chinese_indicators_:
if episode_title.find(chinese_indicator) != -1:
has_chinese = True
break
if not has_chinese:
return False
if self.blob and episode_title.find(self.blob) == -1:
return False
return True
@property
def paused(self):
jobs = task_scheduler.get_jobs()
for job in jobs:
if str(self.id) == str(job.args[0]):
return False
return True
@property
def episodes_count(self):
return self.episodes.count()
class Episode(ResourceModel):
_properties_ = ['id', 'title', 'season', 'episode', 'ed2k', 'magnet',
'is_downloaded']
_detail_properties_ = _properties_
_format_priorities_ = ['1080p', '720p', 'hdtv', 'hr-hdtv', 'mp4']
class Meta:
order_by = ['-season', '-episode', '-updated', '-id']
tvshow = ForeignKeyField(TVShow, related_name='episodes')
title = CharField()
season = IntegerField(null=True, index=True)
episode = IntegerField(null=True, index=True)
unique = CharField(null=True, unique=True)
ed2k = CharField(null=True, max_length=2048)
ed2k_unique = CharField(null=True, unique=True)
magnet = CharField(null=True, max_length=2048)
magnet_unique = CharField(null=True, unique=True)
updated = DateTimeField(null=True, index=True)
def __init__(self, *args, **kwargs):
super(Episode, self).__init__(*args, **kwargs)
if kwargs:
self.season = kwargs['season']
self.episode = kwargs['episode']
self.ed2k = kwargs['ed2k']
self.magnet = kwargs['magnet']
self.updated = datetime.utcnow()
@property
def is_downloaded(self):
setting = Setting.get()
file_name = self.ed2k.split('|')[2]
if isinstance(file_name, unicode):
file_name = file_name.encode('utf-8')
file_name = unquote(file_name).decode('utf-8')
file_path = os.path.join(setting.output_dir, file_name)
return os.path.exists(file_path)
def __setattr__(self, key, value):
if value:
try:
self.tvshow
except TVShow.DoesNotExist:
pass
else:
if key == 'season' or key == 'episode':
if key == 'season':
season = value
episode = self.episode
else:
season = self.season
episode = value
if not season:
season = ''
if not episode:
episode = ''
if season or episode and not self.tvshow.allow_repeat:
self.unique = u'{0}:{1}:{2}'.format(self.tvshow.id,
season, episode)
elif key == 'ed2k':
self.ed2k_unique = u'{0}:{1}'.format(self.tvshow.id, value)
elif key == 'magnet':
self.magnet_unique = u'{0}:{1}'.format(self.tvshow.id,
value)
super(Episode, self).__setattr__(key, value)
def update_from(self, episode):
if self.ed2k == episode.ed2k and\
self.magnet == episode.magnet:
return False
need_update = False
left_lower_title = self.title.lower()
right_lower_title = episode.title.lower()
for priority in self._format_priorities_:
left = left_lower_title.find(priority) != -1
right = right_lower_title.find(priority) != -1
if left and not right:
return False
if right and not left:
print(left_lower_title, right_lower_title, priority)
need_update = True
break
if need_update:
self.title = episode.title
self.ed2k = episode.ed2k
self.magnet = episode.magnet
self.updated = datetime.utcnow()
return need_update
|
{
"content_hash": "d43ba0897e92329ba3d8d8386ebc0bef",
"timestamp": "",
"source": "github",
"line_count": 313,
"max_line_length": 79,
"avg_line_length": 34.90415335463259,
"alnum_prop": 0.5522196796338673,
"repo_name": "exherb/tvee",
"id": "d0d8c8062f71d5b4c785cb76c1e7e4a4a7c074f1",
"size": "10969",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tvee/models/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CoffeeScript",
"bytes": "11586"
},
{
"name": "JavaScript",
"bytes": "15440"
},
{
"name": "Python",
"bytes": "29449"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
}
|
"""This example gets all teams that you belong to. The statement retrieves up to
the maximum page size limit of 500. To create a team, run create_team.py."""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
team_service = client.GetService(
'TeamService', 'https://www.google.com', 'v201203')
user_service = client.GetService(
'UserService', 'https://www.google.com', 'v201203')
# Get current user's teams.
team_ids = user_service.GetCurrentUser()[0]['teamIds']
# Create a statement to only select teams by their IDs.
filter_statement = {'query': 'WHERE id IN (%s) LIMIT 500' % ','.join(team_ids)}
# Get teams by statement.
response = team_service.GetTeamsByStatement(filter_statement)[0]
teams = []
if 'results' in response:
teams = response['results']
# Display results.
for team in teams:
print ('Team with id \'%s\' and name \'%s\' was found.'
% (team['id'], team['name']))
print
print 'Number of results found: %s' % len(teams)
|
{
"content_hash": "11c7387d516aa2eb869c987a1d18a1ed",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 80,
"avg_line_length": 31.363636363636363,
"alnum_prop": 0.6855072463768116,
"repo_name": "donspaulding/adspygoogle",
"id": "6195e383d4e10d04275083d102c5e09316710c68",
"size": "1998",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/adspygoogle/dfp/v201203/get_teams_by_statement.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3734067"
},
{
"name": "Shell",
"bytes": "603"
}
],
"symlink_target": ""
}
|
import unittest
import pickle
import sys
from test import support
py_operator = support.import_fresh_module('operator', blocked=['_operator'])
c_operator = support.import_fresh_module('operator', fresh=['_operator'])
class Seq1:
def __init__(self, lst):
self.lst = lst
def __len__(self):
return len(self.lst)
def __getitem__(self, i):
return self.lst[i]
def __add__(self, other):
return self.lst + other.lst
def __mul__(self, other):
return self.lst * other
def __rmul__(self, other):
return other * self.lst
class Seq2(object):
def __init__(self, lst):
self.lst = lst
def __len__(self):
return len(self.lst)
def __getitem__(self, i):
return self.lst[i]
def __add__(self, other):
return self.lst + other.lst
def __mul__(self, other):
return self.lst * other
def __rmul__(self, other):
return other * self.lst
class OperatorTestCase:
def test_lt(self):
operator = self.module
self.assertRaises(TypeError, operator.lt)
self.assertRaises(TypeError, operator.lt, 1j, 2j)
self.assertFalse(operator.lt(1, 0))
self.assertFalse(operator.lt(1, 0.0))
self.assertFalse(operator.lt(1, 1))
self.assertFalse(operator.lt(1, 1.0))
self.assertTrue(operator.lt(1, 2))
self.assertTrue(operator.lt(1, 2.0))
def test_le(self):
operator = self.module
self.assertRaises(TypeError, operator.le)
self.assertRaises(TypeError, operator.le, 1j, 2j)
self.assertFalse(operator.le(1, 0))
self.assertFalse(operator.le(1, 0.0))
self.assertTrue(operator.le(1, 1))
self.assertTrue(operator.le(1, 1.0))
self.assertTrue(operator.le(1, 2))
self.assertTrue(operator.le(1, 2.0))
def test_eq(self):
operator = self.module
class C(object):
def __eq__(self, other):
raise SyntaxError
self.assertRaises(TypeError, operator.eq)
self.assertRaises(SyntaxError, operator.eq, C(), C())
self.assertFalse(operator.eq(1, 0))
self.assertFalse(operator.eq(1, 0.0))
self.assertTrue(operator.eq(1, 1))
self.assertTrue(operator.eq(1, 1.0))
self.assertFalse(operator.eq(1, 2))
self.assertFalse(operator.eq(1, 2.0))
def test_ne(self):
operator = self.module
class C(object):
def __ne__(self, other):
raise SyntaxError
self.assertRaises(TypeError, operator.ne)
self.assertRaises(SyntaxError, operator.ne, C(), C())
self.assertTrue(operator.ne(1, 0))
self.assertTrue(operator.ne(1, 0.0))
self.assertFalse(operator.ne(1, 1))
self.assertFalse(operator.ne(1, 1.0))
self.assertTrue(operator.ne(1, 2))
self.assertTrue(operator.ne(1, 2.0))
def test_ge(self):
operator = self.module
self.assertRaises(TypeError, operator.ge)
self.assertRaises(TypeError, operator.ge, 1j, 2j)
self.assertTrue(operator.ge(1, 0))
self.assertTrue(operator.ge(1, 0.0))
self.assertTrue(operator.ge(1, 1))
self.assertTrue(operator.ge(1, 1.0))
self.assertFalse(operator.ge(1, 2))
self.assertFalse(operator.ge(1, 2.0))
def test_gt(self):
operator = self.module
self.assertRaises(TypeError, operator.gt)
self.assertRaises(TypeError, operator.gt, 1j, 2j)
self.assertTrue(operator.gt(1, 0))
self.assertTrue(operator.gt(1, 0.0))
self.assertFalse(operator.gt(1, 1))
self.assertFalse(operator.gt(1, 1.0))
self.assertFalse(operator.gt(1, 2))
self.assertFalse(operator.gt(1, 2.0))
def test_abs(self):
operator = self.module
self.assertRaises(TypeError, operator.abs)
self.assertRaises(TypeError, operator.abs, None)
self.assertEqual(operator.abs(-1), 1)
self.assertEqual(operator.abs(1), 1)
def test_add(self):
operator = self.module
self.assertRaises(TypeError, operator.add)
self.assertRaises(TypeError, operator.add, None, None)
self.assertTrue(operator.add(3, 4) == 7)
def test_bitwise_and(self):
operator = self.module
self.assertRaises(TypeError, operator.and_)
self.assertRaises(TypeError, operator.and_, None, None)
self.assertTrue(operator.and_(0xf, 0xa) == 0xa)
def test_concat(self):
operator = self.module
self.assertRaises(TypeError, operator.concat)
self.assertRaises(TypeError, operator.concat, None, None)
self.assertTrue(operator.concat('py', 'thon') == 'python')
self.assertTrue(operator.concat([1, 2], [3, 4]) == [1, 2, 3, 4])
self.assertTrue(operator.concat(Seq1([5, 6]), Seq1([7])) == [5, 6, 7])
self.assertTrue(operator.concat(Seq2([5, 6]), Seq2([7])) == [5, 6, 7])
self.assertRaises(TypeError, operator.concat, 13, 29)
def test_countOf(self):
operator = self.module
self.assertRaises(TypeError, operator.countOf)
self.assertRaises(TypeError, operator.countOf, None, None)
self.assertTrue(operator.countOf([1, 2, 1, 3, 1, 4], 3) == 1)
self.assertTrue(operator.countOf([1, 2, 1, 3, 1, 4], 5) == 0)
def test_delitem(self):
operator = self.module
a = [4, 3, 2, 1]
self.assertRaises(TypeError, operator.delitem, a)
self.assertRaises(TypeError, operator.delitem, a, None)
self.assertTrue(operator.delitem(a, 1) is None)
self.assertTrue(a == [4, 2, 1])
def test_floordiv(self):
operator = self.module
self.assertRaises(TypeError, operator.floordiv, 5)
self.assertRaises(TypeError, operator.floordiv, None, None)
self.assertTrue(operator.floordiv(5, 2) == 2)
def test_truediv(self):
operator = self.module
self.assertRaises(TypeError, operator.truediv, 5)
self.assertRaises(TypeError, operator.truediv, None, None)
self.assertTrue(operator.truediv(5, 2) == 2.5)
def test_getitem(self):
operator = self.module
a = range(10)
self.assertRaises(TypeError, operator.getitem)
self.assertRaises(TypeError, operator.getitem, a, None)
self.assertTrue(operator.getitem(a, 2) == 2)
def test_indexOf(self):
operator = self.module
self.assertRaises(TypeError, operator.indexOf)
self.assertRaises(TypeError, operator.indexOf, None, None)
self.assertTrue(operator.indexOf([4, 3, 2, 1], 3) == 1)
self.assertRaises(ValueError, operator.indexOf, [4, 3, 2, 1], 0)
def test_invert(self):
operator = self.module
self.assertRaises(TypeError, operator.invert)
self.assertRaises(TypeError, operator.invert, None)
self.assertEqual(operator.inv(4), -5)
def test_lshift(self):
operator = self.module
self.assertRaises(TypeError, operator.lshift)
self.assertRaises(TypeError, operator.lshift, None, 42)
self.assertTrue(operator.lshift(5, 1) == 10)
self.assertTrue(operator.lshift(5, 0) == 5)
self.assertRaises(ValueError, operator.lshift, 2, -1)
def test_mod(self):
operator = self.module
self.assertRaises(TypeError, operator.mod)
self.assertRaises(TypeError, operator.mod, None, 42)
self.assertTrue(operator.mod(5, 2) == 1)
def test_mul(self):
operator = self.module
self.assertRaises(TypeError, operator.mul)
self.assertRaises(TypeError, operator.mul, None, None)
self.assertTrue(operator.mul(5, 2) == 10)
def test_matmul(self):
operator = self.module
self.assertRaises(TypeError, operator.matmul)
self.assertRaises(TypeError, operator.matmul, 42, 42)
class M:
def __matmul__(self, other):
return other - 1
self.assertEqual(M() @ 42, 41)
def test_neg(self):
operator = self.module
self.assertRaises(TypeError, operator.neg)
self.assertRaises(TypeError, operator.neg, None)
self.assertEqual(operator.neg(5), -5)
self.assertEqual(operator.neg(-5), 5)
self.assertEqual(operator.neg(0), 0)
self.assertEqual(operator.neg(-0), 0)
def test_bitwise_or(self):
operator = self.module
self.assertRaises(TypeError, operator.or_)
self.assertRaises(TypeError, operator.or_, None, None)
self.assertTrue(operator.or_(0xa, 0x5) == 0xf)
def test_pos(self):
operator = self.module
self.assertRaises(TypeError, operator.pos)
self.assertRaises(TypeError, operator.pos, None)
self.assertEqual(operator.pos(5), 5)
self.assertEqual(operator.pos(-5), -5)
self.assertEqual(operator.pos(0), 0)
self.assertEqual(operator.pos(-0), 0)
def test_pow(self):
operator = self.module
self.assertRaises(TypeError, operator.pow)
self.assertRaises(TypeError, operator.pow, None, None)
self.assertEqual(operator.pow(3,5), 3**5)
self.assertRaises(TypeError, operator.pow, 1)
self.assertRaises(TypeError, operator.pow, 1, 2, 3)
def test_rshift(self):
operator = self.module
self.assertRaises(TypeError, operator.rshift)
self.assertRaises(TypeError, operator.rshift, None, 42)
self.assertTrue(operator.rshift(5, 1) == 2)
self.assertTrue(operator.rshift(5, 0) == 5)
self.assertRaises(ValueError, operator.rshift, 2, -1)
def test_contains(self):
operator = self.module
self.assertRaises(TypeError, operator.contains)
self.assertRaises(TypeError, operator.contains, None, None)
self.assertTrue(operator.contains(range(4), 2))
self.assertFalse(operator.contains(range(4), 5))
def test_setitem(self):
operator = self.module
a = list(range(3))
self.assertRaises(TypeError, operator.setitem, a)
self.assertRaises(TypeError, operator.setitem, a, None, None)
self.assertTrue(operator.setitem(a, 0, 2) is None)
self.assertTrue(a == [2, 1, 2])
self.assertRaises(IndexError, operator.setitem, a, 4, 2)
def test_sub(self):
operator = self.module
self.assertRaises(TypeError, operator.sub)
self.assertRaises(TypeError, operator.sub, None, None)
self.assertTrue(operator.sub(5, 2) == 3)
def test_truth(self):
operator = self.module
class C(object):
def __bool__(self):
raise SyntaxError
self.assertRaises(TypeError, operator.truth)
self.assertRaises(SyntaxError, operator.truth, C())
self.assertTrue(operator.truth(5))
self.assertTrue(operator.truth([0]))
self.assertFalse(operator.truth(0))
self.assertFalse(operator.truth([]))
def test_bitwise_xor(self):
operator = self.module
self.assertRaises(TypeError, operator.xor)
self.assertRaises(TypeError, operator.xor, None, None)
self.assertTrue(operator.xor(0xb, 0xc) == 0x7)
def test_is(self):
operator = self.module
a = b = 'xyzpdq'
c = a[:3] + b[3:]
self.assertRaises(TypeError, operator.is_)
self.assertTrue(operator.is_(a, b))
self.assertFalse(operator.is_(a,c))
def test_is_not(self):
operator = self.module
a = b = 'xyzpdq'
c = a[:3] + b[3:]
self.assertRaises(TypeError, operator.is_not)
self.assertFalse(operator.is_not(a, b))
self.assertTrue(operator.is_not(a,c))
def test_attrgetter(self):
operator = self.module
class A:
pass
a = A()
a.name = 'arthur'
f = operator.attrgetter('name')
self.assertEqual(f(a), 'arthur')
f = operator.attrgetter('rank')
self.assertRaises(AttributeError, f, a)
self.assertRaises(TypeError, operator.attrgetter, 2)
self.assertRaises(TypeError, operator.attrgetter)
# multiple gets
record = A()
record.x = 'X'
record.y = 'Y'
record.z = 'Z'
self.assertEqual(operator.attrgetter('x','z','y')(record), ('X', 'Z', 'Y'))
self.assertRaises(TypeError, operator.attrgetter, ('x', (), 'y'))
class C(object):
def __getattr__(self, name):
raise SyntaxError
self.assertRaises(SyntaxError, operator.attrgetter('foo'), C())
# recursive gets
a = A()
a.name = 'arthur'
a.child = A()
a.child.name = 'thomas'
f = operator.attrgetter('child.name')
self.assertEqual(f(a), 'thomas')
self.assertRaises(AttributeError, f, a.child)
f = operator.attrgetter('name', 'child.name')
self.assertEqual(f(a), ('arthur', 'thomas'))
f = operator.attrgetter('name', 'child.name', 'child.child.name')
self.assertRaises(AttributeError, f, a)
f = operator.attrgetter('child.')
self.assertRaises(AttributeError, f, a)
f = operator.attrgetter('.child')
self.assertRaises(AttributeError, f, a)
a.child.child = A()
a.child.child.name = 'johnson'
f = operator.attrgetter('child.child.name')
self.assertEqual(f(a), 'johnson')
f = operator.attrgetter('name', 'child.name', 'child.child.name')
self.assertEqual(f(a), ('arthur', 'thomas', 'johnson'))
def test_itemgetter(self):
operator = self.module
a = 'ABCDE'
f = operator.itemgetter(2)
self.assertEqual(f(a), 'C')
f = operator.itemgetter(10)
self.assertRaises(IndexError, f, a)
class C(object):
def __getitem__(self, name):
raise SyntaxError
self.assertRaises(SyntaxError, operator.itemgetter(42), C())
f = operator.itemgetter('name')
self.assertRaises(TypeError, f, a)
self.assertRaises(TypeError, operator.itemgetter)
d = dict(key='val')
f = operator.itemgetter('key')
self.assertEqual(f(d), 'val')
f = operator.itemgetter('nonkey')
self.assertRaises(KeyError, f, d)
# example used in the docs
inventory = [('apple', 3), ('banana', 2), ('pear', 5), ('orange', 1)]
getcount = operator.itemgetter(1)
self.assertEqual(list(map(getcount, inventory)), [3, 2, 5, 1])
self.assertEqual(sorted(inventory, key=getcount),
[('orange', 1), ('banana', 2), ('apple', 3), ('pear', 5)])
# multiple gets
data = list(map(str, range(20)))
self.assertEqual(operator.itemgetter(2,10,5)(data), ('2', '10', '5'))
self.assertRaises(TypeError, operator.itemgetter(2, 'x', 5), data)
def test_methodcaller(self):
operator = self.module
self.assertRaises(TypeError, operator.methodcaller)
self.assertRaises(TypeError, operator.methodcaller, 12)
class A:
def foo(self, *args, **kwds):
return args[0] + args[1]
def bar(self, f=42):
return f
def baz(*args, **kwds):
return kwds['name'], kwds['self']
a = A()
f = operator.methodcaller('foo')
self.assertRaises(IndexError, f, a)
f = operator.methodcaller('foo', 1, 2)
self.assertEqual(f(a), 3)
f = operator.methodcaller('bar')
self.assertEqual(f(a), 42)
self.assertRaises(TypeError, f, a, a)
f = operator.methodcaller('bar', f=5)
self.assertEqual(f(a), 5)
f = operator.methodcaller('baz', name='spam', self='eggs')
self.assertEqual(f(a), ('spam', 'eggs'))
def test_inplace(self):
operator = self.module
class C(object):
def __iadd__ (self, other): return "iadd"
def __iand__ (self, other): return "iand"
def __ifloordiv__(self, other): return "ifloordiv"
def __ilshift__ (self, other): return "ilshift"
def __imod__ (self, other): return "imod"
def __imul__ (self, other): return "imul"
def __imatmul__ (self, other): return "imatmul"
def __ior__ (self, other): return "ior"
def __ipow__ (self, other): return "ipow"
def __irshift__ (self, other): return "irshift"
def __isub__ (self, other): return "isub"
def __itruediv__ (self, other): return "itruediv"
def __ixor__ (self, other): return "ixor"
def __getitem__(self, other): return 5 # so that C is a sequence
c = C()
self.assertEqual(operator.iadd (c, 5), "iadd")
self.assertEqual(operator.iand (c, 5), "iand")
self.assertEqual(operator.ifloordiv(c, 5), "ifloordiv")
self.assertEqual(operator.ilshift (c, 5), "ilshift")
self.assertEqual(operator.imod (c, 5), "imod")
self.assertEqual(operator.imul (c, 5), "imul")
self.assertEqual(operator.imatmul (c, 5), "imatmul")
self.assertEqual(operator.ior (c, 5), "ior")
self.assertEqual(operator.ipow (c, 5), "ipow")
self.assertEqual(operator.irshift (c, 5), "irshift")
self.assertEqual(operator.isub (c, 5), "isub")
self.assertEqual(operator.itruediv (c, 5), "itruediv")
self.assertEqual(operator.ixor (c, 5), "ixor")
self.assertEqual(operator.iconcat (c, c), "iadd")
def test_length_hint(self):
operator = self.module
class X(object):
def __init__(self, value):
self.value = value
def __length_hint__(self):
if type(self.value) is type:
raise self.value
else:
return self.value
self.assertEqual(operator.length_hint([], 2), 0)
self.assertEqual(operator.length_hint(iter([1, 2, 3])), 3)
self.assertEqual(operator.length_hint(X(2)), 2)
self.assertEqual(operator.length_hint(X(NotImplemented), 4), 4)
self.assertEqual(operator.length_hint(X(TypeError), 12), 12)
with self.assertRaises(TypeError):
operator.length_hint(X("abc"))
with self.assertRaises(ValueError):
operator.length_hint(X(-2))
with self.assertRaises(LookupError):
operator.length_hint(X(LookupError))
def test_dunder_is_original(self):
operator = self.module
names = [name for name in dir(operator) if not name.startswith('_')]
for name in names:
orig = getattr(operator, name)
dunder = getattr(operator, '__' + name.strip('_') + '__', None)
if dunder:
self.assertIs(dunder, orig)
class PyOperatorTestCase(OperatorTestCase, unittest.TestCase):
module = py_operator
@unittest.skipUnless(c_operator, 'requires _operator')
class COperatorTestCase(OperatorTestCase, unittest.TestCase):
module = c_operator
class OperatorPickleTestCase:
def copy(self, obj, proto):
with support.swap_item(sys.modules, 'operator', self.module):
pickled = pickle.dumps(obj, proto)
with support.swap_item(sys.modules, 'operator', self.module2):
return pickle.loads(pickled)
def test_attrgetter(self):
attrgetter = self.module.attrgetter
class A:
pass
a = A()
a.x = 'X'
a.y = 'Y'
a.z = 'Z'
a.t = A()
a.t.u = A()
a.t.u.v = 'V'
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
with self.subTest(proto=proto):
f = attrgetter('x')
f2 = self.copy(f, proto)
self.assertEqual(repr(f2), repr(f))
self.assertEqual(f2(a), f(a))
# multiple gets
f = attrgetter('x', 'y', 'z')
f2 = self.copy(f, proto)
self.assertEqual(repr(f2), repr(f))
self.assertEqual(f2(a), f(a))
# recursive gets
f = attrgetter('t.u.v')
f2 = self.copy(f, proto)
self.assertEqual(repr(f2), repr(f))
self.assertEqual(f2(a), f(a))
def test_itemgetter(self):
itemgetter = self.module.itemgetter
a = 'ABCDE'
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
with self.subTest(proto=proto):
f = itemgetter(2)
f2 = self.copy(f, proto)
self.assertEqual(repr(f2), repr(f))
self.assertEqual(f2(a), f(a))
# multiple gets
f = itemgetter(2, 0, 4)
f2 = self.copy(f, proto)
self.assertEqual(repr(f2), repr(f))
self.assertEqual(f2(a), f(a))
def test_methodcaller(self):
methodcaller = self.module.methodcaller
class A:
def foo(self, *args, **kwds):
return args[0] + args[1]
def bar(self, f=42):
return f
def baz(*args, **kwds):
return kwds['name'], kwds['self']
a = A()
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
with self.subTest(proto=proto):
f = methodcaller('bar')
f2 = self.copy(f, proto)
self.assertEqual(repr(f2), repr(f))
self.assertEqual(f2(a), f(a))
# positional args
f = methodcaller('foo', 1, 2)
f2 = self.copy(f, proto)
self.assertEqual(repr(f2), repr(f))
self.assertEqual(f2(a), f(a))
# keyword args
f = methodcaller('bar', f=5)
f2 = self.copy(f, proto)
self.assertEqual(repr(f2), repr(f))
self.assertEqual(f2(a), f(a))
f = methodcaller('baz', self='eggs', name='spam')
f2 = self.copy(f, proto)
# Can't test repr consistently with multiple keyword args
self.assertEqual(f2(a), f(a))
class PyPyOperatorPickleTestCase(OperatorPickleTestCase, unittest.TestCase):
module = py_operator
module2 = py_operator
@unittest.skipUnless(c_operator, 'requires _operator')
class PyCOperatorPickleTestCase(OperatorPickleTestCase, unittest.TestCase):
module = py_operator
module2 = c_operator
@unittest.skipUnless(c_operator, 'requires _operator')
class CPyOperatorPickleTestCase(OperatorPickleTestCase, unittest.TestCase):
module = c_operator
module2 = py_operator
@unittest.skipUnless(c_operator, 'requires _operator')
class CCOperatorPickleTestCase(OperatorPickleTestCase, unittest.TestCase):
module = c_operator
module2 = c_operator
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "5af0d5dfe15126770955ebb781d1acfe",
"timestamp": "",
"source": "github",
"line_count": 600,
"max_line_length": 83,
"avg_line_length": 38.16833333333334,
"alnum_prop": 0.585083620802585,
"repo_name": "juanyaw/python",
"id": "da9c8ef34f772280c58ad06c013b28f8b52caaad",
"size": "22901",
"binary": false,
"copies": "7",
"ref": "refs/heads/develop",
"path": "cpython/Lib/test/test_operator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "470920"
},
{
"name": "Batchfile",
"bytes": "35551"
},
{
"name": "C",
"bytes": "16518323"
},
{
"name": "C#",
"bytes": "1231"
},
{
"name": "C++",
"bytes": "343272"
},
{
"name": "CSS",
"bytes": "2839"
},
{
"name": "Common Lisp",
"bytes": "24481"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "Groff",
"bytes": "254942"
},
{
"name": "HTML",
"bytes": "130698"
},
{
"name": "JavaScript",
"bytes": "10616"
},
{
"name": "Makefile",
"bytes": "25026"
},
{
"name": "Objective-C",
"bytes": "1390263"
},
{
"name": "PostScript",
"bytes": "13803"
},
{
"name": "PowerShell",
"bytes": "1420"
},
{
"name": "Prolog",
"bytes": "557"
},
{
"name": "Python",
"bytes": "24911704"
},
{
"name": "R",
"bytes": "5378"
},
{
"name": "Shell",
"bytes": "437386"
},
{
"name": "TeX",
"bytes": "323102"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
}
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CourseDeadlines.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
{
"content_hash": "5c418dddf9c6fc5873b3325dada22e6b",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 79,
"avg_line_length": 26.22222222222222,
"alnum_prop": 0.7203389830508474,
"repo_name": "RedBulli/CourseDeadlines",
"id": "0452af3fb55c7b8de99a691b782ea87c0dac802c",
"size": "258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "158680"
},
{
"name": "JavaScript",
"bytes": "196393"
},
{
"name": "Python",
"bytes": "14339"
},
{
"name": "Scala",
"bytes": "232"
}
],
"symlink_target": ""
}
|
"""Main module."""
import sys
from dependency_injector.wiring import inject, Provide
from .services import UserService, AuthService, PhotoService
from .containers import Container
@inject
def main(
email: str,
password: str,
photo: str,
user_service: UserService = Provide[Container.user_service],
auth_service: AuthService = Provide[Container.auth_service],
photo_service: PhotoService = Provide[Container.photo_service],
) -> None:
user = user_service.get_user(email)
auth_service.authenticate(user, password)
photo_service.upload_photo(user, photo)
if __name__ == '__main__':
container = Container()
container.init_resources()
container.config.from_ini('config.ini')
container.wire(modules=[sys.modules[__name__]])
main(*sys.argv[1:])
|
{
"content_hash": "1c9fd022b0c1d4f3a1c9c056e0daa736",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 71,
"avg_line_length": 26.64516129032258,
"alnum_prop": 0.6803874092009685,
"repo_name": "rmk135/objects",
"id": "87ccf71532e07b2b88b0b53062561cfd2b400d49",
"size": "826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/miniapps/application-single-container/example/__main__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "171241"
}
],
"symlink_target": ""
}
|
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(18, GPIO.OUT)
GPIO.output(18, True)
|
{
"content_hash": "9e7658b130d3709335cb63495b84b668",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 24,
"avg_line_length": 19,
"alnum_prop": 0.7473684210526316,
"repo_name": "Erfa/sprak",
"id": "3f7f8e4998c617ab2730aac26d1b67c9dc877801",
"size": "95",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gpio.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7165"
},
{
"name": "Shell",
"bytes": "70"
}
],
"symlink_target": ""
}
|
from unittest import TestCase
from .util import (
connect_db,
destroy_db,
create_model
)
from .models import SimpleTestModel
class DatabaseCreationTestCase(TestCase):
def setUp(self):
self.connection = connect_db()
def tearDown(self):
destroy_db(self.connection)
def test_table_creation(self):
create_model(
self.connection,
SimpleTestModel
)
|
{
"content_hash": "5d380306b79d9404301eb7e21f3c4695",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 41,
"avg_line_length": 18.652173913043477,
"alnum_prop": 0.6386946386946387,
"repo_name": "sethdenner/djangocassandra",
"id": "43a0e1679985a9859e4ef8f012f485bab4da5aeb",
"size": "429",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_creation.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "94015"
}
],
"symlink_target": ""
}
|
from twisted.internet import defer
import re
import base
class BaseXmppParser(base.BaseParser):
def formatResult(self, request, result):
if not isinstance(result, dict):
return 'ERROR. Parser has got a strange shit from handler.'
ok = result.get('ok')
if ok is None:
return 'Result unknown.'
else:
fmt = result.get('format')
if fmt:
formatter = self.formatters.get(fmt)
if formatter:
return formatter(request, result)
desc = result.get('desc', '')
if ok:
return 'OK. ' + desc
else:
return 'ERROR. ' + desc
|
{
"content_hash": "67667f29cb88644f350df62fe4c3139d",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 71,
"avg_line_length": 28.56,
"alnum_prop": 0.5224089635854342,
"repo_name": "ojab/bnw",
"id": "f779d7206f199a52a24127b5eaf48382c35f6789",
"size": "739",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bnw/xmpp/parser_basexmpp.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "9310"
},
{
"name": "HTML",
"bytes": "29045"
},
{
"name": "JavaScript",
"bytes": "52028"
},
{
"name": "Python",
"bytes": "238111"
},
{
"name": "Shell",
"bytes": "2934"
}
],
"symlink_target": ""
}
|
from .child_package import *
|
{
"content_hash": "2c32dae6254edc56e5e28aaca12e1506",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 28,
"avg_line_length": 29,
"alnum_prop": 0.7586206896551724,
"repo_name": "kayhayen/Nuitka",
"id": "3de394cc75628d7547322633a7275eb4ef2ccf25",
"size": "798",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/programs/package_init_issue/some_package/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1868"
},
{
"name": "C",
"bytes": "617681"
},
{
"name": "C++",
"bytes": "149777"
},
{
"name": "Python",
"bytes": "6603718"
},
{
"name": "Shell",
"bytes": "1088"
}
],
"symlink_target": ""
}
|
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import os, time
import multiprocessing as mp
import logging
import logging.config
import Queue
from Queue import Empty
import json
import cgi # for parsing form-data
# -----------------------------------------------------------------------------
# import:
# o) global shared definitions
# o) necessary classes from "code" handling
import sys
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if parentdir not in os.sys.path:
os.sys.path.insert(0,parentdir)
from shared import *
from ModPython.pycode import PyCode
# -----------------------------------------------------------------------------
# configure logger
logging.config.fileConfig("%s/../logging.conf" % os.path.dirname(__file__))
def _fieldStorageToDict( fieldStorage ):
ret = {}
for key in fieldStorage.keys():
ret[ key ] = fieldStorage.getvalue(key) #fieldStorage[ key ].value
return ret
class EPHttpListener(object):
def __init__(self, context, name):
self.logger = logging.getLogger('maaps.ep.httplistener')
self.name = name
self.port = context.get('PORT' , 80)
self.address = context.get('ADDRESS', 'localhost')
self.path = context.get('PATH' , '/')
self.src = context.get('code' , None)
self.code = None
self.queue = mp.Queue()
if self.src is not None:
self.code = PyCode(self.name, 'inline', self.src)
# create server process
self.server = mp.Process(name = self.name,
target= self._run_server,
args = ()
)
# start server process
self.server.daemon = True
self.server.start()
def wait4data(self, runtime_context, timeout=None):
assert timeout > 0.0 or timeout is None
data = {}
try:
data = self.queue.get(timeout=timeout)
except Empty: # timeout happened
self.logger.error("%s: Timeout!", self.name)
self.logger.debug('data is: %r' % (data,))
runtime_context[CTX_PAYLOAD] = data
return data
def _run_server(self):
"called from multiprocess.Process"
self.logger.debug('Starting webserver %s:%s', self.address, self.port)
server = TheHTTPServer( (self.address, self.port),
EPHttpRequestHandler,
path_to_use = self.path,
message_queue= self.queue,
code = self.code
)
server.serve_forever()
def shutdown(self):
""" This terminates the entrypoint process!
Call this only on shutdown or bevor restarting the whole application!
"""
self.server.terminate()
self.logger.debug('%s: Wait %s after termination to shutdown.', self.name, WAIT_SECS_ON_SHUTDOWN)
time.sleep(WAIT_SECS_ON_SHUTDOWN)
return not self.server.is_alive()
class TheHTTPServer(HTTPServer, object):
def __init__(self, server_address, handler_class, path_to_use, message_queue, code = None):
super(TheHTTPServer, self).__init__(server_address, handler_class)
self.logger = logging.getLogger('maaps.ep.httplistener.instance')
self.communication_queue = message_queue
self.path = path_to_use
self.code = code
self.logger.info('Started HTTP with "http://%s:%s%s"', server_address[0], server_address[1], self.path)
class VerificationResult(object):
def __init__(self, response_code=200, response_text='OK'):
self.response_code = response_code
self.response_text = response_text
self.result_data = None
def __str__(self):
return "response_code: %s response_text: '%r' / data: '%r'" % (self.response_code, self.response_text, self.result_data,)
def __repr__(self):
return self.__str__()
class EPHttpRequestHandler(BaseHTTPRequestHandler):
def log_message(self, m_format, *args):
self.server.logger.info(('incoming %s' % (m_format,)) % args)
def verify(self, incoming, runtime_context):
''' If the server has a code object then it will be executed ...
'''
result = VerificationResult()
result.result_data = incoming
if self.server.code is None:
self.server.logger.debug('NO verify() code from user. Running without verification.')
result.response_code = 200
result.response_text = ''
else:
self.server.logger.info('#'*30)
exception_queue = Queue.Queue()
runtime_context[CTX_PAYLOAD] = result.response_text
self.server.code.run(exception_queue, runtime_context)
if not exception_queue.empty():
pass
else:
result.response_code = 500
result.result_data = exception_queue.get(block=False)
return result
def do_POST(self):
self.server.logger.debug('Header Keys: %s', self.headers.keys())
self.server.logger.debug('Header Type: %s', type(self.headers))
for key in self.headers.keys():
self.server.logger.debug("%-20s %s", key, self.headers[key])
# TODO(?): check self.path == self.server.path
runtime_context = create_global_context()
runtime_context[CTX_LOGGER] = self.server.logger
if self.headers['content-type'].startswith('application/x-www-form-urlencoded'):
# Parse the form data posted
form = cgi.FieldStorage(
fp = self.rfile,
headers = self.headers,
keep_blank_values = 1,
environ = { 'REQUEST_METHOD' : 'POST',
'CONTENT_TYPE' : self.headers['Content-Type'],
}
)
data = _fieldStorageToDict(form)
result = self.verify(data, runtime_context)
elif self.headers['content-type'].startswith('application/json'):
varLen = int(self.headers['content-length'])
data_raw = self.rfile.read(varLen)
data = json.loads(data_raw)
result = self.verify(data, runtime_context)
else:
result = VerificationResult()
result.response_code = 501
result.response_text = "Unknown Content-Type: %s" % (self.headers['Content-Type'],)
self.server.logger.error(result.response_text)
assert(result is not None)
self.server.logger.debug('Result is "%r"', result)
self.server.communication_queue.put( result.result_data )
self.send_response(result.response_code, result.response_text)
self.end_headers()
|
{
"content_hash": "b0d58b3f7e4b244b580f5e7b1169a6a7",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 129,
"avg_line_length": 39.13966480446928,
"alnum_prop": 0.5650870682272338,
"repo_name": "hdj666/maaps",
"id": "8dc59d1a524d24c2731bc58d057ed2479983b20d",
"size": "7125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EPHttpListener/EPHttpListener.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "48066"
}
],
"symlink_target": ""
}
|
from django.contrib.auth.models import AnonymousUser
import commonware.log
from rest_framework.authentication import BaseAuthentication
log = commonware.log.getLogger('z.api')
class OAuthError(RuntimeError):
def __init__(self, message='OAuth error occured.'):
self.message = message
class RestOAuthAuthentication(BaseAuthentication):
def authenticate(self, request):
# Most of the work here is in the RestOAuthMiddleware.
if (getattr(request._request, 'user', None) and
'RestOAuth' in getattr(request._request, 'authed_from', [])):
request.user = request._request.user
return request.user, None
class RestSharedSecretAuthentication(BaseAuthentication):
def authenticate(self, request):
# Most of the work here is in the RestSharedSecretMiddleware.
if (getattr(request._request, 'user', None) and
'RestSharedSecret' in getattr(
request._request, 'authed_from', [])):
request.user = request._request.user
return request.user, None
class RestAnonymousAuthentication(BaseAuthentication):
def authenticate(self, request):
return AnonymousUser(), None
|
{
"content_hash": "453e9733cfaf2db365a1268ddcdc2f2b",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 73,
"avg_line_length": 31.076923076923077,
"alnum_prop": 0.6856435643564357,
"repo_name": "jinankjain/zamboni",
"id": "6980049eec5e772d79737a3da5ea55cfec96a770",
"size": "1212",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mkt/api/authentication.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import env
import futures
import mongoengine
import redis
import rq
import setproctitle
import tornadoredis
import tornadotinyfeedback
from tornado import ioloop, web
from tornado.options import define, options, parse_command_line
import api
config = env.prefix('ORL_')
print(config)
if config['debug'] == 'True':
config['debug'] = True
else:
config['debug'] = False
settings = {
'debug': config['debug'],
}
server_settings = {
"xheaders": True,
}
application = web.Application([
(r'/runs/([a-z]+)', api.AddRunHandler),
], **settings)
application.config = config
application.thread_pool = futures.ThreadPoolExecutor(max_workers=3)
if __name__ == '__main__':
define('port', default=11001, help='TCP port to listen on')
parse_command_line()
setproctitle.setproctitle('orl.api')
mongoengine.connect(
config['db_name'],
host=config['db_uri'])
application.tf = tornadotinyfeedback.Client('openrunlog')
application.redis = tornadoredis.Client()
application.redis.connect()
application.redis_sync = redis.StrictRedis()
application.q = rq.Queue(connection=application.redis_sync)
application.listen(options.port, **server_settings)
ioloop.IOLoop.instance().start()
|
{
"content_hash": "5e197994aab92cecf0c2da1bea29704f",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 67,
"avg_line_length": 23.547169811320753,
"alnum_prop": 0.7011217948717948,
"repo_name": "JsonChiu/openrunlog",
"id": "2de877a12d7c6b47f91fddd61f0601667420ef54",
"size": "1249",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openrunlog/apiserver.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "749"
},
{
"name": "HTML",
"bytes": "38395"
},
{
"name": "JavaScript",
"bytes": "71398"
},
{
"name": "Nginx",
"bytes": "1743"
},
{
"name": "Python",
"bytes": "99245"
},
{
"name": "Shell",
"bytes": "158"
}
],
"symlink_target": ""
}
|
import numpy as np
import csv, os, random
from collections import Counter
def dirty_pairtree(htid):
period = htid.find('.')
prefix = htid[0:period]
postfix = htid[(period+1): ]
if '=' in postfix:
postfix = postfix.replace('+',':')
postfix = postfix.replace('=','/')
dirtyname = prefix + "." + postfix
return dirtyname
def get_classvector(classpath, volumeIDs):
with open(classpath, encoding = 'utf-8') as f:
filelines = f.readlines()
classdict = dict()
datedict = dict()
for line in filelines:
line = line.rstrip()
fields = line.split('\t')
volid = dirty_pairtree(fields[0])
theclass = fields[1]
date = int(fields[2])
if theclass == 'elite':
classdict[volid] = 1
elif theclass == 'vulgar':
classdict[volid] = 0
else:
classdict[volid] = 0
print('Anomalous class for ' + volid)
datedict[volid] = date
cleanclassdict = dict()
cleandatedict = dict()
for idx, anid in enumerate(volumeIDs):
dirtyid = dirty_pairtree(anid)
if dirtyid in classdict:
cleanclassdict[anid] = classdict[dirtyid]
cleandatedict[anid] = datedict[dirtyid]
else:
print('Missing from class metadata: ' + anid)
return cleanclassdict, cleandatedict
## MAIN code starts here.
sourcefolder = '/Users/tunder/Dropbox/GenreProject/python/granger/elite/'
extension = '.poe.tsv'
VOCABSIZE = 5000
classpath = '/Users/tunder/Dropbox/GenreProject/python/granger/correctedmeta.tsv'
if not sourcefolder.endswith('/'):
sourcefolder = sourcefolder + '/'
# This just makes things easier.
# Get a list of files.
allthefiles = os.listdir(sourcefolder)
random.shuffle(allthefiles)
volumeIDs = list()
volumepaths = list()
for filename in allthefiles:
if filename.endswith(extension):
volID = filename.replace(extension, "")
# The volume ID is basically the filename minus its extension.
# Extensions are likely to be long enough that there is little
# danger of accidental occurrence inside a filename. E.g.
# '.fic.tsv'
path = sourcefolder + filename
volumeIDs.append(volID)
volumepaths.append(path)
# Get the class and date vectors, indexed by volume ID
classdict, datedict = get_classvector(classpath, volumeIDs)
# make a vocabulary list and a volsize dict
wordcounts = Counter()
volsizes = Counter()
datebins = [1840,1845,1850,1855,1860,1865,1870,1875,1880,1885,1890,1895,1900,1905,1910,1915,1920]
# datebins = [1840,1850,1860,1870,1880,1890,1900,1910,1920]
NUMBINS = len(datebins)
for volid, volpath in zip(volumeIDs, volumepaths):
with open(volpath, encoding = 'utf-8') as f:
for line in f:
fields = line.strip().split('\t')
word = fields[0]
if len(word) > 1 and word[0].isalpha():
count = int(fields[1])
wordcounts[word] += 1
volsizes[volid] += count
etymological_categories = ['pre', 'post', 'stopword', 'missing']
etymo = dict()
with open('/Users/tunder/Dropbox/PythonScripts/mine/metadata/ReMergedEtymologies.txt', encoding = 'utf-8') as f:
for line in f:
fields = line.split('\t')
date = int(fields[1])
if date > 800 and date < 1150:
etymo[fields[0]] = 'pre'
elif date >= 1150 and date < 1700:
etymo[fields[0]] = 'post'
else:
etymo[fields[0]] = 'stopword'
vocablist = [x[0] for x in wordcounts.most_common(VOCABSIZE)]
VOCABSIZE = len(vocablist)
vocabset = set(vocablist)
# Here's the crucial change from make granger data. We map all
# words onto an etymological category
vocabmapper = dict()
for idx, word in enumerate(vocablist):
if word in etymo:
vocabmapper[word] = etymo[word]
else:
vocabmapper[word] = 'missing'
binsforcategory = dict()
for category in [0,1]:
datematrix = list()
for i in range(NUMBINS):
etymmatrix = dict()
for etym in etymological_categories:
etymmatrix[etym] = 0
datematrix.append(etymmatrix)
binsforcategory[category] = datematrix
datemapper = dict()
for volid in volumeIDs:
date = datedict[volid]
for idx, dateceiling in enumerate(datebins):
if date < dateceiling:
datemapper[volid] = idx
category = classdict[volid]
break
for volid, volpath in zip(volumeIDs, volumepaths):
with open(volpath, encoding = 'utf-8') as f:
for line in f:
fields = line.strip().split('\t')
word = fields[0]
if word in vocabset:
count = int(fields[1])
dateidx = datemapper[volid]
category = classdict[volid]
etymcategory = vocabmapper[word]
binsforcategory[category][dateidx][etymcategory] += count
# Turn counts into ratios.
for category in [0, 1]:
for i in range(NUMBINS):
binsforcategory[category][i]['ratio'] = binsforcategory[category][i]['pre'] / binsforcategory[category][i]['post']
with open('/Users/tunder/Dropbox/GenreProject/python/granger/eliteratio.csv', mode = 'w', encoding = 'utf-8') as f:
writer = csv.writer(f)
writer.writerow(['date', 'ratio'])
for idx, row in enumerate(binsforcategory[1]):
writer.writerow([str(datebins[idx]), str(row['ratio'])])
with open('/Users/tunder/Dropbox/GenreProject/python/granger/vulgarratio.csv', mode = 'w', encoding = 'utf-8') as f:
writer = csv.writer(f)
writer.writerow(['date', 'ratio'])
for idx, row in enumerate(binsforcategory[0]):
writer.writerow([str(datebins[idx]), str(row['ratio'])])
|
{
"content_hash": "9cf746b87ddca57099ba28ae9c84bd95",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 122,
"avg_line_length": 30.026041666666668,
"alnum_prop": 0.6256721595836947,
"repo_name": "tedunderwood/GenreProject",
"id": "576eefda28f5a21842e0c80f82ccb22096cf464f",
"size": "5765",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/granger/makegrangerratio.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "187389"
},
{
"name": "Python",
"bytes": "645172"
},
{
"name": "R",
"bytes": "34870"
}
],
"symlink_target": ""
}
|
import re
import html
from socket import *
from threading import *
class IRCClient:
def __init__(self, nickname, channel):
self.__nickname = nickname
self.__channel = channel
self.__socket = socket()
self.__jokes = self.__get_jokes()
def __del__(self):
self.__send("QUIT")
self.__socket.close()
def __send(self, message):
self.__socket.send((message + "\r\n").encode())
def __get_jokes(self):
s = socket()
s.connect(("www.bash.im", 80))
s.send(("GET /random HTTP/1.1\r\nHost: bash.im\r\n\r\n").encode())
data = str(s.recv(65536), encoding='windows-1251')
jokes = re.findall(r"<div class=\"text\">.*</div>", data)
result = []
for text in jokes:
text = text[18:-6]
text = html.unescape(text)
text = text.replace("<br>", "\n").replace("<br/>", "\n").replace("<br />", "\n")
text = text.split("\n")
result.append(text)
return result
def __check(self, message):
if "show some magic" in message and self.__nickname in message:
text = self.__jokes.pop()
for string in text:
if string != "":
print("<" + self.__nickname + ">: " + string)
self.__send("PRIVMSG " + self.__channel + " :" + string)
if len(self.__jokes) == 0:
self.__jokes = self.__get_jokes()
def __write(self):
while True:
message = input()
self.__send("PRIVMSG " + self.__channel + " :" + message)
self.__check(message)
def __listen(self):
while True:
try:
data = str(self.__socket.recv(2048), encoding='utf-8')
except UnicodeDecodeError:
continue
author = re.sub(r":(.*?)!.*", r"\1", data)[:-1].split("\n")[0]
index = data.find("@ " + self.__channel + " :")
if index > 0:
names = data[index + 4 + len(self.__channel):data.find("\n")]
print("Participants:", names.replace(' ', ', '))
if re.match(r".*PING.*", data) is not None:
self.__send("PO" + data[2:-2])
elif re.match(r".*JOIN.*", data) is not None:
print(author + " has joined " + self.__channel)
elif re.match(r".*QUIT.*", data) is not None:
print(author + " has quit")
elif re.match(r".*PRIVMSG.*", data) is not None:
messages = re.findall(r"PRIVMSG " + self.__channel + " :.*", data)
for message in messages:
print("<" + author + ">: " + message[10 + len(self.__channel):-1])
self.__check(message)
def start(self):
print("Connecting...")
self.__socket.connect(("irc.freenode.net", 6667))
self.__send("USER " + self.__nickname + " * * : ")
self.__send("NICK " + self.__nickname)
self.__send("JOIN " + self.__channel)
listen_thread = Thread(target=self.__listen)
write_thread = Thread(target=self.__write)
listen_thread.start()
write_thread.start()
listen_thread.join()
write_thread.join()
client = IRCClient("MaKToff", "#spbnet")
client.start()
|
{
"content_hash": "1b1c90e515334a3fc853aee34b25c887",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 92,
"avg_line_length": 33.08910891089109,
"alnum_prop": 0.4874326750448833,
"repo_name": "MaKToff/SPbSU_Homeworks",
"id": "4902e0c0919e446f02b9639f8628e2b39c306ee1",
"size": "3342",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Semester 5/Computer networks/irc_bot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "48341"
},
{
"name": "F#",
"bytes": "131089"
},
{
"name": "Haskell",
"bytes": "15908"
},
{
"name": "Python",
"bytes": "41993"
}
],
"symlink_target": ""
}
|
import re
from micawber.providers import Provider
class ImageProvider(Provider):
"""
Simple little hack to render any image URL as an <img> tag, use with care
Usage:
pr = micawber.bootstrap_basic()
pr.register(ImageProvider.regex, ImageProvider(''))
"""
regex = 'http://.+?\.(jpg|gif|png)'
def request(self, url, **params):
return {
'url': url,
'type': 'photo',
'title': '',
}
class GoogleMapsProvider(Provider):
"""
Render a map URL as an embedded map
Usage:
pr = micawber.bootstrap_basic()
pr.register(GoogleMapsProvider.regex, GoogleMapsProvider(''))
"""
regex = r'^https?://maps.google.com/maps\?([^\s]+)'
valid_params = ['q', 'z']
def request(self, url, **params):
url_params = re.match(self.regex, url).groups()[0]
url_params = url_params.replace('&', '&').split('&')
map_params = ['output=embed']
for param in url_params:
k, v = param.split('=', 1)
if k in self.valid_params:
map_params.append(param)
width = int(params.get('maxwidth', 640))
height = int(params.get('maxheight', 480))
html = '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="http://maps.google.com/maps?%s"></iframe>' % \
(width, height, '&'.join(map_params))
return {
'height': height,
'html': html,
'provider_name': 'Google maps',
'title': '',
'type': 'rich',
'version': '1.0',
'width': width,
}
|
{
"content_hash": "1e9aaa0c73ac7c4957576a2d02329d57",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 162,
"avg_line_length": 27.661290322580644,
"alnum_prop": 0.5206997084548105,
"repo_name": "HiroIshikawa/21playground",
"id": "ac8de53518fba8019a052f032a1cee595eebefab",
"size": "1715",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "payblog/blog/lib/python3.5/site-packages/micawber/contrib/providers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "51720"
},
{
"name": "CSS",
"bytes": "57775"
},
{
"name": "HTML",
"bytes": "40205"
},
{
"name": "JavaScript",
"bytes": "73667"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "38714409"
},
{
"name": "Shell",
"bytes": "30454"
}
],
"symlink_target": ""
}
|
import collections
import json
import logging
import sys
import django
from django.conf import settings
from django.forms import widgets
from django import http
import django.test
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.http import urlencode
import mock
import six
from horizon import exceptions
from horizon import forms
from horizon.workflows import views
from openstack_dashboard import api
from openstack_dashboard.api import cinder
from openstack_dashboard.dashboards.project.instances import console
from openstack_dashboard.dashboards.project.instances import tables
from openstack_dashboard.dashboards.project.instances import tabs
from openstack_dashboard.dashboards.project.instances import workflows
from openstack_dashboard.test import helpers
from openstack_dashboard.usage import quotas
from openstack_dashboard.views import get_url_with_pagination
INDEX_TEMPLATE = 'horizon/common/_data_table_view.html'
INDEX_URL = reverse('horizon:project:instances:index')
SEC_GROUP_ROLE_PREFIX = \
workflows.update_instance.INSTANCE_SEC_GROUP_SLUG + "_role_"
AVAILABLE = api.cinder.VOLUME_STATE_AVAILABLE
VOLUME_SEARCH_OPTS = dict(status=AVAILABLE, bootable=True)
VOLUME_BOOTABLE_SEARCH_OPTS = dict(bootable=True)
SNAPSHOT_SEARCH_OPTS = dict(status=AVAILABLE)
class InstanceTestHelperMixin(object):
def _mock_extension_supported(self, features):
self._features = features
self._feature_call_counts = collections.defaultdict(int)
def fake_extension_supported(name, request):
self._feature_call_counts[name] += 1
return self._features[name]
self.mock_extension_supported.side_effect = fake_extension_supported
def _check_extension_supported(self, expected_count):
self.assertEqual(expected_count, self._feature_call_counts)
class InstanceTestBase(helpers.ResetImageAPIVersionMixin,
InstanceTestHelperMixin,
helpers.TestCase):
def setUp(self):
super(InstanceTestBase, self).setUp()
if api.glance.VERSIONS.active < 2:
self.versioned_images = self.images
self.versioned_snapshots = self.snapshots
else:
self.versioned_images = self.imagesV2
self.versioned_snapshots = self.snapshotsV2
class InstanceTableTestMixin(object):
def _mock_glance_image_list_detailed(self, image_list):
self.mock_image_list_detailed.side_effect = [
[image_list, False, False],
[[], False, False],
]
def _check_glance_image_list_detailed(self, count=None):
if count is None:
count = 2
self.mock_image_list_detailed.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
filters={'is_public': True, 'status': 'active'}),
mock.call(helpers.IsHttpRequest(),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}),
])
self.assertEqual(count, self.mock_image_list_detailed.call_count)
def _mock_neutron_network_and_port_list(self):
self.mock_network_list.side_effect = [
self.networks.list()[:1],
self.networks.list()[1:],
self.networks.list()[:1],
self.networks.list()[1:],
]
self.mock_port_list_with_trunk_types.return_value = self.ports.list()
def _check_neutron_network_and_port_list(self):
self.assertEqual(4, self.mock_network_list.call_count)
self.mock_network_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(), tenant_id=self.tenant.id,
shared=False),
mock.call(helpers.IsHttpRequest(), shared=True),
mock.call(helpers.IsHttpRequest(), tenant_id=self.tenant.id,
shared=False),
mock.call(helpers.IsHttpRequest(), shared=True),
])
self.assertEqual(len(self.networks.list()),
self.mock_port_list_with_trunk_types.call_count)
self.mock_port_list_with_trunk_types(
[mock.call(helpers.IsHttpRequest(),
network_id=net.id,
tenant_id=self.tenant.id)
for net in self.networks.list()])
def _mock_nova_lists(self):
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_keypair_list.return_value = self.keypairs.list()
self.mock_security_group_list.return_value = \
self.security_groups.list()
self.mock_availability_zone_list.return_value = \
self.availability_zones.list()
def _check_nova_lists(self, flavor_count=None):
if flavor_count is None:
flavor_count = 1
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, flavor_count,
mock.call(helpers.IsHttpRequest()))
self.mock_keypair_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_security_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_availability_zone_list.assert_called_once_with(
helpers.IsHttpRequest())
def _mock_nova_glance_neutron_lists(self, return_value=True):
self._mock_nova_lists()
self._mock_glance_image_list_detailed(
self.versioned_images.list())
self._mock_neutron_network_and_port_list()
def _check_nova_glance_neutron_lists(self, return_value=True,
flavor_count=None,
image_count=None):
self._check_nova_lists(flavor_count=flavor_count)
self._check_glance_image_list_detailed(count=image_count)
self._check_neutron_network_and_port_list()
class InstanceTableTests(InstanceTestBase, InstanceTableTestMixin):
@helpers.create_mocks({
api.nova: (
'flavor_list',
'server_list',
'tenant_absolute_limits',
'extension_supported',
'is_feature_available',
),
api.glance: ('image_list_detailed',),
api.neutron: (
'floating_ip_simple_associate_supported',
'floating_ip_supported',
),
api.network: (
'servers_update_addresses',
),
})
def _get_index(self, use_servers_update_address=True):
servers = self.servers.list()
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = \
(self.images.list(), False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_tenant_absolute_limits.return_value = \
self.limits['absolute']
self.mock_floating_ip_supported.return_value = True
self.mock_floating_ip_simple_associate_supported.return_value = True
return self.client.get(INDEX_URL)
def _check_get_index(self, use_servers_update_address=True,
multiplier=4):
expected_extension_count = {'AdminActions': 4 * multiplier,
'Shelve': 1 * multiplier}
expected_feature_count = 2 * multiplier
expected_fip_supported_count = 2 * multiplier
expected_simple_fip_supported = 1 * multiplier
self._check_extension_supported(expected_extension_count)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, expected_feature_count,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(
helpers.IsHttpRequest(), search_opts=search_opts)
if use_servers_update_address:
servers = self.servers.list()
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
else:
self.assertEqual(0, self.mock_servers_update_addresses.call_count)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_absolute_limits, 2,
mock.call(helpers.IsHttpRequest(), reserved=True))
if expected_fip_supported_count is None:
expected_fip_supported_count = 8
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_supported, expected_fip_supported_count,
mock.call(helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_simple_associate_supported,
expected_simple_fip_supported,
mock.call(helpers.IsHttpRequest()))
def test_index(self):
res = self._get_index()
self.assertTemplateUsed(res, INDEX_TEMPLATE)
instances = res.context['instances_table'].data
self.assertItemsEqual(instances, self.servers.list())
self.assertNotContains(res, "Launch Instance (Quota exceeded)")
self._check_get_index()
@override_settings(OPENSTACK_INSTANCE_RETRIEVE_IP_ADDRESSES=False)
def test_index_without_servers_update_addresses(self):
res = self._get_index(use_servers_update_address=False)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
instances = res.context['instances_table'].data
self.assertItemsEqual(instances, self.servers.list())
self.assertNotContains(res, "Launch Instance (Quota exceeded)")
self._check_get_index(use_servers_update_address=False)
@helpers.create_mocks({
api.nova: ('server_list', 'tenant_absolute_limits', 'flavor_list'),
api.glance: ('image_list_detailed',),
})
def test_index_server_list_exception(self):
search_opts = {'marker': None, 'paginate': True}
flavors = self.flavors.list()
images = self.images.list()
self.mock_flavor_list.return_value = flavors
self.mock_image_list_detailed.return_value = (images, False, False)
self.mock_server_list.side_effect = self.exceptions.nova
self.mock_tenant_absolute_limits.return_value = self.limits['absolute']
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
self.assertEqual(len(res.context['instances_table'].data), 0)
self.assertMessageCount(res, error=1)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_absolute_limits, 2,
mock.call(helpers.IsHttpRequest(), reserved=True))
@helpers.create_mocks({
api.nova: ('flavor_list', 'server_list', 'flavor_get',
'tenant_absolute_limits', 'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.neutron: ('floating_ip_simple_associate_supported',
'floating_ip_supported',),
api.network: ('servers_update_addresses',),
})
def test_index_flavor_list_exception(self):
servers = self.servers.list()
search_opts = {'marker': None, 'paginate': True}
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_flavor_list.side_effect = self.exceptions.nova
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_tenant_absolute_limits.return_value = self.limits['absolute']
self.mock_floating_ip_supported.return_value = True
self.mock_floating_ip_simple_associate_supported.return_value = True
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
instances = res.context['instances_table'].data
self.assertItemsEqual(instances, self.servers.list())
self._check_extension_supported({'AdminActions': 16,
'Shelve': 4})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 8,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_absolute_limits, 2,
mock.call(helpers.IsHttpRequest(), reserved=True))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_supported, 8,
mock.call(helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_simple_associate_supported, 4,
mock.call(helpers.IsHttpRequest()))
@helpers.create_mocks({
api.nova: ('flavor_list', 'server_list', 'tenant_absolute_limits',
'extension_supported', 'is_feature_available',),
api.glance: ('image_list_detailed',),
api.neutron: ('floating_ip_simple_associate_supported',
'floating_ip_supported',),
api.network: ('servers_update_addresses',),
})
def test_index_with_instance_booted_from_volume(self):
volume_server = self.servers.first()
volume_server.image = ""
volume_server.image_name = "(not found)"
servers = self.servers.list()
servers[0] = volume_server
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_tenant_absolute_limits.return_value = self.limits['absolute']
self.mock_floating_ip_supported.return_value = True
self.mock_floating_ip_simple_associate_supported.return_value = True
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
instances = res.context['instances_table'].data
self.assertEqual(len(instances), len(servers))
self.assertContains(res, "(not found)")
self._check_extension_supported({'AdminActions': 16,
'Shelve': 4})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 8,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_absolute_limits, 2,
mock.call(helpers.IsHttpRequest(), reserved=True))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_supported, 8,
mock.call(helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_simple_associate_supported, 4,
mock.call(helpers.IsHttpRequest()))
def test_index_with_console_link(self):
res = self._get_index()
instances_table = res.context['instances_table']
instances = res.context['instances_table'].data
console_link_rendered = False
for instance in instances:
for action in instances_table.get_row_actions(instance):
if isinstance(action, tables.ConsoleLink):
console_link_rendered = True
break
if console_link_rendered:
break
self.assertTrue(console_link_rendered)
self._check_get_index(multiplier=5)
@django.test.utils.override_settings(CONSOLE_TYPE=None)
def test_index_without_console_link(self):
res = self._get_index()
instances_table = res.context['instances_table']
instances = res.context['instances_table'].data
for instance in instances:
for action in instances_table.get_row_actions(instance):
self.assertNotIsInstance(action, tables.ConsoleLink)
self._check_get_index(multiplier=8)
@helpers.create_mocks({api.nova: ('server_list',
'flavor_list',
'server_delete',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_delete_instance(self):
servers = self.servers.list()
server = servers[0]
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_delete.return_value = None
formData = {'action': 'instances__delete__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_server_delete.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_list',
'flavor_list',
'server_delete',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_delete_instance_error_state(self):
servers = self.servers.list()
server = servers[0]
server.status = 'ERROR'
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_delete.return_value = None
formData = {'action': 'instances__delete__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_server_delete.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_list',
'flavor_list',
'server_delete',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_delete_instance_exception(self):
servers = self.servers.list()
server = servers[0]
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_delete.side_effect = self.exceptions.nova
formData = {'action': 'instances__delete__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_server_delete.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_pause',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_pause_instance(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_pause.return_value = None
formData = {'action': 'instances__pause__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_pause.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_pause',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_pause_instance_exception(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_pause.side_effect = self.exceptions.nova
formData = {'action': 'instances__pause__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_pause.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_unpause',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_unpause_instance(self):
servers = self.servers.list()
server = servers[0]
server.status = "PAUSED"
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_unpause.return_value = None
formData = {'action': 'instances__pause__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_unpause.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_unpause',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_unpause_instance_exception(self):
servers = self.servers.list()
server = servers[0]
server.status = "PAUSED"
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_unpause.side_effect = self.exceptions.nova
formData = {'action': 'instances__pause__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_unpause.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_reboot',
'server_list',
'flavor_list',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_reboot_instance(self):
servers = self.servers.list()
server = servers[0]
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_reboot.return_value = None
formData = {'action': 'instances__reboot__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_reboot.assert_called_once_with(
helpers.IsHttpRequest(), server.id, soft_reboot=False)
@helpers.create_mocks({api.nova: ('server_reboot',
'server_list',
'flavor_list',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_reboot_instance_exception(self):
servers = self.servers.list()
server = servers[0]
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_reboot.side_effect = self.exceptions.nova
formData = {'action': 'instances__reboot__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_reboot.assert_called_once_with(
helpers.IsHttpRequest(), server.id, soft_reboot=False)
@helpers.create_mocks({api.nova: ('server_reboot',
'server_list',
'flavor_list',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_soft_reboot_instance(self):
servers = self.servers.list()
server = servers[0]
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_reboot.return_value = None
formData = {'action': 'instances__soft_reboot__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_reboot.assert_called_once_with(
helpers.IsHttpRequest(), server.id, soft_reboot=True)
@helpers.create_mocks({api.nova: ('server_suspend',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_suspend_instance(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_suspend.return_value = None
formData = {'action': 'instances__suspend__%s' % server.id}
url = get_url_with_pagination(
self.request, 'next', 'prev', 'horizon:project:instances:index')
res = self.client.post(url, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_suspend.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@django.test.utils.override_settings(API_RESULT_PAGE_SIZE=2)
@helpers.create_mocks({api.nova: ('server_suspend',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_suspend_instance_if_placed_on_2nd_page(self):
page_size = getattr(settings, 'API_RESULT_PAGE_SIZE', 2)
servers = self.servers.list()[:3]
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers[page_size:], False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_suspend.return_value = None
self.request.GET['marker'] = servers[-2].id
params = "=".join([tables.InstancesTable._meta.pagination_param,
servers[page_size - 1].id])
url = "?".join([reverse('horizon:project:instances:index'),
params])
formData = {'action': 'instances__suspend__%s' % servers[-1].id}
self.client.post(url, formData)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_server_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts={'marker': servers[page_size - 1].id,
'paginate': True})
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers[page_size:])
self.mock_server_suspend.assert_called_once_with(
helpers.IsHttpRequest(), servers[-1].id)
@helpers.create_mocks({api.nova: ('server_suspend',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_suspend_instance_exception(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_suspend.side_effect = self.exceptions.nova
formData = {'action': 'instances__suspend__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_suspend.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_resume',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_resume_instance(self):
servers = self.servers.list()
server = servers[0]
server.status = "SUSPENDED"
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_resume.return_value = None
formData = {'action': 'instances__suspend__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_resume.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_resume',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available'),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_resume_instance_exception(self):
servers = self.servers.list()
server = servers[0]
server.status = "SUSPENDED"
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_resume.side_effect = self.exceptions.nova
formData = {'action': 'instances__suspend__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_resume.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_shelve',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_shelve_instance(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_shelve.return_value = None
formData = {'action': 'instances__shelve__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'Shelve', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_shelve.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_shelve',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_shelve_instance_exception(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_shelve.side_effect = self.exceptions.nova
formData = {'action': 'instances__shelve__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'Shelve', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_shelve.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_unshelve',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_unshelve_instance(self):
servers = self.servers.list()
server = servers[0]
server.status = "SHELVED_OFFLOADED"
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_unshelve.return_value = None
formData = {'action': 'instances__shelve__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'Shelve', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_unshelve.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_unshelve',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_unshelve_instance_exception(self):
servers = self.servers.list()
server = servers[0]
server.status = "SHELVED_OFFLOADED"
self.mock_extension_supported.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_unshelve.side_effect = self.exceptions.nova
formData = {'action': 'instances__shelve__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'Shelve', helpers.IsHttpRequest())
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_unshelve.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_lock',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_lock_instance(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_lock.return_value = None
formData = {'action': 'instances__lock__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), 'locked_attribute')
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_lock.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_lock',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_lock_instance_exception(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_lock.side_effect = self.exceptions.nova
formData = {'action': 'instances__lock__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), 'locked_attribute')
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_lock.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_unlock',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available'),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_unlock_instance(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_unlock.return_value = None
formData = {'action': 'instances__unlock__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), 'locked_attribute')
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_unlock.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_unlock',
'server_list',
'flavor_list',
'extension_supported',
'is_feature_available'),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_unlock_instance_exception(self):
servers = self.servers.list()
server = servers[0]
self.mock_extension_supported.return_value = True
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_server_unlock.side_effect = self.exceptions.nova
formData = {'action': 'instances__unlock__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_extension_supported.assert_called_once_with(
'AdminActions', helpers.IsHttpRequest())
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), 'locked_attribute')
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_server_unlock.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
class InstanceDetailTests(InstanceTestBase):
@helpers.create_mocks({
api.nova: (
"server_get",
"instance_volumes_list",
"flavor_get",
"extension_supported",
'is_feature_available',
),
api.neutron: (
"server_security_groups",
"floating_ip_simple_associate_supported",
"floating_ip_supported"
),
api.network: ('servers_update_addresses',),
})
def _get_instance_details(self, server, qs=None,
flavor_return=None, volumes_return=None,
security_groups_return=None,
flavor_exception=False):
url = reverse('horizon:project:instances:detail', args=[server.id])
if qs:
url += qs
if flavor_return is None:
flavor_return = self.flavors.first()
if volumes_return is None:
volumes_return = []
if security_groups_return is None:
security_groups_return = self.security_groups.list()
self.mock_server_get.return_value = server
self.mock_servers_update_addresses.return_value = None
self.mock_instance_volumes_list.return_value = volumes_return
if flavor_exception:
self.mock_flavor_get.side_effect = self.exceptions.nova
else:
self.mock_flavor_get.return_value = flavor_return
self.mock_server_security_groups.return_value = security_groups_return
self.mock_floating_ip_simple_associate_supported.return_value = True
self.mock_floating_ip_supported.return_value = True
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
res = self.client.get(url)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), mock.ANY)
self.mock_instance_volumes_list.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_flavor_get.assert_called_once_with(
helpers.IsHttpRequest(), server.flavor['id'])
self.mock_server_security_groups.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_floating_ip_simple_associate_supported \
.assert_called_once_with(helpers.IsHttpRequest())
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_supported, 2,
mock.call(helpers.IsHttpRequest()))
self._check_extension_supported({'AdminActions': 4,
'Shelve': 1})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 2,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
return res
@helpers.create_mocks({api.neutron: ['is_extension_supported']})
def test_instance_details_volumes(self):
server = self.servers.first()
volumes = [self.volumes.list()[1]]
security_groups = self.security_groups.list()
self.mock_is_extension_supported.return_value = False
res = self._get_instance_details(
server, volumes_return=volumes,
security_groups_return=security_groups)
self.assertItemsEqual(res.context['instance'].volumes, volumes)
self.mock_is_extension_supported.assert_called_once_with(
helpers.IsHttpRequest(), 'mac-learning')
@helpers.create_mocks({api.neutron: ['is_extension_supported']})
def test_instance_details_volume_sorting(self):
server = self.servers.first()
volumes = self.volumes.list()[1:3]
security_groups = self.security_groups.list()
self.mock_is_extension_supported.return_value = False
res = self._get_instance_details(
server, volumes_return=volumes,
security_groups_return=security_groups)
self.assertItemsEqual(res.context['instance'].volumes, volumes)
self.assertEqual(res.context['instance'].volumes[0].device,
"/dev/hda")
self.assertEqual(res.context['instance'].volumes[1].device,
"/dev/hdk")
self.mock_is_extension_supported.assert_called_once_with(
helpers.IsHttpRequest(), 'mac-learning')
@helpers.create_mocks({api.neutron: ['is_extension_supported']})
def test_instance_details_metadata(self):
server = self.servers.first()
self.mock_is_extension_supported.return_value = False
tg = tabs.InstanceDetailTabs(self.request, instance=server)
qs = "?%s=%s" % (tg.param_name, tg.get_tab("overview").get_id())
res = self._get_instance_details(server, qs)
self.assertContains(res, "<dd>keyName</dd>", 1)
self.assertContains(res, "<dt>someMetaLabel</dt>", 1)
self.assertContains(res, "<dd>someMetaData</dd>", 1)
self.assertContains(res, "<dt>some<b>html</b>label</dt>",
1)
self.assertContains(res, "<dd><!--</dd>", 1)
self.assertContains(res, "<dt>empty</dt>", 1)
self.assertContains(res, "<dd><em>N/A</em></dd>", 1)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_extension_supported, 2,
mock.call(helpers.IsHttpRequest(), 'mac-learning'))
@helpers.create_mocks({api.neutron: ['is_extension_supported']})
def test_instance_details_fault(self):
server = self.servers.first()
self.mock_is_extension_supported.return_value = False
server.status = 'ERROR'
server.fault = {"message": "NoValidHost",
"code": 500,
"details": "No valid host was found. \n "
"File \"/mnt/stack/nova/nova/"
"scheduler/filter_scheduler.py\", "
"line 105, in schedule_run_instance\n "
"raise exception.NoValidHost"
"(reason=\"\")\n",
"created": "2013-10-07T00:08:32Z"}
res = self._get_instance_details(server)
self.assertItemsEqual(res.context['instance'].fault, server.fault)
self.mock_is_extension_supported.assert_called_once_with(
helpers.IsHttpRequest(), 'mac-learning')
@helpers.create_mocks({console: ['get_console'],
api.neutron: ['is_extension_supported']})
def test_instance_details_console_tab(self):
server = self.servers.first()
CONSOLE_OUTPUT = '/vncserver'
CONSOLE_TITLE = '&title=%s(%s)' % (server.name, server.id)
CONSOLE_URL = CONSOLE_OUTPUT + CONSOLE_TITLE
self.mock_get_console.return_value = ('VNC', CONSOLE_URL)
self.mock_is_extension_supported.return_value = False
tg = tabs.InstanceDetailTabs(self.request, instance=server)
qs = "?%s=%s" % (tg.param_name, tg.get_tab("console").get_id())
res = self._get_instance_details(server, qs)
self.assertIn(tabs.ConsoleTab, res.context_data['tab_group'].tabs)
self.assertTemplateUsed(res,
'project/instances/_detail_console.html')
console_tab_rendered = False
for tab in res.context_data['tab_group'].get_loaded_tabs():
if isinstance(tab, tabs.ConsoleTab):
console_tab_rendered = True
break
self.assertTrue(console_tab_rendered)
self.mock_get_console.assert_called_once_with(
mock.ANY, 'AUTO', server)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_extension_supported, 2,
mock.call(helpers.IsHttpRequest(), 'mac-learning'))
@django.test.utils.override_settings(CONSOLE_TYPE=None)
@helpers.create_mocks({api.neutron: ['is_extension_supported']})
def test_instance_details_console_tab_deactivated(self):
server = self.servers.first()
self.mock_is_extension_supported.return_value = False
tg = tabs.InstanceDetailTabs(self.request, instance=server)
self.assertIsNone(tg.get_tab("console"))
res = self._get_instance_details(server)
self.assertTemplateNotUsed(res,
'project/instances/_detail_console.html')
for tab in res.context_data['tab_group'].get_loaded_tabs():
self.assertNotIsInstance(tab, tabs.ConsoleTab)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_extension_supported, 2,
mock.call(helpers.IsHttpRequest(), 'mac-learning'))
@helpers.create_mocks({api.nova: ('server_get',)})
def test_instance_details_exception(self):
server = self.servers.first()
self.mock_server_get.side_effect = self.exceptions.nova
url = reverse('horizon:project:instances:detail',
args=[server.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(helpers.IsHttpRequest(),
server.id)
@helpers.create_mocks({api.nova: ("server_get",)})
def test_instance_details_unauthorized(self):
server = self.servers.first()
url = reverse('horizon:admin:instances:detail',
args=[server.id])
# Avoid the log message in the test
# when unauthorized exception will be logged
logging.disable(logging.ERROR)
res = self.client.get(url)
logging.disable(logging.NOTSET)
self.assertEqual(403, res.status_code)
self.assertEqual(0, self.mock_server_get.call_count)
@helpers.create_mocks({api.neutron: ['is_extension_supported']})
def test_instance_details_flavor_not_found(self):
server = self.servers.first()
self.mock_is_extension_supported.return_value = False
res = self._get_instance_details(server, flavor_exception=True)
self.assertTemplateUsed(res,
'project/instances/_detail_overview.html')
self.assertContains(res, "Not available")
self.mock_is_extension_supported.assert_called_once_with(
helpers.IsHttpRequest(), 'mac-learning')
@helpers.create_mocks({api.nova: ['server_console_output'],
api.neutron: ['is_extension_supported']})
def test_instance_log(self):
server = self.servers.first()
CONSOLE_OUTPUT = 'output'
self.mock_server_console_output.return_value = CONSOLE_OUTPUT
self.mock_is_extension_supported.return_value = False
url = reverse('horizon:project:instances:console',
args=[server.id])
tg = tabs.InstanceDetailTabs(self.request, instance=server)
qs = "?%s=%s" % (tg.param_name, tg.get_tab("log").get_id())
res = self.client.get(url + qs)
self.assertNoMessages()
self.assertIsInstance(res, http.HttpResponse)
self.assertContains(res, CONSOLE_OUTPUT)
self.mock_server_console_output.assert_called_once_with(
helpers.IsHttpRequest(), server.id, tail_length=None)
self.mock_is_extension_supported.assert_called_once_with(
helpers.IsHttpRequest(), 'mac-learning')
@helpers.create_mocks({api.nova: ['server_console_output'],
api.neutron: ['is_extension_supported']})
def test_instance_log_exception(self):
server = self.servers.first()
self.mock_server_console_output.side_effect = self.exceptions.nova
self.mock_is_extension_supported.return_value = False
url = reverse('horizon:project:instances:console',
args=[server.id])
tg = tabs.InstanceDetailTabs(self.request, instance=server)
qs = "?%s=%s" % (tg.param_name, tg.get_tab("log").get_id())
res = self.client.get(url + qs)
self.assertContains(res, "Unable to get log for")
self.mock_server_console_output.assert_called_once_with(
helpers.IsHttpRequest(), server.id, tail_length=None)
self.mock_is_extension_supported.assert_called_once_with(
helpers.IsHttpRequest(), 'mac-learning')
@helpers.create_mocks({api.neutron: ['is_extension_supported']})
def test_instance_log_invalid_input(self):
server = self.servers.first()
self.mock_is_extension_supported.return_value = False
url = reverse('horizon:project:instances:console',
args=[server.id])
tg = tabs.InstanceDetailTabs(self.request, instance=server)
for length in ["-5", "x"]:
qs = "?%s=%s&length=%s" % (tg.param_name,
tg.get_tab("log").get_id(),
length)
res = self.client.get(url + qs)
self.assertContains(res, "Unable to get log for")
self.mock_is_extension_supported.assert_called_once_with(
helpers.IsHttpRequest(), 'mac-learning')
@helpers.create_mocks({api.nova: ['server_get'],
console: ['get_console']})
def test_instance_auto_console(self):
server = self.servers.first()
CONSOLE_OUTPUT = '/vncserver'
CONSOLE_TITLE = '&title=%s(%s)' % (server.name, server.id)
CONSOLE_URL = CONSOLE_OUTPUT + CONSOLE_TITLE
self.mock_server_get.return_value = server
self.mock_get_console.return_value = ('VNC', CONSOLE_URL)
url = reverse('horizon:project:instances:auto_console',
args=[server.id])
res = self.client.get(url)
redirect = CONSOLE_URL
self.assertRedirectsNoFollow(res, redirect)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_get_console.assert_called_once_with(
mock.ANY, 'AUTO', server)
@helpers.create_mocks({api.nova: ['server_get'],
console: ['get_console']})
def test_instance_vnc_error(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_get_console.side_effect = exceptions.NotAvailable('console')
url = reverse('horizon:project:instances:vnc',
args=[server.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_get_console.assert_called_once_with(
mock.ANY, 'VNC', server)
@helpers.create_mocks({api.nova: ['server_get'],
console: ['get_console']})
def test_instance_spice(self):
server = self.servers.first()
CONSOLE_OUTPUT = '/spiceserver'
CONSOLE_TITLE = '&title=%s(%s)' % (server.name, server.id)
CONSOLE_URL = CONSOLE_OUTPUT + CONSOLE_TITLE
self.mock_server_get.return_value = server
self.mock_get_console.return_value = ('SPICE', CONSOLE_URL)
url = reverse('horizon:project:instances:spice',
args=[server.id])
res = self.client.get(url)
redirect = CONSOLE_URL
self.assertRedirectsNoFollow(res, redirect)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_get_console.assert_called_once_with(
mock.ANY, 'SPICE', server)
@helpers.create_mocks({api.nova: ['server_get'],
console: ['get_console']})
def test_instance_spice_exception(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_get_console.side_effect = exceptions.NotAvailable('console')
url = reverse('horizon:project:instances:spice',
args=[server.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_get_console.assert_called_once_with(
mock.ANY, 'SPICE', server)
@helpers.create_mocks({api.nova: ['server_get'],
console: ['get_console']})
def test_instance_rdp(self):
server = self.servers.first()
CONSOLE_OUTPUT = '/rdpserver'
CONSOLE_TITLE = '&title=%s(%s)' % (server.name, server.id)
CONSOLE_URL = CONSOLE_OUTPUT + CONSOLE_TITLE
self.mock_server_get.return_value = server
self.mock_get_console.return_value = ('RDP', CONSOLE_URL)
url = reverse('horizon:project:instances:rdp',
args=[server.id])
res = self.client.get(url)
redirect = CONSOLE_URL
self.assertRedirectsNoFollow(res, redirect)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_get_console.assert_called_once_with(
mock.ANY, 'RDP', server)
@helpers.create_mocks({api.nova: ['server_get'],
console: ['get_console']})
def test_instance_rdp_exception(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_get_console.side_effect = exceptions.NotAvailable('console')
url = reverse('horizon:project:instances:rdp',
args=[server.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_get_console.assert_called_once_with(
mock.ANY, 'RDP', server)
class InstanceTests(InstanceTestBase):
@helpers.create_mocks({api.nova: ('server_get',
'snapshot_create'),
api.glance: ('image_list_detailed',)})
def test_create_instance_snapshot(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_snapshot_create.return_value = self.snapshots.first()
self.mock_image_list_detailed.return_value = \
(self.images.list(), False, False)
formData = {'instance_id': server.id,
'method': 'CreateSnapshot',
'name': 'snapshot1'}
url = reverse('horizon:project:images:snapshots:create',
args=[server.id])
redir_url = reverse('horizon:project:images:index')
res = self.client.post(url, formData)
self.assertRedirects(res, redir_url)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_snapshot_create.assert_called_once_with(
helpers.IsHttpRequest(), server.id, "snapshot1")
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest(), marker=None, paginate=True,
reversed_order=False, sort_dir='asc', sort_key='name')
@django.test.utils.override_settings(
OPENSTACK_ENABLE_PASSWORD_RETRIEVE=False)
def test_instances_index_retrieve_password_action_disabled(self):
self. _test_instances_index_retrieve_password_action()
@django.test.utils.override_settings(
OPENSTACK_ENABLE_PASSWORD_RETRIEVE=True)
def test_instances_index_retrieve_password_action_enabled(self):
self._test_instances_index_retrieve_password_action()
@helpers.create_mocks({
api.nova: ('flavor_list', 'server_list', 'tenant_absolute_limits',
'extension_supported', 'is_feature_available',),
api.glance: ('image_list_detailed',),
api.neutron: ('floating_ip_simple_associate_supported',
'floating_ip_supported',),
api.network: ('servers_update_addresses',),
})
def _test_instances_index_retrieve_password_action(self):
servers = self.servers.list()
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_tenant_absolute_limits.return_value = self.limits['absolute']
self.mock_floating_ip_supported.return_value = True
self.mock_floating_ip_simple_associate_supported.return_value = True
url = reverse('horizon:project:instances:index')
res = self.client.get(url)
for server in servers:
_action_id = ''.join(["instances__row_",
server.id,
"__action_decryptpassword"])
if settings.OPENSTACK_ENABLE_PASSWORD_RETRIEVE and \
server.status == "ACTIVE" and \
server.key_name is not None:
self.assertContains(res, _action_id)
else:
self.assertNotContains(res, _action_id)
self._check_extension_supported({'AdminActions': 16,
'Shelve': 4})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 8,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_absolute_limits, 2,
mock.call(helpers.IsHttpRequest(), reserved=True))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_supported, 8,
mock.call(helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_simple_associate_supported, 4,
mock.call(helpers.IsHttpRequest()))
@helpers.create_mocks({api.nova: ('get_password',)})
def test_decrypt_instance_password(self):
server = self.servers.first()
enc_password = "azerty"
self.mock_get_password.return_value = enc_password
url = reverse('horizon:project:instances:decryptpassword',
args=[server.id,
server.key_name])
res = self.client.get(url)
self.assertTemplateUsed(res, 'project/instances/decryptpassword.html')
self.mock_get_password.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('get_password',)})
def test_decrypt_instance_get_exception(self):
server = self.servers.first()
keypair = self.keypairs.first()
self.mock_get_password.side_effect = self.exceptions.nova
url = reverse('horizon:project:instances:decryptpassword',
args=[server.id,
keypair])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_get_password.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
instance_update_get_stubs = {
api.nova: ('server_get', 'is_feature_available'),
api.neutron: ('security_group_list',
'server_security_groups',)}
@helpers.create_mocks(instance_update_get_stubs)
def test_instance_update_get(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_security_group_list.return_value = []
self.mock_server_security_groups.return_value = []
self.mock_is_feature_available.return_value = False
url = reverse('horizon:project:instances:update', args=[server.id])
res = self.client.get(url)
self.assertTemplateUsed(res, views.WorkflowView.template_name)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_security_group_list(helpers.IsHttpRequest(), tenant_id=None)
self.mock_server_security_groups(helpers.IsHttpRequest(), server.id)
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@helpers.create_mocks(instance_update_get_stubs)
def test_instance_update_get_server_get_exception(self):
server = self.servers.first()
self.mock_server_get.side_effect = self.exceptions.nova
url = reverse('horizon:project:instances:update',
args=[server.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
def _instance_update_post(self, server_id, server_name, secgroups):
default_role_field_name = 'default_' + \
workflows.update_instance.INSTANCE_SEC_GROUP_SLUG + '_role'
formData = {'name': server_name,
default_role_field_name: 'member',
SEC_GROUP_ROLE_PREFIX + 'member': secgroups}
url = reverse('horizon:project:instances:update',
args=[server_id])
return self.client.post(url, formData)
instance_update_post_stubs = {
api.nova: ('server_get', 'server_update', 'is_feature_available'),
api.neutron: ('security_group_list',
'server_security_groups',
'server_update_security_groups')}
@helpers.create_mocks(instance_update_post_stubs)
def test_instance_update_post(self):
server = self.servers.first()
secgroups = self.security_groups.list()[:3]
server_groups = [secgroups[0], secgroups[1]]
wanted_groups = [secgroups[1].id, secgroups[2].id]
self.mock_server_get.return_value = server
self.mock_is_feature_available.return_value = False
self.mock_security_group_list.return_value = secgroups
self.mock_server_security_groups.return_value = server_groups
self.mock_server_update.return_value = server
self.mock_server_update_security_groups.return_value = None
res = self._instance_update_post(server.id, server.name, wanted_groups)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_security_group_list.assert_called_once_with(
helpers.IsHttpRequest(), tenant_id=self.tenant.id)
self.mock_server_security_groups.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_server_update.assert_called_once_with(
helpers.IsHttpRequest(), server.id, server.name, description=None)
self.mock_server_update_security_groups.assert_called_once_with(
helpers.IsHttpRequest(), server.id, wanted_groups)
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@helpers.create_mocks(instance_update_post_stubs)
def test_instance_update_post_with_desc(self):
server = self.servers.first()
secgroups = self.security_groups.list()[:3]
server_groups = [secgroups[0], secgroups[1]]
test_description = 'test description'
self.mock_server_get.return_value = server
self.mock_is_feature_available.return_value = True
self.mock_security_group_list.return_value = secgroups
self.mock_server_security_groups.return_value = server_groups
self.mock_server_update.return_value = server
formData = {'name': server.name,
'description': test_description}
url = reverse('horizon:project:instances:update',
args=[server.id])
res = self.client.post(url, formData)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_security_group_list.assert_called_once_with(
helpers.IsHttpRequest(), tenant_id=self.tenant.id)
self.mock_server_security_groups.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_server_update.assert_called_once_with(
helpers.IsHttpRequest(), server.id, server.name,
description=test_description)
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@helpers.create_mocks(instance_update_post_stubs)
def test_instance_update_post_api_exception(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_is_feature_available.return_value = False
self.mock_security_group_list.return_value = []
self.mock_server_security_groups.return_value = []
self.mock_server_update.side_effect = self.exceptions.nova
self.mock_server_update_security_groups.return_value = None
res = self._instance_update_post(server.id, server.name, [])
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_security_group_list.assert_called_once_with(
helpers.IsHttpRequest(), tenant_id=self.tenant.id)
self.mock_server_security_groups.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_server_update.assert_called_once_with(
helpers.IsHttpRequest(), server.id, server.name, description=None)
self.mock_server_update_security_groups.assert_called_once_with(
helpers.IsHttpRequest(), server.id, [])
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@helpers.create_mocks(instance_update_post_stubs)
def test_instance_update_post_secgroup_api_exception(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_is_feature_available.return_value = False
self.mock_security_group_list.return_value = []
self.mock_server_security_groups.return_value = []
self.mock_server_update.return_value = server
self.mock_server_update_security_groups.side_effect = \
self.exceptions.nova
res = self._instance_update_post(server.id, server.name, [])
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_security_group_list.assert_called_once_with(
helpers.IsHttpRequest(), tenant_id=self.tenant.id)
self.mock_server_security_groups.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_server_update.assert_called_once_with(
helpers.IsHttpRequest(), server.id, server.name, description=None)
self.mock_server_update_security_groups.assert_called_once_with(
helpers.IsHttpRequest(), server.id, [])
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
class InstanceLaunchInstanceTests(InstanceTestBase,
InstanceTableTestMixin):
@helpers.create_mocks({api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'server_group_list',
'availability_zone_list',),
cinder: ('volume_snapshot_list',
'volume_list',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_quota_usages',)})
def test_launch_instance_get(self,
expect_password_fields=True,
block_device_mapping_v2=True,
custom_flavor_sort=None,
only_one_network=False,
disk_config=True,
config_drive=True,
config_drive_default=False):
image = self.versioned_images.first()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': block_device_mapping_v2,
'DiskConfig': disk_config,
'ConfigDrive': config_drive,
'ServerGroups': True,
})
self.mock_volume_list.return_value = []
self.mock_volume_snapshot_list.return_value = []
self._mock_glance_image_list_detailed(self.versioned_images.list())
self.mock_network_list.side_effect = [
self.networks.list()[:1],
[] if only_one_network else self.networks.list()[1:],
self.networks.list()[:1],
self.networks.list()[1:],
]
self.mock_port_list_with_trunk_types.return_value = self.ports.list()
self.mock_server_group_list.return_value = self.server_groups.list()
self.mock_tenant_quota_usages.return_value = self.quota_usages.first()
self._mock_nova_lists()
url = reverse('horizon:project:instances:launch')
params = urlencode({"source_type": "image_id",
"source_id": image.id})
res = self.client.get("%s?%s" % (url, params))
workflow = res.context['workflow']
self.assertTemplateUsed(res, views.WorkflowView.template_name)
self.assertEqual(res.context['workflow'].name,
workflows.LaunchInstance.name)
step = workflow.get_step("setinstancedetailsaction")
self.assertEqual(step.action.initial['image_id'], image.id)
self.assertQuerysetEqual(
workflow.steps,
['<SetInstanceDetails: setinstancedetailsaction>',
'<SetAccessControls: setaccesscontrolsaction>',
'<SetNetwork: setnetworkaction>',
'<SetNetworkPorts: setnetworkportsaction>',
'<PostCreationStep: customizeaction>',
'<SetAdvanced: setadvancedaction>'])
if custom_flavor_sort == 'id':
# Reverse sorted by id
sorted_flavors = (
('eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee', 'm1.metadata'),
('dddddddd-dddd-dddd-dddd-dddddddddddd', 'm1.secret'),
('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'm1.massive'),
('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'm1.tiny'),
)
elif custom_flavor_sort == 'name':
sorted_flavors = (
('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'm1.massive'),
('eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee', 'm1.metadata'),
('dddddddd-dddd-dddd-dddd-dddddddddddd', 'm1.secret'),
('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'm1.tiny'),
)
elif custom_flavor_sort == helpers.my_custom_sort:
sorted_flavors = (
('dddddddd-dddd-dddd-dddd-dddddddddddd', 'm1.secret'),
('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'm1.tiny'),
('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'm1.massive'),
('eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee', 'm1.metadata'),
)
else:
# Default - sorted by RAM
sorted_flavors = (
('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'm1.tiny'),
('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'm1.massive'),
('dddddddd-dddd-dddd-dddd-dddddddddddd', 'm1.secret'),
('eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee', 'm1.metadata'),
)
select_options = ''.join([
'<option value="%s">%s</option>' % (f[0], f[1])
for f in sorted_flavors
])
self.assertContains(res, select_options)
password_field_label = 'Admin Pass'
if expect_password_fields:
self.assertContains(res, password_field_label)
else:
self.assertNotContains(res, password_field_label)
boot_from_image_field_label = 'Boot from image (creates a new volume)'
if block_device_mapping_v2:
self.assertContains(res, boot_from_image_field_label)
else:
self.assertNotContains(res, boot_from_image_field_label)
# NOTE(adriant): Django 1.11 changes the checked syntax to use html5
# "checked" rather than XHTML's "checked='checked'".
checked_box = (
'<input type="checkbox" name="network" '
'value="82288d84-e0a5-42ac-95be-e6af08727e42" '
'id="id_network_0" checked />'
)
if only_one_network:
self.assertContains(res, checked_box, html=True)
else:
self.assertNotContains(res, checked_box, html=True)
disk_config_field_label = 'Disk Partition'
if disk_config:
self.assertContains(res, disk_config_field_label)
else:
self.assertNotContains(res, disk_config_field_label)
config_drive_field_label = 'Configuration Drive'
if config_drive:
self.assertContains(res, config_drive_field_label)
else:
self.assertNotContains(res, config_drive_field_label)
step = workflow.get_step("setadvancedaction")
self.assertEqual(step.action.initial['config_drive'],
config_drive_default)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self._check_glance_image_list_detailed(count=5)
self.mock_network_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
tenant_id=self.tenant.id, shared=False),
mock.call(helpers.IsHttpRequest(), shared=True),
mock.call(helpers.IsHttpRequest(),
tenant_id=self.tenant.id, shared=False),
mock.call(helpers.IsHttpRequest(), shared=True),
])
self.assertEqual(4, self.mock_network_list.call_count)
self.mock_port_list_with_trunk_types.assert_has_calls(
[mock.call(helpers.IsHttpRequest(),
network_id=net.id, tenant_id=self.tenant.id)
for net in self.networks.list()])
self.assertEqual(len(self.networks.list()),
self.mock_port_list_with_trunk_types.call_count)
self.mock_server_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes'))
self._check_nova_lists(flavor_count=2)
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_instance_get_glance_v1(self):
self.test_launch_instance_get()
@django.test.utils.override_settings(
OPENSTACK_HYPERVISOR_FEATURES={'can_set_password': False})
def test_launch_instance_get_without_password(self):
self.test_launch_instance_get(expect_password_fields=False)
@django.test.utils.override_settings(
OPENSTACK_HYPERVISOR_FEATURES={'requires_keypair': True})
def test_launch_instance_required_key(self):
flavor = self.flavors.first()
image = self.images.first()
image.min_ram = flavor.ram
image.min_disk = flavor.disk
res = self._launch_form_instance(image, flavor, keypair=None)
msg = "This field is required"
self.assertContains(res, msg)
@django.test.utils.override_settings(
LAUNCH_INSTANCE_DEFAULTS={'config_drive': True})
def test_launch_instance_get_with_config_drive_default(self):
self.test_launch_instance_get(config_drive_default=True)
def test_launch_instance_get_no_block_device_mapping_v2_supported(self):
self.test_launch_instance_get(block_device_mapping_v2=False)
def test_launch_instance_get_no_disk_config_supported(self):
self.test_launch_instance_get(disk_config=False)
def test_launch_instance_get_no_config_drive_supported(self):
self.test_launch_instance_get(config_drive=False)
@django.test.utils.override_settings(
CREATE_INSTANCE_FLAVOR_SORT={
'key': 'id',
'reverse': True,
})
def test_launch_instance_get_custom_flavor_sort_by_id(self):
self.test_launch_instance_get(custom_flavor_sort='id')
@django.test.utils.override_settings(
CREATE_INSTANCE_FLAVOR_SORT={
'key': 'name',
'reverse': False,
})
def test_launch_instance_get_custom_flavor_sort_by_name(self):
self.test_launch_instance_get(custom_flavor_sort='name')
@django.test.utils.override_settings(
CREATE_INSTANCE_FLAVOR_SORT={
'key': helpers.my_custom_sort,
'reverse': False,
})
def test_launch_instance_get_custom_flavor_sort_by_callable(self):
self.test_launch_instance_get(
custom_flavor_sort=helpers.my_custom_sort)
@django.test.utils.override_settings(
CREATE_INSTANCE_FLAVOR_SORT={
'key': 'no_such_column',
'reverse': False,
})
def test_launch_instance_get_custom_flavor_sort_by_missing_column(self):
self.test_launch_instance_get(custom_flavor_sort='no_such_column')
def test_launch_instance_get_with_only_one_network(self):
self.test_launch_instance_get(only_one_network=True)
@helpers.create_mocks({api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'server_group_list',
'availability_zone_list',),
cinder: ('volume_snapshot_list',
'volume_list',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_quota_usages',)})
def test_launch_instance_get_images_snapshots(self,
block_device_mapping_v2=True,
only_one_network=False,
disk_config=True,
config_drive=True):
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': block_device_mapping_v2,
'DiskConfig': disk_config,
'ConfigDrive': config_drive,
'ServerGroups': True,
})
self.mock_volume_list.return_value = []
self.mock_volume_snapshot_list.return_value = []
self._mock_glance_image_list_detailed(self.versioned_images.list() +
self.versioned_snapshots.list())
self.mock_network_list.side_effect = [
self.networks.list()[:1],
[] if only_one_network else self.networks.list()[1:],
self.networks.list()[:1],
self.networks.list()[1:],
]
self.mock_port_list_with_trunk_types.return_value = self.ports.list()
self.mock_server_group_list.return_value = self.server_groups.list()
self.mock_tenant_quota_usages.return_value = self.limits['absolute']
self._mock_nova_lists()
url = reverse('horizon:project:instances:launch')
res = self.client.get(url)
image_sources = (res.context_data['workflow'].steps[0].
action.fields['image_id'].choices)
snapshot_sources = (res.context_data['workflow'].steps[0].
action.fields['instance_snapshot_id'].choices)
images = [image.id for image in self.versioned_images.list()]
snapshots = [s.id for s in self.versioned_snapshots.list()]
image_sources_ids = []
snapshot_sources_ids = []
for image in image_sources:
self.assertTrue(image[0] in images or image[0] == '')
if image[0] != '':
image_sources_ids.append(image[0])
for image in images:
self.assertIn(image, image_sources_ids)
for snapshot in snapshot_sources:
self.assertTrue(snapshot[0] in snapshots or snapshot[0] == '')
if snapshot[0] != '':
snapshot_sources_ids.append(snapshot[0])
for snapshot in snapshots:
self.assertIn(snapshot, snapshot_sources_ids)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self._check_glance_image_list_detailed(count=5)
self.mock_network_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
tenant_id=self.tenant.id, shared=False),
mock.call(helpers.IsHttpRequest(), shared=True),
mock.call(helpers.IsHttpRequest(),
tenant_id=self.tenant.id, shared=False),
mock.call(helpers.IsHttpRequest(), shared=True),
])
self.assertEqual(4, self.mock_network_list.call_count)
self.mock_port_list_with_trunk_types.assert_has_calls(
[mock.call(helpers.IsHttpRequest(),
network_id=net.id, tenant_id=self.tenant.id)
for net in self.networks.list()])
self.mock_server_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes'))
self._check_nova_lists(flavor_count=2)
@helpers.create_mocks({api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'server_group_list',
'availability_zone_list',),
cinder: ('volume_snapshot_list',
'volume_list',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_quota_usages',)})
def test_launch_instance_get_bootable_volumes(self,
block_device_mapping_v2=True,
only_one_network=False,
disk_config=True,
config_drive=True):
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': block_device_mapping_v2,
'DiskConfig': disk_config,
'ConfigDrive': config_drive,
'ServerGroups': True,
})
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = []
self._mock_glance_image_list_detailed(self.versioned_images.list())
self.mock_network_list.side_effect = [
self.networks.list()[:1],
[] if only_one_network else self.networks.list()[1:],
self.networks.list()[:1],
self.networks.list()[1:],
]
self.mock_port_list_with_trunk_types.return_value = self.ports.list()
self.mock_server_group_list.return_value = self.server_groups.list()
self.mock_tenant_quota_usages.return_value = self.quota_usages.first()
self._mock_nova_lists()
url = reverse('horizon:project:instances:launch')
res = self.client.get(url)
bootable_volumes = [v.id for v in self.volumes.list()
if (v.bootable == 'true' and
v.status == 'available')]
volume_sources = (res.context_data['workflow'].steps[0].
action.fields['volume_id'].choices)
volume_sources_ids = []
for volume in volume_sources:
self.assertTrue(volume[0].split(":vol")[0] in bootable_volumes or
volume[0] == '')
if volume[0] != '':
volume_sources_ids.append(volume[0].split(":vol")[0])
for volume in bootable_volumes:
self.assertIn(volume, volume_sources_ids)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self._check_glance_image_list_detailed(count=5)
self.mock_network_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
tenant_id=self.tenant.id, shared=False),
mock.call(helpers.IsHttpRequest(), shared=True),
mock.call(helpers.IsHttpRequest(),
tenant_id=self.tenant.id, shared=False),
mock.call(helpers.IsHttpRequest(), shared=True),
])
self.assertEqual(4, self.mock_network_list.call_count)
self.mock_port_list_with_trunk_types.assert_has_calls(
[mock.call(helpers.IsHttpRequest(),
network_id=net.id, tenant_id=self.tenant.id)
for net in self.networks.list()])
self.mock_server_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes'))
self._check_nova_lists(flavor_count=2)
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_instance_get_bootable_volumes_glance_v1(self):
self.test_launch_instance_get_bootable_volumes()
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_create',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',
'server_group_list',
'server_create',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def test_launch_instance_post(self,
disk_config=True,
config_drive=True):
flavor = self.flavors.first()
image = self.versioned_images.first()
keypair = self.keypairs.first()
server = self.servers.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
nics = [{"net-id": self.networks.first().id, "v4-fixed-ip": ''}]
quota_usages = self.quota_usages.first()
scheduler_hints = {"group": self.server_groups.first().id}
self._mock_nova_glance_neutron_lists()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': disk_config,
'ConfigDrive': config_drive,
'ServerGroups': True,
})
self.mock_server_group_list.return_value = self.server_groups.list()
self.mock_volume_list.return_value = []
self.mock_volume_snapshot_list.return_value = []
self.mock_server_create.return_value = None
self.mock_tenant_quota_usages.return_value = quota_usages
self.mock_flavor_list.return_value = self.flavors.list()
form_data = {'flavor': flavor.id,
'source_type': 'image_id',
'image_id': image.id,
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'availability_zone': avail_zone.zoneName,
'volume_type': '',
'network': self.networks.first().id,
'count': 1,
'server_group': self.server_groups.first().id}
if disk_config:
form_data['disk_config'] = 'AUTO'
if config_drive:
form_data['config_drive'] = True
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self._check_nova_glance_neutron_lists(flavor_count=2, image_count=5)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_server_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
if disk_config:
disk_config_value = u'AUTO'
else:
disk_config_value = None
if config_drive:
config_drive_value = True
else:
config_drive_value = None
self.mock_server_create.assert_called_once_with(
helpers.IsHttpRequest(),
server.name,
image.id,
flavor.id,
keypair.name,
customization_script,
[str(sec_group.id)],
block_device_mapping=None,
block_device_mapping_v2=None,
nics=nics,
availability_zone=avail_zone.zoneName,
instance_count=helpers.IsA(int),
admin_pass=u'',
disk_config=disk_config_value,
config_drive=config_drive_value,
scheduler_hints=scheduler_hints)
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', ))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_instance_post_glance_v1(self):
self.test_launch_instance_post()
def test_launch_instance_post_no_disk_config_supported(self):
self.test_launch_instance_post(disk_config=False)
def test_launch_instance_post_no_config_drive_supported(self):
self.test_launch_instance_post(config_drive=False)
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_create',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',
'server_create',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def test_launch_instance_post_boot_from_volume(
self,
test_with_bdmv2=False
):
flavor = self.flavors.first()
keypair = self.keypairs.first()
server = self.servers.first()
volume = self.volumes.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
device_name = u'vda'
volume_choice = "%s:vol" % volume.id
if test_with_bdmv2:
volume_source_id = volume.id.split(':')[0]
block_device_mapping = None
block_device_mapping_2 = [
{'device_name': u'vda',
'source_type': 'volume',
'destination_type': 'volume',
'delete_on_termination': False,
'uuid': volume_source_id,
'boot_index': '0',
'volume_size': 1
}
]
else:
block_device_mapping = {device_name: u"%s::False" % volume_choice}
block_device_mapping_2 = None
nics = [{"net-id": self.networks.first().id, "v4-fixed-ip": ''}]
quota_usages = self.quota_usages.first()
self._mock_nova_glance_neutron_lists(return_value=test_with_bdmv2)
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': test_with_bdmv2,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': False,
})
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = []
self.mock_server_create.return_value = None
self.mock_tenant_quota_usages.return_value = quota_usages
form_data = {'flavor': flavor.id,
'source_type': 'volume_id',
'source_id': volume_choice,
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'availability_zone': avail_zone.zoneName,
'volume_size': '1',
'volume_id': volume_choice,
'device_name': device_name,
'network': self.networks.first().id,
'count': 1,
'disk_config': 'AUTO',
'config_drive': True}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self._check_nova_glance_neutron_lists(flavor_count=2, image_count=4)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 2,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.mock_server_create.assert_called_once_with(
helpers.IsHttpRequest(),
server.name,
'',
flavor.id,
keypair.name,
customization_script,
[str(sec_group.id)],
block_device_mapping=block_device_mapping,
block_device_mapping_v2=block_device_mapping_2,
nics=nics,
availability_zone=avail_zone.zoneName,
instance_count=helpers.IsA(int),
admin_pass=u'',
disk_config=u'AUTO',
config_drive=True,
scheduler_hints={})
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', ))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_instance_post_boot_from_volume_glance_v1(self):
self.test_launch_instance_post_boot_from_volume()
def test_launch_instance_post_boot_from_volume_with_bdmv2(self):
self.test_launch_instance_post_boot_from_volume(test_with_bdmv2=True)
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_create',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('server_create',
'extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',
'server_group_list',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def test_launch_instance_post_no_images_available_boot_from_volume(self):
flavor = self.flavors.first()
keypair = self.keypairs.first()
server = self.servers.first()
volume = self.volumes.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
device_name = u'vda'
volume_choice = "%s:vol" % volume.id
block_device_mapping = {device_name: u"%s::False" % volume_choice}
nics = [{"net-id": self.networks.first().id, "v4-fixed-ip": ''}]
quota_usages = self.quota_usages.first()
self._mock_nova_glance_neutron_lists()
self.mock_flavor_list.return_value = self.flavors.list()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': False,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': True,
})
self.mock_server_group_list.return_value = []
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = []
self.mock_tenant_quota_usages.return_value = quota_usages
self.mock_server_create.return_value = None
form_data = {'flavor': flavor.id,
'source_type': 'volume_id',
# 'image_id': '',
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'availability_zone': avail_zone.zoneName,
'network': self.networks.first().id,
'volume_type': 'volume_id',
'volume_id': volume_choice,
'device_name': device_name,
'count': 1,
'disk_config': 'MANUAL',
'config_drive': True}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self._check_nova_glance_neutron_lists(flavor_count=2,
image_count=4)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 2,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_server_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', ))
self.mock_server_create.assert_called_once_with(
helpers.IsHttpRequest(),
server.name,
'',
flavor.id,
keypair.name,
customization_script,
[str(sec_group.id)],
block_device_mapping=block_device_mapping,
block_device_mapping_v2=None,
nics=nics,
availability_zone=avail_zone.zoneName,
instance_count=helpers.IsA(int),
admin_pass=u'',
disk_config='MANUAL',
config_drive=True,
scheduler_hints={})
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_lnch_inst_post_no_images_avail_boot_from_volume_glance_v1(self):
self.test_launch_instance_post_no_images_available_boot_from_volume()
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list'),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def test_launch_instance_post_no_images_available(self):
flavor = self.flavors.first()
keypair = self.keypairs.first()
server = self.servers.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
quota_usages = self.quota_usages.first()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': False,
})
self.mock_tenant_quota_usages.return_value = self.quota_usages.first()
self._mock_glance_image_list_detailed([])
self._mock_neutron_network_and_port_list()
self._mock_nova_lists()
self.mock_volume_list.return_value = []
self.mock_volume_snapshot_list.return_value = []
self.mock_tenant_quota_usages.return_value = quota_usages
form_data = {'flavor': flavor.id,
'source_type': 'image_id',
'image_id': '',
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'availability_zone': avail_zone.zoneName,
'volume_type': '',
'count': 1}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertFormErrors(res, 1, "You must select an image.")
self.assertTemplateUsed(res, views.WorkflowView.template_name)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_tenant_quota_usages.assert_has_calls([
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', )),
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes')),
])
self.assertEqual(2, self.mock_tenant_quota_usages.call_count)
self._check_glance_image_list_detailed(count=5)
self._check_neutron_network_and_port_list()
self._check_nova_lists(flavor_count=3)
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.mock_tenant_quota_usages.assert_has_calls([
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', )),
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes')),
])
self.assertEqual(2, self.mock_tenant_quota_usages.call_count)
@helpers.create_mocks({
api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_create',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',
'server_group_list',
'server_create',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def test_launch_instance_post_boot_from_snapshot(
self,
test_with_bdmv2=False
):
flavor = self.flavors.first()
keypair = self.keypairs.first()
server = self.servers.first()
snapshot = self.cinder_volume_snapshots.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
device_name = u'vda'
snapshot_choice = "%s:snap" % snapshot.id
if test_with_bdmv2:
snapshot_source_id = snapshot.id.split(':')[0]
block_device_mapping = None
block_device_mapping_2 = [
{'device_name': u'vda',
'source_type': 'snapshot',
'destination_type': 'volume',
'delete_on_termination': 0,
'uuid': snapshot_source_id,
'boot_index': '0',
'volume_size': 1
}
]
else:
block_device_mapping = {device_name:
u"%s::False" % snapshot_choice}
block_device_mapping_2 = None
nics = [{"net-id": self.networks.first().id, "v4-fixed-ip": ''}]
quota_usages = self.quota_usages.first()
self._mock_nova_glance_neutron_lists(return_value=test_with_bdmv2)
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': test_with_bdmv2,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': True,
})
self.mock_server_group_list.return_value = []
volumes = [v for v in self.cinder_volumes.list()
if (getattr(v, 'bootable', 'false') == 'true')]
snapshots = [v for v in self.cinder_volume_snapshots.list()
if (v.status == AVAILABLE)]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = snapshots
self.mock_server_create.return_value = None
self.mock_tenant_quota_usages.return_value = quota_usages
form_data = {'flavor': flavor.id,
'source_type': 'volume_snapshot_id',
'source_id': snapshot_choice,
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'availability_zone': avail_zone.zoneName,
'volume_size': '1',
'volume_snapshot_id': snapshot_choice,
'device_name': device_name,
'network': self.networks.first().id,
'count': 1,
'disk_config': 'AUTO',
'config_drive': True}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 2,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_server_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.mock_server_create.assert_called_once_with(
helpers.IsHttpRequest(),
server.name,
'',
flavor.id,
keypair.name,
customization_script,
[str(sec_group.id)],
block_device_mapping=block_device_mapping,
block_device_mapping_v2=block_device_mapping_2,
nics=nics,
availability_zone=avail_zone.zoneName,
instance_count=helpers.IsA(int),
admin_pass=u'',
disk_config=u'AUTO',
config_drive=True,
scheduler_hints={})
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', ))
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_instance_post_boot_from_snapshot_glance_v1(self):
self.test_launch_instance_post_boot_from_snapshot()
def test_launch_instance_post_boot_from_snapshot_with_bdmv2(self):
self.test_launch_instance_post_boot_from_snapshot(test_with_bdmv2=True)
@helpers.create_mocks({
api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_create',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',
'server_create'),
cinder: ('volume_list',
'volume_snapshot_list'),
quotas: ('tenant_quota_usages',)})
def test_launch_instance_post_boot_from_snapshot_error(self):
flavor = self.flavors.first()
keypair = self.keypairs.first()
server = self.servers.first()
avail_zone = self.availability_zones.first()
quota_usages = self.quota_usages.first()
self.mock_image_list_detailed.return_value = [[], False, False]
self.mock_tenant_quota_usages.return_value = quota_usages
self._mock_neutron_network_and_port_list()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': False,
})
bad_snapshot_id = 'a-bogus-id'
form_data = {'flavor': flavor.id,
'source_type': 'instance_snapshot_id',
'instance_snapshot_id': bad_snapshot_id,
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'availability_zone': avail_zone.zoneName,
'network': self.networks.first().id,
'volume_id': '',
'volume_snapshot_id': '',
'image_id': '',
'device_name': 'vda',
'count': 1,
'customization_script': ''}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertFormErrors(res, 3, "You must select a snapshot.")
self.assertEqual(3, self.mock_image_list_detailed.call_count)
self.mock_image_list_detailed.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
filters={'is_public': True,
'status': 'active'}),
mock.call(helpers.IsHttpRequest(),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}),
mock.call(helpers.IsHttpRequest(),
filters={'status': 'active', 'visibility': 'shared'}),
])
self.mock_tenant_quota_usages.assert_has_calls([
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', )),
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes')),
])
self.assertEqual(2, self.mock_tenant_quota_usages.call_count)
self._check_neutron_network_and_port_list()
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
cinder: ('volume_list',
'volume_snapshot_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',),
quotas: ('tenant_quota_usages',)})
def test_launch_flavorlist_error(self):
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': False,
})
self.mock_volume_list.return_value = []
self.mock_volume_snapshot_list.return_value = []
self._mock_glance_image_list_detailed(self.versioned_images.list())
self._mock_neutron_network_and_port_list()
self.mock_tenant_quota_usages.return_value = self.quota_usages.first()
self.mock_flavor_list.side_effect = self.exceptions.nova
self.mock_keypair_list.return_value = self.keypairs.list()
self.mock_security_group_list.return_value = \
self.security_groups.list()
self.mock_availability_zone_list.return_value = \
self.availability_zones.list()
url = reverse('horizon:project:instances:launch')
res = self.client.get(url)
self.assertTemplateUsed(res, views.WorkflowView.template_name)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self._check_glance_image_list_detailed(count=5)
self._check_neutron_network_and_port_list()
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes'))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
self.mock_keypair_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_security_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_availability_zone_list.assert_called_once_with(
helpers.IsHttpRequest())
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_flavorlist_error_glance_v1(self):
self.test_launch_flavorlist_error()
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_create',
'port_delete',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',
'server_group_list',
'server_create',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def test_launch_form_keystone_exception(self):
flavor = self.flavors.first()
image = self.versioned_images.first()
keypair = self.keypairs.first()
server = self.servers.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
nics = [{"net-id": self.networks.first().id, "v4-fixed-ip": ''}]
quota_usages = self.quota_usages.first()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': False,
})
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE)]
self.mock_volume_snapshot_list.return_value = volumes
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_keypair_list.return_value = self.keypairs.list()
self.mock_security_group_list.return_value = \
self.security_groups.list()
self.mock_availability_zone_list.return_value = \
self.availability_zones.list()
self._mock_glance_image_list_detailed(self.versioned_images.list())
self._mock_neutron_network_and_port_list()
self.mock_server_create.side_effect = self.exceptions.keystone
self.mock_tenant_quota_usages.return_value = quota_usages
self.mock_flavor_list.return_value = self.flavors.list()
form_data = {'flavor': flavor.id,
'source_type': 'image_id',
'source_id': image.id,
'volume_size': '1',
'image_id': image.id,
'availability_zone': avail_zone.zoneName,
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'volume_type': '',
'network': self.networks.first().id,
'count': 1,
'admin_pass': 'password',
'confirm_admin_pass': 'password',
'disk_config': 'AUTO',
'config_drive': False}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, INDEX_URL)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
self.mock_keypair_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_security_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_availability_zone_list.assert_called_once_with(
helpers.IsHttpRequest())
self._check_glance_image_list_detailed(count=5)
self._check_neutron_network_and_port_list()
self.mock_server_create.assert_called_once_with(
helpers.IsHttpRequest(),
server.name,
image.id,
flavor.id,
keypair.name,
customization_script,
[str(sec_group.id)],
block_device_mapping=None,
block_device_mapping_v2=None,
nics=nics,
availability_zone=avail_zone.zoneName,
instance_count=helpers.IsA(int),
admin_pass='password',
disk_config='AUTO',
config_drive=False,
scheduler_hints={})
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', ))
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_form_keystone_exception_with_glance_v1(self):
self.test_launch_form_keystone_exception()
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def test_launch_form_instance_count_error(self):
flavor = self.flavors.first()
image = self.versioned_images.first()
keypair = self.keypairs.first()
server = self.servers.first()
volume = self.volumes.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
device_name = u'vda'
volume_choice = "%s:vol" % volume.id
quota_usages = self.quota_usages.first()
self._mock_nova_glance_neutron_lists()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': False,
})
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = []
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_tenant_quota_usages.return_value = quota_usages
form_data = {'flavor': flavor.id,
'source_type': 'image_id',
'image_id': image.id,
'availability_zone': avail_zone.zoneName,
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'volume_type': 'volume_id',
'volume_id': volume_choice,
'device_name': device_name,
'count': 0}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertContains(res, "greater than or equal to 1")
self._check_nova_glance_neutron_lists(flavor_count=3,
image_count=6)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 3,
mock.call(helpers.IsHttpRequest()))
self.mock_tenant_quota_usages.assert_has_calls([
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', )),
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes')),
])
self.assertEqual(2, self.mock_tenant_quota_usages.call_count)
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_form_instance_count_error_glance_v1(self):
self.test_launch_form_instance_count_error()
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'server_group_list',
'availability_zone_list',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def _test_launch_form_count_error(self, resource, avail):
flavor = self.flavors.first()
image = self.versioned_images.first()
keypair = self.keypairs.first()
server = self.servers.first()
volume = self.volumes.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
device_name = u'vda'
volume_choice = "%s:vol" % volume.id
quota_usages = self.quota_usages.first()
if resource == 'both':
quota_usages['cores']['available'] = avail
quota_usages['ram']['available'] = 512
else:
quota_usages[resource]['available'] = avail
self._mock_nova_glance_neutron_lists()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': True,
})
self.mock_server_group_list.return_value = self.server_groups.list()
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = []
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_tenant_quota_usages.return_value = quota_usages
form_data = {'flavor': flavor.id,
'source_type': 'image_id',
'image_id': image.id,
'availability_zone': avail_zone.zoneName,
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'volume_type': 'volume_id',
'volume_id': volume_choice,
'device_name': device_name,
'count': 2}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
if resource == 'ram':
msg = ("The following requested resource(s) exceed quota(s): "
"RAM(Available: %s" % avail)
if resource == 'cores':
msg = ("The following requested resource(s) exceed quota(s): "
"Cores(Available: %s" % avail)
if resource == 'both':
msg = ("The following requested resource(s) exceed quota(s): "
"Cores(Available: %(avail)s, Requested: 2), RAM(Available: "
"512, Requested: 1024)" % {'avail': avail})
self.assertContains(res, msg)
self._check_nova_glance_neutron_lists(flavor_count=3,
image_count=6)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_server_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 3,
mock.call(helpers.IsHttpRequest()))
self.mock_tenant_quota_usages.assert_has_calls([
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', )),
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes')),
])
self.assertEqual(2, self.mock_tenant_quota_usages.call_count)
def test_launch_form_cores_count_error_glance_v2(self):
self._test_launch_form_count_error('cores', 1)
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_form_cores_count_error_glance_v1(self):
self._test_launch_form_count_error('cores', 1)
def test_launch_form_ram_count_error(self):
self._test_launch_form_count_error('ram', 512)
def test_launch_form_ram_cores_count_error(self):
self._test_launch_form_count_error('both', 1)
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def _launch_form_instance(self, image, flavor, keypair=None):
server = self.servers.first()
volume = self.volumes.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
device_name = u'vda'
volume_choice = "%s:vol" % volume.id
quota_usages = self.quota_usages.first()
self._mock_nova_glance_neutron_lists()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': False,
})
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = []
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_tenant_quota_usages.return_value = quota_usages
form_data = {'flavor': flavor.id,
'source_type': 'image_id',
'image_id': image.id,
'availability_zone': avail_zone.zoneName,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'volume_type': 'volume_id',
'volume_id': volume_choice,
'device_name': device_name,
'count': 1}
if keypair:
form_data['keypair'] = keypair.name
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self._check_nova_glance_neutron_lists(flavor_count=3,
image_count=6)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 3,
mock.call(helpers.IsHttpRequest()))
self.mock_tenant_quota_usages.assert_has_calls([
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', )),
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes')),
])
self.assertEqual(2, self.mock_tenant_quota_usages.call_count)
return res
def test_launch_form_instance_requirement_error_disk(self):
flavor = self.flavors.get(id="bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb")
image = self.versioned_images.first()
image.min_ram = flavor.ram
image.min_disk = flavor.disk + 1
keypair = self.keypairs.first()
res = self._launch_form_instance(image, flavor, keypair)
msg = "The flavor '%s' is too small" % flavor.name
self.assertContains(res, msg)
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_form_instance_requirement_error_disk_glance_v1(self):
self.test_launch_form_instance_requirement_error_disk()
def test_launch_form_instance_requirement_error_ram(self):
flavor = self.flavors.first()
image = self.versioned_images.first()
image.min_ram = flavor.ram + 1
image.min_disk = flavor.disk
keypair = self.keypairs.first()
res = self._launch_form_instance(image, flavor, keypair)
msg = "The flavor '%s' is too small" % flavor.name
self.assertContains(res, msg)
def test_launch_form_instance_zero_value_flavor_with_min_req(self):
flavor = self.flavors.first()
image = self.versioned_images.first()
image.min_ram = flavor.ram
image.min_disk = flavor.disk + 1
keypair = self.keypairs.first()
res = self._launch_form_instance(image, flavor, keypair)
msg = "The flavor '%s' is too small" % flavor.name
self.assertNotContains(res, msg)
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def _test_launch_form_instance_show_device_name(self, device_name,
widget_class,
widget_attrs):
flavor = self.flavors.first()
image = self.versioned_images.first()
keypair = self.keypairs.first()
server = self.servers.first()
volume = self.volumes.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
volume_choice = "%s:vol" % volume.id
quota_usages = self.quota_usages.first()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': False,
})
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_keypair_list.return_value = self.keypairs.list()
self.mock_security_group_list.return_value = \
self.security_groups.list()
self.mock_availability_zone_list.return_value = \
self.availability_zones.list()
self.mock_image_list_detailed.side_effect = [
[self.versioned_images.list(), False, False],
[[], False, False],
]
self.mock_network_list.side_effect = [
self.networks.list()[:1],
self.networks.list()[1:],
self.networks.list()[:1],
self.networks.list()[1:],
]
self.mock_port_list_with_trunk_types.return_value = self.ports.list()
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = []
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_tenant_quota_usages.return_value = quota_usages
form_data = {'flavor': flavor.id,
'source_type': 'volume_image_id',
'image_id': image.id,
'availability_zone': avail_zone.zoneName,
'keypair': keypair.name,
'name': server.name,
'customization_script': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'volume_type': 'volume_id',
'volume_id': volume_choice,
'volume_size': max(
image.min_disk, image.size // 1024 ** 3),
'device_name': device_name,
'count': 1}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
widget_content = widget_class().render(**widget_attrs)
# In django 1.4, the widget's html attributes are not always rendered
# in the same order and checking the fully rendered widget fails.
for widget_part in widget_content.split():
self.assertContains(res, widget_part)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 3,
mock.call(helpers.IsHttpRequest()))
self.mock_keypair_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_security_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_availability_zone_list.assert_called_once_with(
helpers.IsHttpRequest())
self.assertEqual(6, self.mock_image_list_detailed.call_count)
self.mock_image_list_detailed.assert_has_calls(
[
mock.call(helpers.IsHttpRequest(),
filters={'is_public': True,
'status': 'active'}),
mock.call(helpers.IsHttpRequest(),
filters={'property-owner_id': self.tenant.id,
'status': 'active'})
] +
[
mock.call(helpers.IsHttpRequest(),
filters={'status': 'active',
'visibility': 'shared'})
] * 3
)
self.assertEqual(4, self.mock_network_list.call_count)
self.mock_network_list.assert_has_calls([
mock.call(
helpers.IsHttpRequest(),
tenant_id=self.tenant.id,
shared=False),
mock.call(
helpers.IsHttpRequest(),
shared=True),
mock.call(
helpers.IsHttpRequest(),
tenant_id=self.tenant.id,
shared=False),
mock.call(
helpers.IsHttpRequest(),
shared=True),
])
self.assertEqual(len(self.networks.list()),
self.mock_port_list_with_trunk_types.call_count)
self.mock_port_list_with_trunk_types.assert_has_calls(
[mock.call(helpers.IsHttpRequest(),
network_id=net.id,
tenant_id=self.tenant.id)
for net in self.networks.list()])
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 3,
mock.call(helpers.IsHttpRequest()))
self.mock_tenant_quota_usages.assert_has_calls([
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', )),
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes')),
])
self.assertEqual(2, self.mock_tenant_quota_usages.call_count)
@override_settings(
OPENSTACK_HYPERVISOR_FEATURES={'can_set_mount_point': True},)
def test_launch_form_instance_device_name_showed(self):
self._test_launch_form_instance_show_device_name(
u'vda', widgets.TextInput, {
'name': 'device_name', 'value': 'vda',
'attrs': {'id': 'id_device_name'}}
)
@override_settings(
OPENSTACK_HYPERVISOR_FEATURES={'can_set_mount_point': True},
OPENSTACK_API_VERSIONS={'image': 1}
)
def test_launch_form_instance_device_name_showed_glance_v1(self):
self._test_launch_form_instance_show_device_name(
u'vda', widgets.TextInput, {
'name': 'device_name', 'value': 'vda',
'attrs': {'id': 'id_device_name'}}
)
@django.test.utils.override_settings(
OPENSTACK_HYPERVISOR_FEATURES={'can_set_mount_point': False})
def test_launch_form_instance_device_name_hidden(self):
self._test_launch_form_instance_show_device_name(
u'', widgets.HiddenInput, {
'name': 'device_name', 'value': '',
'attrs': {'id': 'id_device_name'}}
)
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'server_group_list',
'availability_zone_list',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def _test_launch_form_instance_volume_size(self, image, volume_size, msg,
avail_volumes=None):
flavor = self.flavors.get(name='m1.massive')
keypair = self.keypairs.first()
server = self.servers.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
device_name = u'vda'
quota_usages = self.quota_usages.first()
quota_usages['cores']['available'] = 2000
if avail_volumes is not None:
quota_usages['volumes']['available'] = avail_volumes
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': True,
})
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_keypair_list.return_value = self.keypairs.list()
self.mock_security_group_list.return_value = \
self.security_groups.list()
self.mock_availability_zone_list.return_value = \
self.availability_zones.list()
self._mock_glance_image_list_detailed(self.versioned_images.list())
self._mock_neutron_network_and_port_list()
self.mock_server_group_list.return_value = self.server_groups.list()
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = []
self.mock_tenant_quota_usages.return_value = quota_usages
form_data = {
'flavor': flavor.id,
'source_type': 'volume_image_id',
'image_id': image.id,
'availability_zone': avail_zone.zoneName,
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'volume_size': volume_size,
'device_name': device_name,
'count': 1
}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertContains(res, msg)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_keypair_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_security_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_availability_zone_list.assert_called_once_with(
helpers.IsHttpRequest())
if avail_volumes is None:
image_list_count = 6
else:
image_list_count = 5
self._check_glance_image_list_detailed(count=image_list_count)
self._check_neutron_network_and_port_list()
self.mock_server_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
if avail_volumes is None:
flavor_list_count = 3
else:
flavor_list_count = 2
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, flavor_list_count,
mock.call(helpers.IsHttpRequest()))
self.mock_tenant_quota_usages.assert_has_calls([
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', )),
mock.call(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes')),
])
self.assertEqual(2, self.mock_tenant_quota_usages.call_count)
def test_launch_form_instance_volume_size_error(self):
image = self.versioned_images.get(name='protected_images')
volume_size = image.min_disk // 2
msg = ("The Volume size is too small for the '%s' image" %
image.name)
self._test_launch_form_instance_volume_size(image, volume_size, msg)
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_form_instance_volume_size_error_glance_v1(self):
self.test_launch_form_instance_volume_size_error()
def test_launch_form_instance_non_int_volume_size(self):
image = self.versioned_images.get(name='protected_images')
msg = "Enter a whole number."
self._test_launch_form_instance_volume_size(image, 1.5, msg)
def test_launch_form_instance_volume_exceed_quota(self):
image = self.versioned_images.get(name='protected_images')
msg = "Requested volume exceeds quota: Available: 0, Requested: 1"
self._test_launch_form_instance_volume_size(image, image.min_disk,
msg, 0)
@helpers.create_mocks({
api.nova: ('flavor_list', 'server_list', 'tenant_absolute_limits',
'extension_supported', 'is_feature_available',),
api.glance: ('image_list_detailed',),
api.neutron: ('floating_ip_simple_associate_supported',
'floating_ip_supported',),
api.network: ('servers_update_addresses',),
})
def test_launch_button_attributes(self):
servers = self.servers.list()
limits = self.limits['absolute']
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_tenant_absolute_limits.return_value = limits
self.mock_floating_ip_supported.return_value = True
self.mock_floating_ip_simple_associate_supported.return_value = True
tables.LaunchLink()
res = self.client.get(INDEX_URL)
launch_action = self.getAndAssertTableAction(res, 'instances',
'launch-ng')
self.assertEqual(set(['btn-launch']),
set(launch_action.classes))
self.assertEqual('Launch Instance', launch_action.verbose_name)
self.assertEqual((('compute', 'os_compute_api:servers:create'),),
launch_action.policy_rules)
self._check_extension_supported({'AdminActions': 16,
'Shelve': 4})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 8,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_absolute_limits, 3,
mock.call(helpers.IsHttpRequest(), reserved=True))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_supported, 8,
mock.call(helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_simple_associate_supported, 4,
mock.call(helpers.IsHttpRequest()))
@helpers.create_mocks({
api.nova: ('flavor_list', 'server_list', 'tenant_absolute_limits',
'extension_supported', 'is_feature_available',),
api.glance: ('image_list_detailed',),
api.neutron: ('floating_ip_simple_associate_supported',
'floating_ip_supported',),
api.network: ('servers_update_addresses',),
})
def test_launch_button_disabled_when_quota_exceeded(self):
servers = self.servers.list()
limits = self.limits['absolute']
limits['totalInstancesUsed'] = limits['maxTotalInstances']
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_tenant_absolute_limits.return_value = limits
self.mock_floating_ip_supported.return_value = True
self.mock_floating_ip_simple_associate_supported.return_value = True
tables.LaunchLink()
res = self.client.get(INDEX_URL)
launch_action = self.getAndAssertTableAction(
res, 'instances', 'launch-ng')
self.assertIn('disabled', launch_action.classes,
'The launch button should be disabled')
self.assertEqual('Launch Instance (Quota exceeded)',
six.text_type(launch_action.verbose_name))
self._check_extension_supported({'AdminActions': 16,
'Shelve': 4})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 8,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_absolute_limits, 3,
mock.call(helpers.IsHttpRequest(), reserved=True))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_supported, 8,
mock.call(helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_simple_associate_supported, 4,
mock.call(helpers.IsHttpRequest()))
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list',
'server_group_list',
'tenant_absolute_limits',
'server_create',),
cinder: ('volume_list',
'volume_snapshot_list',),
quotas: ('tenant_quota_usages',)})
def test_launch_with_empty_device_name_allowed(self):
flavor = self.flavors.get(name='m1.massive')
image = self.versioned_images.first()
keypair = self.keypairs.first()
server = self.servers.first()
sec_group = self.security_groups.first()
avail_zone = self.availability_zones.first()
customization_script = 'user data'
nics = [{'net-id': self.networks.first().id, 'v4-fixed-ip': ''}]
device_name = u''
quota_usages = self.quota_usages.first()
quota_usages['cores']['available'] = 2000
device_mapping_v2 = [{'device_name': None, # device_name must be None
'source_type': 'image',
'destination_type': 'volume',
'delete_on_termination': False,
'uuid': image.id,
'boot_index': '0',
'volume_size': image.size}]
self._mock_nova_glance_neutron_lists()
self._mock_extension_supported({
'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': True,
})
self.mock_server_group_list.return_value = []
volumes = [v for v in self.volumes.list()
if (v.status == AVAILABLE and v.bootable == 'true')]
self.mock_volume_list.return_value = volumes
self.mock_volume_snapshot_list.return_value = []
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_tenant_quota_usages.return_value = quota_usages
self.mock_server_create.return_value = None
form_data = {
'flavor': flavor.id,
'source_type': 'volume_image_id',
'image_id': image.id,
'availability_zone': avail_zone.zoneName,
'keypair': keypair.name,
'name': server.name,
'script_source': 'raw',
'script_data': customization_script,
'project_id': self.tenants.first().id,
'user_id': self.user.id,
'groups': str(sec_group.id),
'volume_size': image.size,
'device_name': device_name,
'network': self.networks.first().id,
'count': 1
}
url = reverse('horizon:project:instances:launch')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self._check_nova_glance_neutron_lists(flavor_count=2,
image_count=5)
self._check_extension_supported({
'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1,
})
self.mock_server_group_list.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(),
search_opts=SNAPSHOT_SEARCH_OPTS)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', ))
self.mock_server_create.assert_called_once_with(
helpers.IsHttpRequest(),
server.name,
'',
flavor.id,
keypair.name,
customization_script,
[str(sec_group.id)],
block_device_mapping=None,
block_device_mapping_v2=device_mapping_v2,
nics=nics,
availability_zone=avail_zone.zoneName,
instance_count=helpers.IsA(int),
admin_pass=u'',
config_drive=False,
disk_config=u'',
scheduler_hints={})
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_launch_with_empty_device_name_allowed_glance_v1(self):
self.test_launch_with_empty_device_name_allowed()
class InstanceTests2(InstanceTestBase, InstanceTableTestMixin):
@helpers.create_mocks({
api.nova: ('flavor_list', 'server_list', 'tenant_absolute_limits',
'extension_supported', 'is_feature_available',),
api.glance: ('image_list_detailed',),
api.neutron: ('floating_ip_simple_associate_supported',
'floating_ip_supported',),
api.network: ('servers_update_addresses',),
})
def test_index_options_after_migrate(self):
servers = self.servers.list()
server = self.servers.first()
server.status = "VERIFY_RESIZE"
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_tenant_absolute_limits.return_value = self.limits['absolute']
self.mock_floating_ip_supported.return_value = True
self.mock_floating_ip_simple_associate_supported.return_value = True
res = self.client.get(INDEX_URL)
self.assertContains(res, "instances__confirm")
self.assertContains(res, "instances__revert")
self._check_extension_supported({'AdminActions': 16,
'Shelve': 4})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 8,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(
helpers.IsHttpRequest(), search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_absolute_limits, 2,
mock.call(helpers.IsHttpRequest(), reserved=True))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_supported, 8,
mock.call(helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_simple_associate_supported, 4,
mock.call(helpers.IsHttpRequest()))
@helpers.create_mocks({api.nova: ('extension_supported',
'is_feature_available',
'flavor_list',
'keypair_list',
'availability_zone_list'),
cinder: ('volume_snapshot_list',
'volume_list',),
api.neutron: ('network_list',
'port_list_with_trunk_types',
'security_group_list',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_quota_usages',)})
def test_select_default_keypair_if_only_one(self):
keypair = self.keypairs.first()
self.mock_volume_list.return_value = []
self.mock_volume_snapshot_list.return_value = []
self._mock_glance_image_list_detailed(self.versioned_images.list())
self._mock_neutron_network_and_port_list()
self.mock_tenant_quota_usages.return_value = self.quota_usages.first()
self._mock_extension_supported({'BlockDeviceMappingV2Boot': True,
'DiskConfig': True,
'ConfigDrive': True,
'ServerGroups': False})
self._mock_nova_lists()
url = reverse('horizon:project:instances:launch')
res = self.client.get(url)
self.assertContains(
res, "<option selected='selected' value='%(key)s'>"
"%(key)s</option>" % {'key': keypair.name},
html=True,
msg_prefix="The default key pair was not selected.")
self.mock_volume_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_SEARCH_OPTS),
mock.call(helpers.IsHttpRequest(),
search_opts=VOLUME_BOOTABLE_SEARCH_OPTS),
])
self.mock_volume_snapshot_list.assert_called_once_with(
helpers.IsHttpRequest(), search_opts=SNAPSHOT_SEARCH_OPTS)
self._check_glance_image_list_detailed(count=5)
self._check_neutron_network_and_port_list()
self.mock_tenant_quota_usages.assert_called_once_with(
helpers.IsHttpRequest(),
targets=('instances', 'cores', 'ram', 'volumes', 'gigabytes'))
self._check_extension_supported({'BlockDeviceMappingV2Boot': 1,
'DiskConfig': 1,
'ConfigDrive': 1,
'ServerGroups': 1})
self._check_nova_lists(flavor_count=2)
@override_settings(OPENSTACK_API_VERSIONS={'image': 1})
def test_select_default_keypair_if_only_one_glance_v1(self):
self.test_select_default_keypair_if_only_one()
@helpers.create_mocks({
api.neutron: ('floating_ip_target_list_by_instance',
'tenant_floating_ip_list',
'floating_ip_disassociate',
'tenant_floating_ip_release'),
})
def _test_disassociate_floating_ip(self, is_release):
servers = self.servers.list()
server = servers[0]
port = [p for p in self.ports.list() if p.device_id == server.id][0]
fip_target = api.neutron.FloatingIpTarget(
port, port['fixed_ips'][0]['ip_address'], server.name)
fip = self.floating_ips.first()
fip.port_id = port.id
self.mock_floating_ip_target_list_by_instance.return_value = \
[fip_target]
self.mock_tenant_floating_ip_list.return_value = [fip]
self.mock_floating_ip_disassociate.return_value = None
self.mock_tenant_floating_ip_release.return_value = None
url = reverse('horizon:project:instances:disassociate',
args=[server.id])
form_data = {'fip': fip.id,
'is_release': is_release}
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_floating_ip_target_list_by_instance.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_tenant_floating_ip_list.assert_called_once_with(
helpers.IsHttpRequest())
if is_release:
self.mock_floating_ip_disassociate.assert_not_called()
self.mock_tenant_floating_ip_release.assert_called_once_with(
helpers.IsHttpRequest(), fip.id)
else:
self.mock_floating_ip_disassociate.assert_called_once_with(
helpers.IsHttpRequest(), fip.id)
self.mock_tenant_floating_ip_release.assert_not_called()
@helpers.create_mocks({api.neutron: ('floating_ip_disassociate',)})
def test_disassociate_floating_ip(self):
self._test_disassociate_floating_ip(is_release=False)
@helpers.create_mocks({api.neutron: ('tenant_floating_ip_release',)})
def test_disassociate_floating_ip_with_release(self):
self._test_disassociate_floating_ip(is_release=True)
@helpers.create_mocks({api.nova: ('server_get',
'flavor_list',
'tenant_absolute_limits',
'is_feature_available',
'extension_supported')})
def test_instance_resize_get(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_tenant_absolute_limits.return_value = self.limits['absolute']
self._mock_extension_supported({'DiskConfig': True,
'ServerGroups': False})
url = reverse('horizon:project:instances:resize', args=[server.id])
res = self.client.get(url)
self.assertTemplateUsed(res, views.WorkflowView.template_name)
config_drive_field_label = 'Configuration Drive'
self.assertNotContains(res, config_drive_field_label)
option = '<option value="%s">%s</option>'
for flavor in self.flavors.list():
if flavor.id == server.flavor['id']:
self.assertNotContains(res, option % (flavor.id, flavor.name))
else:
self.assertContains(res, option % (flavor.id, flavor.name))
self.mock_server_get.assert_called_once_with(helpers.IsHttpRequest(),
server.id)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
self.mock_tenant_absolute_limits.assert_called_once_with(
helpers.IsHttpRequest(), reserved=True)
self._check_extension_supported({'DiskConfig': 1,
'ServerGroups': 1})
@helpers.create_mocks({api.nova: ('server_get',)})
def test_instance_resize_get_server_get_exception(self):
server = self.servers.first()
self.mock_server_get.side_effect = self.exceptions.nova
url = reverse('horizon:project:instances:resize',
args=[server.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_get',
'flavor_list',)})
def test_instance_resize_get_flavor_list_exception(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_flavor_list.side_effect = self.exceptions.nova
url = reverse('horizon:project:instances:resize',
args=[server.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(helpers.IsHttpRequest(),
server.id)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
@helpers.create_mocks({api.nova: ('server_get',
'flavor_list',
'flavor_get',
'tenant_absolute_limits',
'is_feature_available',
'extension_supported')})
def test_instance_resize_get_current_flavor_not_found(self):
server = self.servers.first()
self.mock_server_get.return_value = server
self.mock_flavor_list.return_value = []
self.mock_flavor_get.side_effect = self.exceptions.nova
self.mock_tenant_absolute_limits.return_value = self.limits['absolute']
self._mock_extension_supported({'DiskConfig': True,
'ServerGroups': False})
url = reverse('horizon:project:instances:resize', args=[server.id])
res = self.client.get(url)
self.assertTemplateUsed(res, views.WorkflowView.template_name)
self.mock_server_get.assert_called_once_with(helpers.IsHttpRequest(),
server.id)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
self.mock_flavor_get.assert_called_once_with(
helpers.IsHttpRequest(), server.flavor['id'])
self.mock_tenant_absolute_limits.assert_called_once_with(
helpers.IsHttpRequest(), reserved=True)
self._check_extension_supported({'DiskConfig': 1,
'ServerGroups': 1})
def _instance_resize_post(self, server_id, flavor_id, disk_config):
formData = {'flavor': flavor_id,
'default_role': 'member',
'disk_config': disk_config}
url = reverse('horizon:project:instances:resize',
args=[server_id])
return self.client.post(url, formData)
instance_resize_post_stubs = {
api.nova: ('server_get', 'server_resize',
'flavor_list', 'flavor_get',
'is_feature_available',
'extension_supported')}
@helpers.create_mocks(instance_resize_post_stubs)
def test_instance_resize_post(self):
server = self.servers.first()
flavors = [flavor for flavor in self.flavors.list()
if flavor.id != server.flavor['id']]
flavor = flavors[0]
self.mock_server_get.return_value = server
self.mock_flavor_list.return_value = self.flavors.list()
self._mock_extension_supported({'DiskConfig': True,
'ServerGroups': False})
self.mock_server_resize.return_value = []
res = self._instance_resize_post(server.id, flavor.id, u'AUTO')
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(helpers.IsHttpRequest(),
server.id)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self._check_extension_supported({'DiskConfig': 1,
'ServerGroups': 1})
self.mock_server_resize.assert_called_once_with(
helpers.IsHttpRequest(), server.id, flavor.id, 'AUTO')
@helpers.create_mocks(instance_resize_post_stubs)
def test_instance_resize_post_api_exception(self):
server = self.servers.first()
flavors = [flavor for flavor in self.flavors.list()
if flavor.id != server.flavor['id']]
flavor = flavors[0]
self.mock_server_get.return_value = server
self.mock_flavor_list.return_value = self.flavors.list()
self._mock_extension_supported({'DiskConfig': True,
'ServerGroups': False})
self.mock_server_resize.side_effect = self.exceptions.nova
res = self._instance_resize_post(server.id, flavor.id, 'AUTO')
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(helpers.IsHttpRequest(),
server.id)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self._check_extension_supported({'DiskConfig': 1,
'ServerGroups': 1})
self.mock_server_resize.assert_called_once_with(
helpers.IsHttpRequest(), server.id, flavor.id, 'AUTO')
@helpers.create_mocks({api.glance: ('image_list_detailed',),
api.nova: ('server_get',
'extension_supported',
'is_feature_available',)})
def test_rebuild_instance_get(self, expect_password_fields=True):
server = self.servers.first()
self._mock_glance_image_list_detailed(self.images.list())
self.mock_extension_supported.return_value = True
self.mock_is_feature_available.return_value = False
self.mock_server_get.return_value = server
url = reverse('horizon:project:instances:rebuild', args=[server.id])
res = self.client.get(url)
self.assertTemplateUsed(res, 'project/instances/rebuild.html')
password_field_label = 'Rebuild Password'
if expect_password_fields:
self.assertContains(res, password_field_label)
else:
self.assertNotContains(res, password_field_label)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self._check_glance_image_list_detailed(count=3)
self.mock_extension_supported.assert_called_once_with(
'DiskConfig', helpers.IsHttpRequest())
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@django.test.utils.override_settings(
OPENSTACK_HYPERVISOR_FEATURES={'can_set_password': False})
def test_rebuild_instance_get_without_set_password(self):
self.test_rebuild_instance_get(expect_password_fields=False)
def _instance_rebuild_post(self, server_id, image_id,
password=None, confirm_password=None,
disk_config=None):
form_data = {'instance_id': server_id,
'image': image_id,
'disk_config': disk_config}
if password is not None:
form_data.update(password=password)
if confirm_password is not None:
form_data.update(confirm_password=confirm_password)
url = reverse('horizon:project:instances:rebuild',
args=[server_id])
return self.client.post(url, form_data)
instance_rebuild_post_stubs = {
api.nova: ('server_get',
'server_rebuild',
'extension_supported',
'is_feature_available',),
api.glance: ('image_list_detailed',)}
@helpers.create_mocks(instance_rebuild_post_stubs)
def test_rebuild_instance_post_with_password(self):
server = self.servers.first()
image = self.images.first()
password = u'testpass'
self.mock_server_get.return_value = server
self._mock_glance_image_list_detailed(self.images.list())
self.mock_extension_supported.return_value = True
self.mock_server_rebuild.return_value = []
self.mock_is_feature_available.return_value = False
res = self._instance_rebuild_post(server.id, image.id,
password=password,
confirm_password=password,
disk_config='AUTO')
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self._check_glance_image_list_detailed(count=3)
self.mock_extension_supported.assert_called_once_with(
'DiskConfig', helpers.IsHttpRequest())
self.mock_server_rebuild.assert_called_once_with(
helpers.IsHttpRequest(), server.id, image.id, password, 'AUTO',
description=None)
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@helpers.create_mocks(instance_rebuild_post_stubs)
def test_rebuild_instance_post_with_password_equals_none(self):
server = self.servers.first()
image = self.images.first()
self.mock_server_get.return_value = server
self._mock_glance_image_list_detailed(self.images.list())
self.mock_extension_supported.return_value = True
self.mock_server_rebuild.side_effect = self.exceptions.nova
self.mock_is_feature_available.return_value = False
res = self._instance_rebuild_post(server.id, image.id,
password=None,
confirm_password=None,
disk_config='AUTO')
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self._check_glance_image_list_detailed(count=3)
self.mock_extension_supported.assert_called_once_with(
'DiskConfig', helpers.IsHttpRequest())
self.mock_server_rebuild.assert_called_once_with(
helpers.IsHttpRequest(), server.id, image.id, None, 'AUTO',
description=None)
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@helpers.create_mocks(instance_rebuild_post_stubs)
def test_rebuild_instance_post_password_do_not_match(self):
server = self.servers.first()
image = self.images.first()
pass1 = u'somepass'
pass2 = u'notsomepass'
self.mock_server_get.return_value = server
self._mock_glance_image_list_detailed(self.images.list())
self.mock_extension_supported.return_value = True
self.mock_is_feature_available.return_value = False
res = self._instance_rebuild_post(server.id, image.id,
password=pass1,
confirm_password=pass2,
disk_config='MANUAL')
self.assertEqual(res.context['form'].errors['__all__'],
["Passwords do not match."])
if django.VERSION >= (1, 9):
image_list_count = 6
ext_count = 2
else:
image_list_count = 3
ext_count = 1
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self._check_glance_image_list_detailed(count=image_list_count)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_extension_supported, ext_count,
mock.call('DiskConfig', helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 2,
mock.call(helpers.IsHttpRequest(), 'instance_description'))
@helpers.create_mocks(instance_rebuild_post_stubs)
def test_rebuild_instance_post_with_empty_string(self):
server = self.servers.first()
image = self.images.first()
self.mock_server_get.return_value = server
self._mock_glance_image_list_detailed(self.images.list())
self.mock_extension_supported.return_value = True
self.mock_server_rebuild.return_value = []
self.mock_is_feature_available.return_value = False
res = self._instance_rebuild_post(server.id, image.id,
password=u'',
confirm_password=u'',
disk_config=u'AUTO')
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self._check_glance_image_list_detailed(count=3)
self.mock_extension_supported.assert_called_once_with(
'DiskConfig', helpers.IsHttpRequest())
self.mock_server_rebuild.assert_called_once_with(
helpers.IsHttpRequest(), server.id, image.id, None, 'AUTO',
description=None)
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@helpers.create_mocks(instance_rebuild_post_stubs)
def test_rebuild_instance_post_with_desc(self):
server = self.servers.first()
image = self.images.first()
test_description = 'test description'
self.mock_server_get.return_value = server
self._mock_glance_image_list_detailed(self.images.list())
self.mock_extension_supported.return_value = True
self.mock_server_rebuild.return_value = []
self.mock_is_feature_available.return_value = True
form_data = {'instance_id': server.id,
'image': image.id,
'description': test_description}
url = reverse('horizon:project:instances:rebuild',
args=[server.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self._check_glance_image_list_detailed(count=3)
self.mock_extension_supported.assert_called_once_with(
'DiskConfig', helpers.IsHttpRequest())
self.mock_server_rebuild.assert_called_once_with(
helpers.IsHttpRequest(), server.id, image.id, None, '',
description=test_description)
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@helpers.create_mocks(instance_rebuild_post_stubs)
def test_rebuild_instance_post_api_exception(self):
server = self.servers.first()
image = self.images.first()
password = u'testpass'
self.mock_server_get.return_value = server
self._mock_glance_image_list_detailed(self.images.list())
self.mock_extension_supported.return_value = True
self.mock_server_rebuild.side_effect = self.exceptions.nova
self.mock_is_feature_available.return_value = False
res = self._instance_rebuild_post(server.id, image.id,
password=password,
confirm_password=password,
disk_config='AUTO')
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_server_get.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self._check_glance_image_list_detailed(count=3)
self.mock_extension_supported.assert_called_once_with(
'DiskConfig', helpers.IsHttpRequest())
self.mock_server_rebuild.assert_called_once_with(
helpers.IsHttpRequest(), server.id, image.id, password, 'AUTO',
description=None)
self.mock_is_feature_available.assert_called_once_with(
helpers.IsHttpRequest(), "instance_description"
)
@django.test.utils.override_settings(API_RESULT_PAGE_SIZE=2)
@helpers.create_mocks({
api.nova: ('flavor_list', 'server_list', 'tenant_absolute_limits',
'extension_supported', 'is_feature_available',),
api.glance: ('image_list_detailed',),
api.neutron: ('floating_ip_simple_associate_supported',
'floating_ip_supported',),
api.network: ('servers_update_addresses',),
})
def test_index_form_action_with_pagination(self):
# The form action on the next page should have marker
# object from the previous page last element.
page_size = getattr(settings, 'API_RESULT_PAGE_SIZE', 2)
servers = self.servers.list()[:3]
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_list.side_effect = [
[servers[:page_size], True],
[servers[page_size:], False]
]
self.mock_servers_update_addresses.return_value = None
self.mock_tenant_absolute_limits.return_value = self.limits['absolute']
self.mock_floating_ip_supported.return_value = True
self.mock_floating_ip_simple_associate_supported.return_value = True
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
# get first page with 2 items
self.assertEqual(len(res.context['instances_table'].data), page_size)
# update INDEX_URL with marker object
params = "=".join([tables.InstancesTable._meta.pagination_param,
servers[page_size - 1].id])
next_page_url = "?".join([reverse('horizon:project:instances:index'),
params])
form_action = 'action="%s"' % next_page_url
res = self.client.get(next_page_url)
# get next page with remaining items (item 3)
self.assertEqual(len(res.context['instances_table'].data), 1)
# ensure that marker object exists in form action
self.assertContains(res, form_action, count=1)
self._check_extension_supported({'AdminActions': 12,
'Shelve': 3})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 6,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_flavor_list, 2,
mock.call(helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_image_list_detailed, 2,
mock.call(helpers.IsHttpRequest()))
self.mock_server_list.assert_has_calls([
mock.call(helpers.IsHttpRequest(),
search_opts={'marker': None, 'paginate': True}),
mock.call(helpers.IsHttpRequest(),
search_opts={'marker': servers[page_size - 1].id,
'paginate': True}),
])
self.assertEqual(2, self.mock_server_list.call_count)
self.mock_servers_update_addresses.assert_has_calls([
mock.call(helpers.IsHttpRequest(), servers[:page_size]),
mock.call(helpers.IsHttpRequest(), servers[page_size:]),
])
self.assertEqual(2, self.mock_servers_update_addresses.call_count)
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_absolute_limits, 4,
mock.call(helpers.IsHttpRequest(), reserved=True))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_supported, 6,
mock.call(helpers.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_floating_ip_simple_associate_supported, 3,
mock.call(helpers.IsHttpRequest()))
@django.test.utils.override_settings(API_RESULT_PAGE_SIZE=2)
@helpers.create_mocks({api.nova: ('server_list',
'flavor_list',
'server_delete',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_delete_instance_with_pagination(self):
# Instance should be deleted from the next page.
page_size = getattr(settings, 'API_RESULT_PAGE_SIZE', 2)
servers = self.servers.list()[:3]
server = servers[-1]
self.mock_server_list.return_value = [servers[page_size:], False]
self.mock_servers_update_addresses.return_value = None
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_delete.return_value = None
# update INDEX_URL with marker object
params = "=".join([tables.InstancesTable._meta.pagination_param,
servers[page_size - 1].id])
next_page_url = "?".join([reverse('horizon:project:instances:index'),
params])
formData = {'action': 'instances__delete__%s' % server.id}
res = self.client.post(next_page_url, formData)
self.assertRedirectsNoFollow(res, next_page_url)
self.assertMessageCount(success=1)
search_opts = {'marker': servers[page_size - 1].id, 'paginate': True}
self.mock_server_list.assert_called_once_with(
helpers.IsHttpRequest(), search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers[page_size:])
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_server_delete.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
class SimpleFile(object):
def __init__(self, name, data, size):
self.name = name
self.data = data
self._size = size
def read(self):
return self.data
def test_clean_file_upload_form_oversize_data(self):
t = workflows.create_instance.CustomizeAction(self.request, {})
upload_str = 'user data'
files = {'script_upload':
self.SimpleFile('script_name',
upload_str,
(16 * 1024) + 1)}
self.assertRaises(
forms.ValidationError,
t.clean_uploaded_files,
'script',
files)
def test_clean_file_upload_form_invalid_data(self):
t = workflows.create_instance.CustomizeAction(self.request, {})
upload_str = b'\x81'
files = {'script_upload':
self.SimpleFile('script_name',
upload_str,
sys.getsizeof(upload_str))}
self.assertRaises(
forms.ValidationError,
t.clean_uploaded_files,
'script',
files)
def test_clean_file_upload_form_valid_data(self):
t = workflows.create_instance.CustomizeAction(self.request, {})
precleaned = 'user data'
upload_str = 'user data'
files = {'script_upload':
self.SimpleFile('script_name',
upload_str,
sys.getsizeof(upload_str))}
cleaned = t.clean_uploaded_files('script', files)
self.assertEqual(
cleaned,
precleaned)
def _server_rescue_post(self, server_id, image_id,
password=None):
form_data = {'instance_id': server_id,
'image': image_id}
if password is not None:
form_data["password"] = password
url = reverse('horizon:project:instances:rescue',
args=[server_id])
return self.client.post(url, form_data)
@helpers.create_mocks({api.nova: ('server_rescue',),
api.glance: ('image_list_detailed',)})
def test_rescue_instance_post(self):
server = self.servers.first()
image = self.images.first()
password = u'testpass'
self._mock_glance_image_list_detailed(self.images.list())
self.mock_server_rescue.return_value = []
res = self._server_rescue_post(server.id, image.id,
password=password)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self._check_glance_image_list_detailed(count=3)
self.mock_server_rescue.assert_called_once_with(
helpers.IsHttpRequest(), server.id, image=image.id,
password=password)
@helpers.create_mocks({api.nova: ('server_list',
'flavor_list',
'server_unrescue',),
api.glance: ('image_list_detailed',),
api.network: ('servers_update_addresses',)})
def test_unrescue_instance(self):
servers = self.servers.list()
server = servers[0]
server.status = "RESCUE"
self.mock_server_list.return_value = [servers, False]
self.mock_servers_update_addresses.return_value = None
self.mock_flavor_list.return_value = self.flavors.list()
self.mock_image_list_detailed.return_value = (self.images.list(),
False, False)
self.mock_server_unrescue.return_value = None
formData = {'action': 'instances__unrescue__%s' % server.id}
res = self.client.post(INDEX_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
search_opts = {'marker': None, 'paginate': True}
self.mock_server_list.assert_called_once_with(helpers.IsHttpRequest(),
search_opts=search_opts)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), servers)
self.mock_flavor_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_image_list_detailed.assert_called_once_with(
helpers.IsHttpRequest())
self.mock_server_unrescue.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
class InstanceAjaxTests(helpers.TestCase, InstanceTestHelperMixin):
@helpers.create_mocks({api.nova: ("server_get",
"flavor_get",
"extension_supported",
"is_feature_available"),
api.network: ('servers_update_addresses',)})
def test_row_update(self):
server = self.servers.first()
instance_id = server.id
flavor_id = server.flavor["id"]
flavors = self.flavors.list()
full_flavors = collections.OrderedDict([(f.id, f) for f in flavors])
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_server_get.return_value = server
self.mock_flavor_get.return_value = full_flavors[flavor_id]
self.mock_servers_update_addresses.return_value = None
params = {'action': 'row_update',
'table': 'instances',
'obj_id': instance_id,
}
res = self.client.get('?'.join((INDEX_URL, urlencode(params))),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertContains(res, server.name)
self._check_extension_supported({'AdminActions': 4,
'Shelve': 1})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 2,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_server_get.assert_called_once_with(helpers.IsHttpRequest(),
instance_id)
self.mock_flavor_get.assert_called_once_with(helpers.IsHttpRequest(),
flavor_id)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), [server])
@helpers.create_mocks({api.nova: ("server_get",
"flavor_get",
'is_feature_available',
"extension_supported"),
api.network: ('servers_update_addresses',)})
def test_row_update_instance_error(self):
server = self.servers.first()
instance_id = server.id
flavor_id = server.flavor["id"]
flavors = self.flavors.list()
full_flavors = collections.OrderedDict([(f.id, f) for f in flavors])
server.status = 'ERROR'
server.fault = {"message": "NoValidHost",
"code": 500,
"details": "No valid host was found. \n "
"File \"/mnt/stack/nova/nova/"
"scheduler/filter_scheduler.py\", "
"line 105, in schedule_run_instance\n "
"raise exception.NoValidHost"
"(reason=\"\")\n",
"created": "2013-10-07T00:08:32Z"}
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_server_get.return_value = server
self.mock_flavor_get.return_value = full_flavors[flavor_id]
self.mock_servers_update_addresses.return_value = None
params = {'action': 'row_update',
'table': 'instances',
'obj_id': instance_id,
}
res = self.client.get('?'.join((INDEX_URL, urlencode(params))),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertContains(res, server.name)
self.assertTrue(res.has_header('X-Horizon-Messages'))
messages = json.loads(res['X-Horizon-Messages'])
self.assertEqual(len(messages), 1)
# (Pdb) messages
# [[u'error', u'Failed to launch instance "server_1": \
# There is not enough capacity for this flavor in the \
# selected availability zone. Try again later or select \
# a different availability zone.', u'']]
self.assertEqual(messages[0][0], 'error')
self.assertTrue(messages[0][1].startswith('Failed'))
self._check_extension_supported({'AdminActions': 4,
'Shelve': 1})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 2,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_server_get.assert_called_once_with(helpers.IsHttpRequest(),
instance_id)
self.mock_flavor_get.assert_called_once_with(helpers.IsHttpRequest(),
flavor_id)
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), [server])
@helpers.create_mocks({api.nova: ("server_get",
"flavor_get",
'is_feature_available',
"extension_supported"),
api.network: ('servers_update_addresses',)})
def test_row_update_flavor_not_found(self):
server = self.servers.first()
instance_id = server.id
self._mock_extension_supported({'AdminActions': True,
'Shelve': True})
self.mock_is_feature_available.return_value = True
self.mock_server_get.return_value = server
self.mock_flavor_get.side_effect = self.exceptions.nova
self.mock_servers_update_addresses.return_value = None
params = {'action': 'row_update',
'table': 'instances',
'obj_id': instance_id,
}
res = self.client.get('?'.join((INDEX_URL, urlencode(params))),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertContains(res, server.name)
self.assertContains(res, "Not available")
self._check_extension_supported({'AdminActions': 4,
'Shelve': 1})
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_feature_available, 2,
mock.call(helpers.IsHttpRequest(), 'locked_attribute'))
self.mock_server_get.assert_called_once_with(helpers.IsHttpRequest(),
instance_id)
self.mock_flavor_get.assert_called_once_with(helpers.IsHttpRequest(),
server.flavor['id'])
self.mock_servers_update_addresses.assert_called_once_with(
helpers.IsHttpRequest(), [server])
class ConsoleManagerTests(helpers.ResetImageAPIVersionMixin, helpers.TestCase):
def setup_consoles(self):
# Need to refresh with mocks or will fail since mock do not detect
# the api_call() as mocked.
console.CONSOLES = collections.OrderedDict([
('VNC', api.nova.server_vnc_console),
('SPICE', api.nova.server_spice_console),
('RDP', api.nova.server_rdp_console),
('SERIAL', api.nova.server_serial_console)])
def _get_console_vnc(self, server):
console_mock = mock.Mock(spec=api.nova.VNCConsole)
console_mock.url = '/VNC'
self.mock_server_vnc_console.return_value = console_mock
self.setup_consoles()
@helpers.create_mocks({api.nova: ('server_vnc_console',)})
def test_get_console_vnc(self):
server = self.servers.first()
self._get_console_vnc(server)
url = '/VNC&title=%s(%s)' % (server.name, server.id)
data = console.get_console(self.request, 'VNC', server)[1]
self.assertEqual(data, url)
self.mock_server_vnc_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
def _get_console_spice(self, server):
console_mock = mock.Mock(spec=api.nova.SPICEConsole)
console_mock.url = '/SPICE'
self.mock_server_spice_console.return_value = console_mock
self.setup_consoles()
@helpers.create_mocks({api.nova: ('server_spice_console',)})
def test_get_console_spice(self):
server = self.servers.first()
self._get_console_spice(server)
url = '/SPICE&title=%s(%s)' % (server.name, server.id)
data = console.get_console(self.request, 'SPICE', server)[1]
self.assertEqual(data, url)
self.mock_server_spice_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
def _get_console_rdp(self, server):
console_mock = mock.Mock(spec=api.nova.RDPConsole)
console_mock.url = '/RDP'
self.mock_server_rdp_console.return_value = console_mock
self.setup_consoles()
@helpers.create_mocks({api.nova: ('server_rdp_console',)})
def test_get_console_rdp(self):
server = self.servers.first()
self._get_console_rdp(server)
url = '/RDP&title=%s(%s)' % (server.name, server.id)
data = console.get_console(self.request, 'RDP', server)[1]
self.assertEqual(data, url)
self.mock_server_rdp_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
def _get_console_serial(self, server):
console_mock = mock.Mock(spec=api.nova.SerialConsole)
console_mock.url = '/SERIAL'
self.mock_server_serial_console.return_value = console_mock
self.setup_consoles()
@helpers.create_mocks({api.nova: ('server_serial_console',)})
def test_get_console_serial(self):
server = self.servers.first()
self._get_console_serial(server)
url = '/SERIAL'
data = console.get_console(self.request, 'SERIAL', server)[1]
self.assertEqual(data, url)
self.mock_server_serial_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_vnc_console',
'server_spice_console',
'server_rdp_console')})
def test_get_console_auto_iterate_available(self):
server = self.servers.first()
console_mock = mock.Mock(spec=api.nova.RDPConsole)
console_mock.url = '/RDP'
self.mock_server_vnc_console.side_effect = self.exceptions.nova
self.mock_server_spice_console.side_effect = self.exceptions.nova
self.mock_server_rdp_console.return_value = console_mock
self.setup_consoles()
url = '/RDP&title=%s(%s)' % (server.name, server.id)
data = console.get_console(self.request, 'AUTO', server)[1]
self.assertEqual(data, url)
self.mock_server_vnc_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_server_spice_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_server_rdp_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('server_vnc_console',
'server_spice_console',
'server_rdp_console',
'server_serial_console')})
def test_get_console_auto_iterate_serial_available(self):
server = self.servers.first()
console_mock = mock.Mock(spec=api.nova.SerialConsole)
console_mock.url = '/SERIAL'
self.mock_server_vnc_console.side_effect = self.exceptions.nova
self.mock_server_spice_console.side_effect = self.exceptions.nova
self.mock_server_rdp_console.side_effect = self.exceptions.nova
self.mock_server_serial_console.return_value = console_mock
self.setup_consoles()
url = '/SERIAL'
data = console.get_console(self.request, 'AUTO', server)[1]
self.assertEqual(data, url)
self.mock_server_vnc_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_server_spice_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_server_rdp_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_server_serial_console.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
def test_invalid_console_type_raise_value_error(self):
self.assertRaises(exceptions.NotAvailable,
console.get_console, None, 'FAKE', None)
@helpers.create_mocks({api.neutron: ('network_list_for_tenant',)})
def test_interface_attach_get(self):
server = self.servers.first()
self.mock_network_list_for_tenant.side_effect = [
self.networks.list()[:1],
[],
]
url = reverse('horizon:project:instances:attach_interface',
args=[server.id])
res = self.client.get(url)
self.assertTemplateUsed(res,
'project/instances/attach_interface.html')
self.mock_network_list_for_tenant.assert_has_calls([
mock.call(helpers.IsHttpRequest(), self.tenant.id),
mock.call(helpers.IsHttpRequest(), self.tenant.id),
])
self.assertEqual(2, self.mock_network_list_for_tenant.call_count)
@helpers.create_mocks({api.neutron: ('network_list_for_tenant',),
api.nova: ('interface_attach',)})
def test_interface_attach_post(self):
fixed_ip = '10.0.0.10'
server = self.servers.first()
network = self.networks.first()
self.mock_network_list_for_tenant.side_effect = [
[network],
[],
]
self.mock_interface_attach.return_value = None
form_data = {'instance_id': server.id,
'network': network.id,
'specification_method': 'network',
'fixed_ip': fixed_ip}
url = reverse('horizon:project:instances:attach_interface',
args=[server.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_network_list_for_tenant.assert_has_calls([
mock.call(helpers.IsHttpRequest(), self.tenant.id),
mock.call(helpers.IsHttpRequest(), self.tenant.id),
])
self.assertEqual(2, self.mock_network_list_for_tenant.call_count)
self.mock_interface_attach.assert_called_once_with(
helpers.IsHttpRequest(), server.id,
net_id=network.id, fixed_ip=fixed_ip, port_id=None)
@helpers.create_mocks({api.cinder: ('volume_list',)})
def test_volume_attach_get(self):
server = self.servers.first()
self.mock_volume_list.return_value = self.cinder_volumes.list()
url = reverse('horizon:project:instances:attach_volume',
args=[server.id])
res = self.client.get(url)
form = res.context['form']
self.assertEqual(res.status_code, 200)
self.assertFalse(form.fields['device'].required)
self.assertIsInstance(form.fields['volume'].widget,
forms.ThemableSelectWidget)
self.assertTemplateUsed(res,
'project/instances/attach_volume.html')
self.mock_volume_list.assert_called_once_with(helpers.IsHttpRequest())
@helpers.create_mocks({api.nova: ('instance_volume_attach',),
api.cinder: ('volume_list',)})
def test_volume_attach_post(self):
server = self.servers.first()
self.mock_volume_list.return_value = self.cinder_volumes.list()
self.mock_instance_volume_attach.return_value = None
volume = self.cinder_volumes.list()[1]
form_data = {"volume": volume.id,
"instance_id": server.id,
"device": None}
url = reverse('horizon:project:instances:attach_volume',
args=[server.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_volume_list.assert_called_once_with(helpers.IsHttpRequest())
self.mock_instance_volume_attach.assert_called_once_with(
helpers.IsHttpRequest(), volume.id, server.id, str(None))
@mock.patch.object(api.cinder, 'volume_list')
@mock.patch.object(api.cinder, 'volume_get')
@mock.patch.object(api.nova, 'get_microversion', return_value='2.60')
@mock.patch.object(api.nova, 'novaclient')
def test_volume_attach_post_multiattach(
self, mock_client, mock_get_microversion, mock_volume_get,
mock_volume_list):
# Tests that a multiattach volume must be attached with compute API
# microversion 2.60 and the feature is supported.
server = self.servers.first()
volumes = self.cinder_volumes.list()
volume = volumes[1]
volume.multiattach = True
mock_volume_list.return_value = volumes
mock_volume_get.return_value = volume
form_data = {"volume": volume.id,
"instance_id": server.id,
"device": None}
url = reverse('horizon:project:instances:attach_volume',
args=[server.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
mock_client.assert_called_once_with(mock.ANY, '2.60')
@mock.patch.object(api.cinder, 'volume_list')
@mock.patch.object(api.cinder, 'volume_get')
@mock.patch.object(api.nova, 'get_microversion', return_value=None)
@mock.patch.object(api.nova, 'novaclient')
def test_volume_attach_post_multiattach_feature_not_available(
self, mock_client, mock_get_microversion, mock_volume_get,
mock_volume_list):
# Tests that a multiattach volume must be attached with compute API
# microversion 2.60 and the feature is not available.
server = self.servers.first()
volumes = self.cinder_volumes.list()
volume = volumes[1]
volume.multiattach = True
mock_volume_list.return_value = volumes
mock_volume_get.return_value = volume
form_data = {"volume": volume.id,
"instance_id": server.id,
"device": None}
url = reverse('horizon:project:instances:attach_volume',
args=[server.id])
self.client.post(url, form_data)
# TODO(mriedem): Assert the actual error from the response but
# the test helpers don't seem to handle this case.
mock_client.assert_not_called()
@helpers.create_mocks({api.nova: ('instance_volumes_list',)})
def test_volume_detach_get(self):
server = self.servers.first()
self.mock_instance_volumes_list.return_value = \
self.cinder_volumes.list()
url = reverse('horizon:project:instances:detach_volume',
args=[server.id])
res = self.client.get(url)
form = res.context['form']
self.assertIsInstance(form.fields['volume'].widget,
forms.ThemableSelectWidget)
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res,
'project/instances/detach_volume.html')
self.mock_instance_volumes_list.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
@helpers.create_mocks({api.nova: ('instance_volumes_list',
'instance_volume_detach')})
def test_volume_detach_post(self):
server = self.servers.first()
volume = self.cinder_volumes.list()[1]
self.mock_instance_volumes_list.return_value = \
self.cinder_volumes.list()
self.mock_instance_volume_detach.return_value = None
form_data = {"volume": volume.id,
"instance_id": server.id}
url = reverse('horizon:project:instances:detach_volume',
args=[server.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_instance_volumes_list.assert_called_once_with(
helpers.IsHttpRequest(), server.id)
self.mock_instance_volume_detach.assert_called_once_with(
helpers.IsHttpRequest(), server.id, volume.id)
@helpers.create_mocks({api.neutron: ('port_list',)})
def test_interface_detach_get(self):
server = self.servers.first()
self.mock_port_list.return_value = [self.ports.first()]
url = reverse('horizon:project:instances:detach_interface',
args=[server.id])
res = self.client.get(url)
self.assertTemplateUsed(res,
'project/instances/detach_interface.html')
self.mock_port_list.assert_called_once_with(helpers.IsHttpRequest(),
device_id=server.id)
@helpers.create_mocks({api.neutron: ('port_list',),
api.nova: ('interface_detach',)})
def test_interface_detach_post(self):
server = self.servers.first()
port = self.ports.first()
self.mock_port_list.return_value = [port]
self.mock_interface_detach.return_value = None
form_data = {'instance_id': server.id,
'port': port.id}
url = reverse('horizon:project:instances:detach_interface',
args=[server.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_port_list.assert_called_once_with(helpers.IsHttpRequest(),
device_id=server.id)
self.mock_interface_detach.assert_called_once_with(
helpers.IsHttpRequest(), server.id, port.id)
|
{
"content_hash": "bf8ae26bd2a91787bb811b52d4113b25",
"timestamp": "",
"source": "github",
"line_count": 5573,
"max_line_length": 79,
"avg_line_length": 45.740534720976136,
"alnum_prop": 0.5644967675119257,
"repo_name": "noironetworks/horizon",
"id": "d2bbcfdad9bbe2cac9e027acd39d2c8c671a3bcb",
"size": "255676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/project/instances/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "129247"
},
{
"name": "HTML",
"bytes": "581169"
},
{
"name": "JavaScript",
"bytes": "2455930"
},
{
"name": "Python",
"bytes": "5190295"
},
{
"name": "Shell",
"bytes": "7108"
}
],
"symlink_target": ""
}
|
import unittest
from app import app
from app.models import Todo
from werkzeug.exceptions import HTTPException
class TodoTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def tearDown(self):
# clear data for each test
todos = Todo.objects.all()
for todo in todos:
todo.delete()
def test_index(self):
rv = self.app.get('/')
assert "awesome-flask-todo" in rv.data
def test_empty(self):
rv = self.app.get('/')
assert "No todos, please add" in rv.data
def test_add_todo(self):
self.app.post("/add", data=dict(content="test add todo"))
todo = Todo.objects.get_or_404(content="test add todo")
assert todo is not None
def test_none_todo(self):
try:
todo = Todo.objects.get_or_404(content='test todo none')
except HTTPException as e:
assert e.code == 404
def test_done_todo(self):
todo = Todo(content='test done todo')
todo.save()
url = '/done/'+str(todo.id)
rv = self.app.get(url)
assert '/undone/'+str(todo.id) in rv.data
def test_undone_todo(self):
todo = Todo(content='test undone todo')
todo.save()
url = '/undone/'+str(todo.id)
rv = self.app.get(url)
assert '/done/'+str(todo.id) in rv.data
def test_delete_todo(self):
todo = Todo(content='test delete done')
todo.save()
url = '/delete/'+str(todo.id)
rv = self.app.get(url)
assert "No todos, please add" in rv.data
def test_404(self):
rv = self.app.get('/404test')
assert "Not Found" in rv.data
|
{
"content_hash": "c7b5cd70f916e97eb3523fb782cac7c0",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 68,
"avg_line_length": 27.80327868852459,
"alnum_prop": 0.5772405660377359,
"repo_name": "dangger/awesome-flask-todo",
"id": "3d59aa2e2720910253940f0e529ee8b1e4c6a519",
"size": "1696",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/tests/test_todo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2314"
},
{
"name": "Python",
"bytes": "4060"
}
],
"symlink_target": ""
}
|
from oslo_log import log as logging
import oslo_messaging
from neutron._i18n import _LE
from neutron.api.rpc.callbacks.consumer import registry
from neutron.api.rpc.callbacks import events
from neutron.api.rpc.callbacks import resources
from neutron.services.trunk.drivers.openvswitch.agent import ovsdb_handler
from neutron.services.trunk.drivers.openvswitch.agent import trunk_manager
from neutron.services.trunk.rpc import agent
LOG = logging.getLogger(__name__)
TRUNK_SKELETON = None
class OVSTrunkSkeleton(agent.TrunkSkeleton):
"""It processes Neutron Server events to create the physical resources
associated to a logical trunk in response to user initiated API events
(such as trunk subport add/remove). It collaborates with the OVSDBHandler
to implement the trunk control plane.
"""
def __init__(self, ovsdb_handler):
super(OVSTrunkSkeleton, self).__init__()
self.ovsdb_handler = ovsdb_handler
registry.unsubscribe(self.handle_trunks, resources.TRUNK)
def handle_trunks(self, trunk, event_type):
"""This method is not required by the OVS Agent driver.
Trunk notifications are handled via local OVSDB events.
"""
raise NotImplementedError()
def handle_subports(self, subports, event_type):
# Subports are always created with the same trunk_id and there is
# always at least one item in subports list
trunk_id = subports[0].trunk_id
if self.ovsdb_handler.manages_this_trunk(trunk_id):
if event_type not in (events.CREATED, events.DELETED):
LOG.error(_LE("Unknown or unimplemented event %s"), event_type)
return
ctx = self.ovsdb_handler.context
try:
LOG.debug("Event %s for subports: %s", event_type, subports)
if event_type == events.CREATED:
status = self.ovsdb_handler.wire_subports_for_trunk(
ctx, trunk_id, subports)
elif event_type == events.DELETED:
subport_ids = [subport.port_id for subport in subports]
status = self.ovsdb_handler.unwire_subports_for_trunk(
trunk_id, subport_ids)
self.ovsdb_handler.report_trunk_status(ctx, trunk_id, status)
except oslo_messaging.MessagingException as e:
LOG.error(_LE(
"Error on event %(event)s for subports "
"%(subports)s: %(err)s"),
{'event': event_type, 'subports': subports, 'err': e})
def init_handler(resource, event, trigger, agent=None):
"""Handler for agent init event."""
# Set up agent-side RPC for receiving trunk events; we may want to
# make this setup conditional based on server-side capabilities.
global TRUNK_SKELETON
manager = trunk_manager.TrunkManager(agent.int_br)
handler = ovsdb_handler.OVSDBHandler(manager)
TRUNK_SKELETON = OVSTrunkSkeleton(handler)
|
{
"content_hash": "b625992e7b465e29b71b276a4435662f",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 79,
"avg_line_length": 41.97222222222222,
"alnum_prop": 0.6542025148908008,
"repo_name": "cloudbase/neutron",
"id": "68fa78b8708a2ebbd4650ec5286edc2690175ba1",
"size": "3561",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "neutron/services/trunk/drivers/openvswitch/agent/driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "9942988"
},
{
"name": "Shell",
"bytes": "14325"
}
],
"symlink_target": ""
}
|
import random
from locust import task, HttpUser, TaskSet
products = [
'0PUK6V6EV0',
'1YMWWN1N4O',
'2ZYFJ3GM2N',
'66VCHSJNUP',
'6E92ZMYYFZ',
'9SIQT8TOJO',
'L9ECAV7KIM',
'LS4PSXUNUM',
'OLJCESPC7Z']
currencies = [
'EUR',
'USD',
'JPY',
'GBP',
'TRY',
'CAD']
# Define specific frontend actions.
@task
def index(l):
l.client.get("/")
@task
def setCurrency(l):
l.client.post("/setCurrency",
{'currency_code': random.choice(currencies)})
@task
def browseProduct(l):
l.client.get("/product/" + random.choice(products))
@task
def viewCart(l):
l.client.get("/cart")
@task
def emptyCart(l):
l.client.post("/cart/empty")
@task
def addToCart(l):
product = random.choice(products)
l.client.get("/product/" + product)
l.client.post("/cart", {
'product_id': product,
'quantity': random.choice([1,2,3,4,5,10])})
@task
def checkout(l):
addToCart(l)
l.client.post("/cart/checkout", {
'email': 'someone@example.com',
'street_address': '1600 Amphitheatre Parkway',
'zip_code': '94043',
'city': 'Mountain View',
'state': 'CA',
'country': 'United States',
'credit_card_number': '4432-8015-6152-0454',
'credit_card_expiration_month': '1',
'credit_card_expiration_year': '2039',
'credit_card_cvv': '672',
})
# LocustIO TaskSet classes defining detailed user behaviors.
class PurchasingBehavior(TaskSet):
def on_start(self):
index(self)
tasks = {index: 1,
setCurrency: 1,
browseProduct: 2,
addToCart: 2,
viewCart: 1,
checkout: 1}
class WishlistBehavior(TaskSet):
def on_start(self):
index(self)
tasks = {index: 1,
setCurrency: 1,
browseProduct: 5,
addToCart: 10,
viewCart: 5,
emptyCart: 2}
class BrowsingBehavior(TaskSet):
def on_start(self):
index(self)
tasks = {index: 5,
setCurrency: 1,
browseProduct: 10}
# LocustIO Locust classes defining general user scenarios.
class PurchasingUser(HttpUser):
'''
User that browses products, adds to cart, and purchases via checkout.
'''
tasks = [PurchasingBehavior]
min_wait = 1000
max_wait = 10000
class WishlistUser(HttpUser):
'''
User that browses products, adds to cart, empties cart, but never purchases.
'''
tasks = [WishlistBehavior]
min_wait = 1000
max_wait = 10000
class BrowsingUser(HttpUser):
'''
User that only browses products.
'''
tasks = [BrowsingBehavior]
min_wait = 1000
max_wait = 10000
|
{
"content_hash": "b850e703df141bbd343f1a798532ccd2",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 80,
"avg_line_length": 20.666666666666668,
"alnum_prop": 0.5978994748687172,
"repo_name": "GoogleCloudPlatform/cloud-ops-sandbox",
"id": "d0d1d1e1d49d85658f5c6f3ef1f5b7aab761e424",
"size": "3287",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/loadgenerator/locust_tasks/basic_locustfile.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3931"
},
{
"name": "C#",
"bytes": "25602"
},
{
"name": "CSS",
"bytes": "22609"
},
{
"name": "Dockerfile",
"bytes": "17435"
},
{
"name": "Go",
"bytes": "106453"
},
{
"name": "HCL",
"bytes": "71640"
},
{
"name": "HTML",
"bytes": "1252708"
},
{
"name": "Java",
"bytes": "16361"
},
{
"name": "JavaScript",
"bytes": "82747"
},
{
"name": "Makefile",
"bytes": "2585"
},
{
"name": "Python",
"bytes": "293979"
},
{
"name": "Shell",
"bytes": "52342"
},
{
"name": "Smarty",
"bytes": "448"
}
],
"symlink_target": ""
}
|
import hashlib
def hashmd5(handler, s):
return hashlib.md5(s).hexdigest()
|
{
"content_hash": "944c2f644b890238b73793f198966a96",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 37,
"avg_line_length": 15.8,
"alnum_prop": 0.7215189873417721,
"repo_name": "codeskyblue/bootstrap-tornado",
"id": "85c6e6e27c4642f174380f5896d05b5c193d6322",
"size": "98",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "template_filters.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4452"
},
{
"name": "Python",
"bytes": "13061"
}
],
"symlink_target": ""
}
|
"""
Django settings for project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@jy()%r3_d!-&pqol7&izdkcu@^(b_^b-7l81k89mz5rwqhle3'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'project.urls'
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES['default'] = dj_database_url.config()
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
|
{
"content_hash": "9d7c82e821c9efae4787936c6e08438a",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 71,
"avg_line_length": 25.627659574468087,
"alnum_prop": 0.7251971772519717,
"repo_name": "GroupwareStartup/Norisuke",
"id": "efd09787ba914392fa7fd3d686c746477d072f94",
"size": "2409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3603"
}
],
"symlink_target": ""
}
|
import mock
import os
import pkg_resources
import trove
import ConfigParser as config_parser
from trove.common import extensions
from trove.extensions.routes.account import Account
from trove.extensions.routes.mgmt import Mgmt
from trove.extensions.routes.mysql import Mysql
from trove.extensions.routes.security_group import Security_group
from trove.tests.unittests import trove_testtools
DEFAULT_EXTENSION_MAP = {
'Account': [Account, extensions.ExtensionDescriptor],
'Mgmt': [Mgmt, extensions.ExtensionDescriptor],
'MYSQL': [Mysql, extensions.ExtensionDescriptor],
'SecurityGroup': [Security_group, extensions.ExtensionDescriptor]
}
EP_TEXT = '''
account = trove.extensions.routes.account:Account
mgmt = trove.extensions.routes.mgmt:Mgmt
mysql = trove.extensions.routes.mysql:Mysql
security_group = trove.extensions.routes.security_group:Security_group
invalid = trove.tests.unittests.api.common.test_extensions:InvalidExtension
'''
class InvalidExtension(object):
def get_name(self):
return "Invalid"
def get_description(self):
return "Invalid Extension"
def get_alias(self):
return "Invalid"
def get_namespace(self):
return "http://TBD"
def get_updated(self):
return "2014-08-14T13:25:27-06:00"
def get_resources(self):
return []
class TestExtensionLoading(trove_testtools.TestCase):
def setUp(self):
super(TestExtensionLoading, self).setUp()
def tearDown(self):
super(TestExtensionLoading, self).tearDown()
def _assert_default_extensions(self, ext_list):
for alias, ext in ext_list.items():
for clazz in DEFAULT_EXTENSION_MAP[alias]:
self.assertIsInstance(ext, clazz, "Improper extension class")
@mock.patch("pkg_resources.iter_entry_points")
def test_default_extensions(self, mock_iter_eps):
trove_base = os.path.abspath(os.path.join(
os.path.dirname(trove.__file__), ".."))
setup_path = "%s/setup.cfg" % trove_base
# check if we are running as unit test without module installed
if os.path.isfile(setup_path):
parser = config_parser.ConfigParser()
parser.read(setup_path)
entry_points = parser.get(
'entry_points', extensions.ExtensionManager.EXT_NAMESPACE)
eps = pkg_resources.EntryPoint.parse_group('plugins', entry_points)
mock_iter_eps.return_value = eps.values()
extension_mgr = extensions.ExtensionManager()
self.assertEqual(DEFAULT_EXTENSION_MAP.keys().sort(),
extension_mgr.extensions.keys().sort(),
"Invalid extension names")
self._assert_default_extensions(extension_mgr.extensions)
@mock.patch("pkg_resources.iter_entry_points")
def test_invalid_extension(self, mock_iter_eps):
eps = pkg_resources.EntryPoint.parse_group('mock', EP_TEXT)
mock_iter_eps.return_value = eps.values()
extension_mgr = extensions.ExtensionManager()
self.assertEqual(len(DEFAULT_EXTENSION_MAP.keys()),
len(extension_mgr.extensions),
"Loaded invalid extensions")
self._assert_default_extensions(extension_mgr.extensions)
|
{
"content_hash": "aad814af65c33252e6443e648d64e886",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 79,
"avg_line_length": 36.5,
"alnum_prop": 0.6767123287671233,
"repo_name": "fabian4/trove",
"id": "e2023861afd47d5b7201859657a374a787c1c3b9",
"size": "3888",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "trove/tests/unittests/api/common/test_extensions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "88"
},
{
"name": "CSS",
"bytes": "21914"
},
{
"name": "JavaScript",
"bytes": "60526"
},
{
"name": "Python",
"bytes": "3297002"
},
{
"name": "Shell",
"bytes": "15239"
},
{
"name": "XSLT",
"bytes": "50542"
}
],
"symlink_target": ""
}
|
import unittest
import numpy as np
import os
import shutil
import subprocess
from rmgpy import getPath
from rmgpy.qm.main import QMCalculator
from rmgpy.molecule import Molecule
from rmgpy.qm.mopac import Mopac, MopacMolPM3, MopacMolPM6, MopacMolPM7
executablePath = Mopac.executablePath
if not os.path.exists(executablePath):
NO_MOPAC = NO_LICENCE = True
else:
NO_MOPAC = False
process = subprocess.Popen(executablePath,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = process.communicate("\n")
NO_LICENCE = 'To install the MOPAC license' in stderr
mol1 = Molecule().fromSMILES('C1=CC=C2C=CC=CC2=C1')
class TestMopacMolPM3(unittest.TestCase):
"""
Contains unit tests for the Geometry class.
"""
@unittest.skipIf(NO_MOPAC, "MOPAC not found. Try resetting your environment variables if you want to use it.")
@unittest.skipIf(NO_LICENCE, "MOPAC license not installed. Run mopac for instructions")
def setUp(self):
"""
A function run before each unit test in this class.
"""
RMGpy_path = os.path.normpath(os.path.join(getPath(), '..'))
qm = QMCalculator(software='mopac',
method='pm3',
fileStore=os.path.join(RMGpy_path, 'testing', 'qm', 'QMfiles'),
scratchDirectory=os.path.join(RMGpy_path, 'testing', 'qm', 'QMscratch'),
)
if not os.path.exists(qm.settings.fileStore):
os.makedirs(qm.settings.fileStore)
self.qmmol1 = MopacMolPM3(mol1, qm.settings)
def testGenerateThermoData(self):
"""
Test that generateThermoData() works correctly for MOPAC PM3
"""
# First ensure any old data are removed, or else they'll be reused!
for directory in (self.qmmol1.settings.fileStore, self.qmmol1.settings.scratchDirectory):
shutil.rmtree(directory, ignore_errors=True)
self.qmmol1.generateThermoData()
result = self.qmmol1.qmData
self.assertTrue(self.qmmol1.thermo.comment.startswith('QM MopacMolPM3 calculation'))
self.assertEqual(result.numberOfAtoms, 18)
self.assertIsInstance(result.atomicNumbers, np.ndarray)
if result.molecularMass.units == 'amu':
self.assertAlmostEqual(result.molecularMass.value, 128.173, 2)
self.assertAlmostEqual(self.qmmol1.thermo.H298.value_si, 169708.0608, 0) # to 1 decimal place
self.assertAlmostEqual(self.qmmol1.thermo.S298.value_si, 334.5007584, 1) # to 1 decimal place
def testLoadThermoData(self):
"""
Test that generateThermoData() can load thermo from the previous MOPAC PM3 run.
Check that it loaded, and the values are the same as above.
"""
self.qmmol1.generateThermoData()
result = self.qmmol1.qmData
self.assertTrue(self.qmmol1.thermo.comment.startswith('QM MopacMolPM3 calculation'))
self.assertEqual(result.numberOfAtoms, 18)
self.assertIsInstance(result.atomicNumbers, np.ndarray)
if result.molecularMass.units == 'amu':
self.assertAlmostEqual(result.molecularMass.value, 128.173, 2)
self.assertAlmostEqual(self.qmmol1.thermo.H298.value_si, 169708.0608, 0) # to 1 decimal place
self.assertAlmostEqual(self.qmmol1.thermo.S298.value_si, 334.5007584, 1) # to 1 decimal place
class TestMopacMolPM6(unittest.TestCase):
"""
Contains unit tests for the Geometry class.
"""
@unittest.skipIf(NO_MOPAC, "MOPAC not found. Try resetting your environment variables if you want to use it.")
@unittest.skipIf(NO_LICENCE, "MOPAC license not installed. Run mopac for instructions")
def setUp(self):
"""
A function run before each unit test in this class.
"""
RMGpy_path = os.path.normpath(os.path.join(getPath(), '..'))
qm = QMCalculator(software='mopac',
method='pm6',
fileStore=os.path.join(RMGpy_path, 'testing', 'qm', 'QMfiles'),
scratchDirectory=os.path.join(RMGpy_path, 'testing', 'qm', 'QMscratch'),
)
if not os.path.exists(qm.settings.fileStore):
os.makedirs(qm.settings.fileStore)
self.qmmol1 = MopacMolPM6(mol1, qm.settings)
def testGenerateThermoData(self):
"""
Test that generateThermoData() works correctly for MOPAC PM6
"""
# First ensure any old data are removed, or else they'll be reused!
for directory in (self.qmmol1.settings.fileStore, self.qmmol1.settings.scratchDirectory):
shutil.rmtree(directory, ignore_errors=True)
self.qmmol1.generateThermoData()
result = self.qmmol1.qmData
self.assertTrue(self.qmmol1.thermo.comment.startswith('QM MopacMolPM6 calculation'))
self.assertEqual(result.numberOfAtoms, 18)
self.assertIsInstance(result.atomicNumbers, np.ndarray)
if result.molecularMass.units == 'amu':
self.assertAlmostEqual(result.molecularMass.value, 128.173, 2)
self.assertAlmostEqual(self.qmmol1.thermo.H298.value_si, 167704.4270, 0) # to 1 decimal place
self.assertAlmostEqual(self.qmmol1.thermo.S298.value_si, 338.0999241, 1) # to 1 decimal place
def testLoadThermoData(self):
"""
Test that generateThermoData() can load thermo from the previous MOPAC PM6 run.
Check that it loaded, and the values are the same as above.
"""
self.qmmol1.generateThermoData()
result = self.qmmol1.qmData
self.assertTrue(self.qmmol1.thermo.comment.startswith('QM MopacMolPM6 calculation'))
self.assertEqual(result.numberOfAtoms, 18)
self.assertIsInstance(result.atomicNumbers, np.ndarray)
if result.molecularMass.units == 'amu':
self.assertEqual(result.molecularMass.value, 128.173)
self.assertAlmostEqual(self.qmmol1.thermo.H298.value_si, 167704.0681, 0) # to 0 decimal place
self.assertAlmostEqual(self.qmmol1.thermo.S298.value_si, 338.0999241, 1) # to 1 decimal place
class TestMopacMolPM7(unittest.TestCase):
"""
Contains unit tests for the Geometry class.
"""
@unittest.skipIf(NO_MOPAC, "MOPAC not found. Try resetting your environment variables if you want to use it.")
@unittest.skipIf(NO_LICENCE, "MOPAC license not installed. Run mopac for instructions")
def setUp(self):
"""
A function run before each unit test in this class.
"""
RMGpy_path = os.path.normpath(os.path.join(getPath(), '..'))
qm = QMCalculator(software='mopac',
method='pm7',
fileStore=os.path.join(RMGpy_path, 'testing', 'qm', 'QMfiles'),
scratchDirectory=os.path.join(RMGpy_path, 'testing', 'qm', 'QMscratch'),
)
if not os.path.exists(qm.settings.fileStore):
os.makedirs(qm.settings.fileStore)
mol1 = Molecule().fromSMILES('C1=CC=C2C=CC=CC2=C1')
self.qmmol1 = MopacMolPM7(mol1, qm.settings)
def testGenerateThermoData(self):
"""
Test that generateThermoData() works correctly for MOPAC PM7
"""
# First ensure any old data are removed, or else they'll be reused!
for directory in (self.qmmol1.settings.fileStore, self.qmmol1.settings.scratchDirectory):
shutil.rmtree(directory, ignore_errors=True)
self.qmmol1.generateThermoData()
result = self.qmmol1.qmData
self.assertTrue(self.qmmol1.thermo.comment.startswith('QM MopacMolPM7 calculation'))
self.assertEqual(result.numberOfAtoms, 18)
self.assertIsInstance(result.atomicNumbers, np.ndarray)
if result.molecularMass.units == 'amu':
self.assertAlmostEqual(result.molecularMass.value, 128.173, 2)
self.assertAlmostEqual(self.qmmol1.thermo.H298.value_si, 166168.9863, 0) # to 1 decimal place
self.assertAlmostEqual(self.qmmol1.thermo.S298.value_si, 336.3330406, 1) # to 1 decimal place
def testLoadThermoData(self):
"""
Test that generateThermoData() can load thermo from the previous MOPAC PM7 run.
Check that it loaded, and the values are the same as above.
"""
self.qmmol1.generateThermoData()
result = self.qmmol1.qmData
self.assertTrue(self.qmmol1.thermo.comment.startswith('QM MopacMolPM7 calculation'))
self.assertEqual(result.numberOfAtoms, 18)
self.assertIsInstance(result.atomicNumbers, np.ndarray)
if result.molecularMass.units == 'amu':
self.assertAlmostEqual(result.molecularMass.value, 128.173, 2)
self.assertAlmostEqual(self.qmmol1.thermo.H298.value_si, 166168.8571, 0) # to 1 decimal place
self.assertAlmostEqual(self.qmmol1.thermo.S298.value_si, 336.3330406, 1) # to 1 decimal place
################################################################################
if __name__ == '__main__':
unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
|
{
"content_hash": "a08d4b403cfd99aa83669ad43c0666f0",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 114,
"avg_line_length": 42.04524886877828,
"alnum_prop": 0.6467929401635816,
"repo_name": "nickvandewiele/RMG-Py",
"id": "13e340222f0d729769e40a9614f91167fb445fb3",
"size": "9335",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "rmgpy/qm/mopacTest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "461"
},
{
"name": "Jupyter Notebook",
"bytes": "23683"
},
{
"name": "Makefile",
"bytes": "5832"
},
{
"name": "Python",
"bytes": "3423274"
},
{
"name": "Shell",
"bytes": "2733"
}
],
"symlink_target": ""
}
|
import os
import sys
sys.path.append('..')
import python_common as common
def _expected_keys_and_values_of_cluster_query_rec(
xroad_message_id, security_server_address):
return [
("messageId", xroad_message_id),
("serviceSecurityServerAddress", security_server_address),
]
def run(client_security_server_address, producer_security_server_address,
ssh_user, request_template_dir):
xroad_query_to_ss0_service_template_filename = os.path.join(
request_template_dir, "xroad_query_to_ss0_service_template.xml")
xroad_query_to_ss2_service_template_filename = os.path.join(
request_template_dir, "xroad_query_to_ss2_service_template.xml")
query_data_client_template_filename = os.path.join(
request_template_dir, "query_operational_data_client_template.xml")
client_timestamp_before_requests = common.get_remote_timestamp(
client_security_server_address, ssh_user)
message_id_ss0 = common.generate_message_id()
print("\nGenerated message ID %s for X-Road request" % (message_id_ss0, ))
### Regular and operational data requests and the relevant checks
print("\n---- Sending an X-Road request to the service provider in " \
"security server 00000000_1 ----\n")
request_contents = common.format_xroad_request_template(
xroad_query_to_ss0_service_template_filename, message_id_ss0)
print("Generated the following X-Road request: \n")
print(request_contents)
response = common.post_xml_request(
client_security_server_address, request_contents)
print("\nReceived the following X-Road response: \n")
xml = common.parse_and_clean_xml(response.text)
print(xml.toprettyxml())
common.check_soap_fault(xml)
message_id_ss2 = common.generate_message_id()
print("\nGenerated message ID %s for X-Road request" % (message_id_ss2, ))
print("\n---- Sending an X-Road request to the service provider in " \
"security server 00000002_1 ----\n")
request_contents = common.format_xroad_request_template(
xroad_query_to_ss2_service_template_filename, message_id_ss2)
print("Generated the following X-Road request: \n")
print(request_contents)
response = common.post_xml_request(
client_security_server_address, request_contents)
print("\nReceived the following X-Road response: \n")
xml = common.parse_and_clean_xml(response.text)
print(xml.toprettyxml())
common.check_soap_fault(xml)
common.wait_for_operational_data()
client_timestamp_after_requests = common.get_remote_timestamp(
client_security_server_address, ssh_user)
# Now make an operational data request to the client's security server
# and check the response payload.
print("\n---- Sending an operational data request to the client's security server ----\n")
message_id = common.generate_message_id()
print("Generated message ID %s for query data request" % (message_id, ))
request_contents = common.format_query_operational_data_request_template(
query_data_client_template_filename, message_id,
client_timestamp_before_requests, client_timestamp_after_requests)
print("Generated the following query data request for the client's security server: \n")
print(request_contents)
response = common.post_xml_request(
client_security_server_address, request_contents,
get_raw_stream=True)
mime_parts, raw_response = common.parse_multipart_response(response)
if mime_parts:
soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
common.print_multipart_soap_and_record_count(soap_part, record_count)
json_payload = common.get_multipart_json_payload(mime_parts[1])
# Check the presence of the required fields in at least one JSON structure.
# The record describing the X-Road request to service provider in
# security server 00000000_1
common.assert_present_in_json(
json_payload, _expected_keys_and_values_of_cluster_query_rec(
message_id_ss0, "xtee8.ci.kit"))
# The record describing the X-Road request to service provider in
# security server 00000002_1
common.assert_present_in_json(
json_payload, _expected_keys_and_values_of_cluster_query_rec(
message_id_ss2, "xtee10.ci.kit"))
common.print_multipart_query_data_response(json_payload)
else:
common.parse_and_check_soap_response(raw_response)
|
{
"content_hash": "350fa3720e8afc154b5dbea6d84cf57c",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 94,
"avg_line_length": 39.64102564102564,
"alnum_prop": 0.6811125485122898,
"repo_name": "vrk-kpa/X-Road",
"id": "b80d4d6bf21fa7769a2c3be30f1e9a5ca5013a30",
"size": "6199",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "src/systemtest/op-monitoring/integration/testcases/test_service_cluster.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "1602"
},
{
"name": "C",
"bytes": "54401"
},
{
"name": "CSS",
"bytes": "62804"
},
{
"name": "HTML",
"bytes": "135934"
},
{
"name": "Java",
"bytes": "4233585"
},
{
"name": "JavaScript",
"bytes": "407789"
},
{
"name": "Makefile",
"bytes": "4405"
},
{
"name": "Perl",
"bytes": "1376"
},
{
"name": "Python",
"bytes": "238897"
},
{
"name": "Roff",
"bytes": "2170"
},
{
"name": "Ruby",
"bytes": "1007868"
},
{
"name": "Scala",
"bytes": "14352"
},
{
"name": "Shell",
"bytes": "142230"
},
{
"name": "XSLT",
"bytes": "1244"
}
],
"symlink_target": ""
}
|
import abc
import re
import typing
# This module defines the abstraction of a Test (of JSON data)
# and several specific test classes.
# A Test can be used multiple times, even concurrently.
# Data is the type of Python data that corresponds to JSON values.
Data = typing.Union[int, float, str, bool, typing.Tuple['Data', ...],
typing.Mapping[str, 'Data'], None]
# Globals is a dict of variables for communication among tests.
# There is a distinct dict for each top-level use of a test.
Globals = typing.Mapping[str, Data]
# PCR_Contents maps digest name to map from PCR index to PCR value.
# Here digest name is something like 'sha256'.
# Each PCR index is a decimal string, so that this can be JSON data
PCR_Contents = typing.Mapping[str, typing.Mapping[str, int]]
class Test(metaclass=abc.ABCMeta):
"""Test is something that can examine a value and either approve it or give a reason for rejection"""
@abc.abstractmethod
def why_not(self, globs: Globals, subject: Data) -> str:
"""Test the given value, return empty string for pass, explanation for fail.
The explanation is (except in deliberate exceptions) English that
makes a sentence when placed after a noun phrase identifying the subject.
The test can read and write in the given globs dict.
"""
raise NotImplementedError
# type_test constructs a test of data type that is expected to pass
def type_test(t) -> typing.Callable[[typing.Any], bool]:
"""Returns a lambda that tests against the given type.
The lambda returns True on pass, raises Exception on fail."""
def test(v: typing.Any) -> bool:
if isinstance(v, t):
return True
raise Exception(f'{v!r} is a {type(v)} rather than a {t}')
return test
def list_test(elt_test: typing.Callable[[typing.Any], bool]) -> typing.Callable[[typing.Any], bool]:
"""Return a lambda that tests for list with certain type of element"""
def test(dat: typing.Any) -> bool:
type_test(list)(dat)
for elt in dat:
elt_test(elt)
return True
return test
def dict_test(dom_test: typing.Callable[[typing.Any], bool], rng_test: typing.Callable[[typing.Any], bool]) -> typing.Callable[[typing.Any], bool]:
"""Return a lambda that tests for dict with certain type key and value"""
def test(dat: typing.Any) -> bool:
type_test(dict)(dat)
for dom, rng in dat.items():
dom_test(dom)
rng_test(rng)
return True
return test
def obj_test(**field_tests: typing.Callable[[typing.Any], bool]) -> typing.Callable[[typing.Any], bool]:
"""Return a lambda that tests for dict with string keys and a particular type for each key"""
def test(dat: typing.Any) -> bool:
type_test(dict)(dat)
dom_test = type_test(str)
for dom, rng in dat.items():
dom_test(dom)
if dom not in field_tests:
continue
rng_test = field_tests[dom]
rng_test(rng)
missing = set(field_tests.keys()) - set(dat.keys())
if missing:
raise Exception(f'{dat!r} lacks fields {missing}')
return True
return test
class AcceptAll(Test):
"""Every value passes this test"""
def why_not(self, _: Globals, subject: Data) -> str:
return ''
class RejectAll(Test):
"""No value passes this test"""
def __init__(self, why: str):
super().__init__()
if not why:
raise Exception(f'the truth value of {why!r} is false')
self.why = why
def why_not(self, _: Globals, subject: Data) -> str:
return self.why
class And(Test):
"""Conjunction of given tests
The tests are run in series, stopping as soon as one fails."""
def __init__(self, *tests: Test):
super().__init__()
list(map(type_test(Test), tests))
self.tests = tests
def why_not(self, globs: Globals, subject: Data) -> str:
for test in self.tests:
reason = test.why_not(globs, subject)
if reason:
return reason
return ''
class Or(Test):
"""Disjunction of given tests
The tests are run in series, stopping as soon as one succeeds."""
def __init__(self, *tests: Test):
super().__init__()
list(map(type_test(Test), tests))
self.tests = tests
def why_not(self, globs: Globals, subject: Data) -> str:
if not self.tests:
return 'does not pass empty disjunction'
reasons = []
for test in self.tests:
reason = test.why_not(globs, subject)
if not reason:
return ''
reasons.append(reason)
return '[' + ', '.join(reasons) + ']'
class Dispatcher(Test):
"""Apply a specific test for each key tuple.
This kind of test applies when the subject is a dict and
it is desired to apply a different test depending on
the value(s) of one or more entries."""
def __init__(self, key_names: typing.Tuple[str, ...]):
"""Initialize a Dispatcher Test.
key_names identifies the subject dict entries that determine
which subsidiary test to apply."""
super().__init__()
if len(key_names) < 1:
raise Exception('Dispatcher given empty list of key names')
list(map(type_test(str), key_names))
self.key_names = key_names
self.tests = dict()
def set(self, key_vals: typing.Tuple[str, ...], test: Test) -> None:
"""Set the test for the given value tuple"""
if len(key_vals) != len(self.key_names):
raise Exception(
f'{key_vals!a} does not match length of {self.key_names}')
if key_vals in self.tests:
raise Exception(f'multiple tests for {key_vals!a}')
self.tests[key_vals] = test
def why_not(self, globs: Globals, subject: Data) -> str:
if not isinstance(subject, dict):
return 'is not a dict'
key_vals = tuple()
for kn in self.key_names:
if kn not in subject:
return f'has no {kn}'
key_vals += (subject[kn],)
test = self.tests.get(key_vals)
if test is None:
return f'has unexpected {self.key_names} combination {key_vals}'
return test.why_not(globs, subject)
class FieldTest(Test):
"""Applies given test to field having given name"""
def __init__(self, field_name: str, field_test: Test, show_name: bool = True):
super().__init__()
type_test(str)(field_name)
type_test(Test)(field_test)
self.field_name = field_name
self.field_test = field_test
self.show_name = show_name
def why_not(self, globs: Globals, subject: Data) -> str:
if not isinstance(subject, dict):
return 'is not a dict'
if self.field_name not in subject:
return f'has no {self.field_name!a} field'
reason = self.field_test.why_not(globs, subject[self.field_name])
if reason and self.show_name:
return self.field_name + ' ' + reason
return reason
class FieldsTest(And):
"""Tests a collection of fields"""
def __init__(self, **fields: Test):
tests = [FieldTest(field_name, field_test)
for field_name, field_test in fields.items()]
super().__init__(*tests)
class IterateTest(Test):
"""Applies a test to every member of a list"""
def __init__(self, elt_test: Test, show_elt: bool = False):
super().__init__()
self.elt_test = elt_test
self.show_elt = show_elt
def why_not(self, globs: Globals, subject: Data) -> str:
if not isinstance(subject, list):
return 'is not a list'
for idx, elt in enumerate(subject):
reason = self.elt_test.why_not(globs, elt)
if not reason:
continue
if self.show_elt:
return f'{elt!a} ' + reason
return f'[{idx}] ' + reason
return ''
class TupleTest(Test):
"""Applies a sequence of tests to a sequence of values
The tests are run in series, stopping as soon as one fails"""
def __init__(self, *member_tests: Test, pad: bool = False):
super().__init__()
list(map(type_test(Test), member_tests))
self.member_tests = member_tests
self.pad = pad
def why_not(self, globs: Globals, subject: Data) -> str:
if not isinstance(subject, list):
return 'is not a list'
subject_len = len(subject)
test_len = len(self.member_tests)
if subject_len > test_len:
return f' is longer ({subject_len}) than the applicable tests ({test_len})'
if (subject_len < test_len) and not self.pad:
return f' is shorter ({subject_len}) than the applicable tests ({test_len})'
for idx, test in enumerate(self.member_tests):
subject_elt = subject[idx] if idx < subject_len else None
reason = test.why_not(globs, subject_elt)
if reason:
return f'[{idx}] ' + reason
return ''
class DelayedField(Test):
"""Remembers a field value for later testing"""
def __init__(self, delayer: 'DelayToFields', field_name: str):
super().__init__()
self.delayer = delayer
self.field_name = field_name
def why_not(self, globs: Globals, subject: Data) -> str:
"""Add the value to the list stashed for later testing"""
val_list = globs[self.field_name]
if not isinstance(val_list, list):
return f'malformed test: global {self.field_name} is not a list'
val_list.append(subject)
return ''
class DelayInitializer(Test):
"""A Test that initializes the globals used by a DelayToFields and reports acceptance"""
def __init__(self, delayer: 'DelayToFields'):
super().__init__()
self.delayer = delayer
def why_not(self, globs: Globals, subject):
self.delayer.initialize_globals(globs)
return ''
class DelayToFields(Test):
"""A test to apply after stashing fields to test.
For each field, accumulates a list of values
in a correspondingly-named global.
As a test, ignores the given subject and instead applies the
configured fields_test to the record of accumulated value lists.
"""
def __init__(self, fields_test: Test, *field_names: str):
super().__init__()
self.field_names = field_names
self.fields_test = fields_test
def initialize_globals(self, globs: Globals) -> None:
"""Initialize for a new pass over data"""
for field_name in self.field_names:
globs[field_name] = []
def get_initializer(self) -> DelayInitializer:
"""Get a Test that accepts the subject and initializes the relevant globals"""
return DelayInitializer(self)
def get(self, field_name: str) -> DelayedField:
"""Return a Test that adds the subject to the list stashed for later evaulation"""
if field_name not in self.field_names:
raise Exception(f'{field_name} not in {self.field_names}')
return DelayedField(self, field_name)
def why_not(self, globs: Globals, subject: Data) -> str:
"""Test the stashed field values"""
delayed = dict()
for field_name in self.field_names:
delayed[field_name] = globs.get(field_name, None)
return self.fields_test.why_not(globs, delayed)
class IntEqual(Test):
"""Compares with a given int"""
def __init__(self, expected: int):
super().__init__()
type_test(int)(expected)
self.expected = expected
def why_not(self, _: Globals, subject: Data) -> str:
if not isinstance(subject, int):
return 'is not a int'
if subject == self.expected:
return ''
return f'is not {self.expected}'
class StringEqual(Test):
"""Compares with a given string"""
def __init__(self, expected: str):
super().__init__()
type_test(str)(expected)
self.expected = expected
def why_not(self, _: Globals, subject: Data) -> str:
if not isinstance(subject, str):
return 'is not a str'
if subject == self.expected:
return ''
return f'is not {self.expected!a}'
class RegExp(Test):
"""Does a full match against a regular expression"""
def __init__(self, pattern: str, flags=0):
super().__init__()
self.regexp = re.compile(pattern, flags)
def why_not(self, _: Globals, subject: Data) -> str:
if not isinstance(subject, str):
return 'is not a str'
if self.regexp.fullmatch(subject):
return ''
return f'does not match {self.regexp.pattern}'
# hash algorithm -> hash value in hex (sans leading 0x)
Digest = typing.Mapping[str, str]
class DigestsTest(Test):
"""Tests whether subject has a digest that is in a list of good ones"""
def __init__(self, good_digests_list: typing.Iterable[Digest]):
"""good_digests_list is a list of good {alg:hash}"""
super().__init__()
self.good_digests = dict()
'map from alg to set of good digests'
for good_digests in good_digests_list:
type_test(dict)(good_digests)
for alg, hash_val in good_digests.items():
if alg in self.good_digests:
self.good_digests[alg].add(hash_val)
else:
self.good_digests[alg] = set((hash_val,))
def why_not(self, _: Globals, subject: Data) -> str:
if not isinstance(subject, dict):
return 'is not a dict'
if 'Digests' not in subject:
return 'has no Digests'
digest_list = subject['Digests']
if not isinstance(digest_list, list):
return 'Digests is not a list'
for idx, subject_digest in enumerate(digest_list):
if not isinstance(subject_digest, dict):
return f'Digests[{idx}] is {subject_digest!r}, not a dict'
if 'AlgorithmId' not in subject_digest:
return f'digest {idx} has no AlgorithmId'
alg = subject_digest['AlgorithmId']
if not isinstance(alg, str):
return f'Digests[{idx}].AlgorithmId is {alg!r}, not a str'
if 'Digest' not in subject_digest:
return f'digest {idx} has no Digest'
hash_val = subject_digest['Digest']
if not isinstance(hash_val, str):
return f'Digests[{idx}].Digest is {hash_val!r}, not a str'
if alg not in self.good_digests:
continue
if hash_val in self.good_digests[alg]:
return ''
return f'has no digest approved by {self.good_digests}'
class DigestTest(DigestsTest):
"""Tests whether subject has a digest that equals a given one"""
def __init__(self, good_digest: Digest):
super().__init__([good_digest])
StrOrRE = typing.Union[str, typing.Pattern]
class VariableTest(Test):
"""Test whether a given variable has value passing given test"""
def __init__(self, variable_name: str, unicode_name: StrOrRE, data_test: Test):
"""variable_name and unicode_name are as in the parsed event; data_test applies to VariableData"""
super().__init__()
self.variable_name = variable_name
#pylint: disable=isinstance-second-argument-not-valid-type
if not isinstance(unicode_name, (str, typing.Pattern)):
#pylint: enable=isinstance-second-argument-not-valid-type
raise Exception(
f'unicode_name={unicode_name!r} is neither a str nor an re.Pattern')
self.unicode_name = unicode_name
self.data_test = data_test
def why_not(self, globs: Globals, subject: Data) -> str:
if not isinstance(subject, dict):
return 'is not a dict'
if 'Event' not in subject:
return 'has no Event field'
evt = subject['Event']
if not isinstance(evt, dict):
return 'Event is not a dict'
if 'VariableName' not in evt:
return 'Event has no VariableName field'
variable_name = evt['VariableName']
if variable_name != self.variable_name:
return f'Event.VariableName is {variable_name} rather than {self.variable_name}'
if 'UnicodeName' not in evt:
return 'Event has no UnicodeName field'
unicode_name = evt['UnicodeName']
if 'VariableData' not in evt:
return 'Event has no VariableData field'
if not isinstance(unicode_name, str):
return 'Event.UnicodeName is not a str'
variable_data = evt['VariableData']
if isinstance(self.unicode_name, str):
if unicode_name != self.unicode_name:
return f'Event.UnicodeName is {unicode_name} rather than {self.unicode_name}'
elif not self.unicode_name.fullmatch(unicode_name):
return f'Event.UnicodeName, {unicode_name}, does not match {self.unicode_name.pattern}'
return self.data_test.why_not(globs, variable_data)
class VariableDispatch(FieldTest):
"""Do a specific test for each variable"""
def __init__(self):
self.vd = Dispatcher(('VariableName', 'UnicodeName'))
super().__init__('Event', self.vd)
def set(self, variable_name: str, unicode_name: str, data_test: Test) -> None:
"""Define the test for a specific variable"""
self.vd.set((variable_name, unicode_name),
FieldTest('VariableData', data_test))
# Signature has the following fields.
# - SignatureOwner, value is a string UUID
# - SignatureData, value is a hex string without leading 0x
Signature = typing.Mapping[str, str]
class SignatureTest(And):
"""Compares to a particular signature"""
def __init__(self, owner: str, data: str):
"""owner is SignatureOwner, data is SignatureData"""
super().__init__(
FieldTest('SignatureOwner', StringEqual(owner)),
FieldTest('SignatureData', StringEqual(data))
)
class SignatureSetMember(Or):
"""Tests for membership in the given list of signatures"""
def __init__(self, sigs: typing.Iterable[Signature]):
tests = [SignatureTest(sig['SignatureOwner'],
sig['SignatureData']) for sig in sigs]
super().__init__(*tests)
class KeySubset(IterateTest):
def __init__(self, sig_type: str, keys: typing.Iterable[typing.Mapping[str, str]]):
super().__init__(And(
FieldTest('SignatureType', StringEqual(sig_type)),
FieldTest('Keys', IterateTest(SignatureSetMember(keys)))))
class FieldsMismatchError(Exception):
"""Represents a mismatch between expected and actual sets of field names."""
def __init__(self, expected, actual):
"""Constructor."""
super().__init__(expected, actual)
type_test(set)(expected)
type_test(set)(actual)
list(map(type_test(str), expected))
list(map(type_test(str), actual))
self.expected = expected
self.actual = actual
def __str__(self):
return f'expected fields {self.expected} but got {self.actual}'
class SupersetOfDicts(Test):
"""Tests that the subject is a list of dicts with at least certain members
All dicts must have the same field names"""
@staticmethod
def dict_to_tuple(it: dict, field_names: typing.Tuple[str]) -> typing.Tuple:
actual_keys = set(it.keys())
expected_keys = set(field_names)
if actual_keys != expected_keys:
raise FieldsMismatchError(expected_keys, actual_keys)
return tuple(it.get(field_name) for field_name in field_names)
def __init__(self, reqs: typing.Iterable[dict], field_names: typing.Tuple[str]):
list(map(type_test(dict), reqs))
type_test(tuple)(field_names)
list(map(type_test(str), field_names))
self.field_names = field_names
self.reqs = {SupersetOfDicts.dict_to_tuple(
req, field_names) for req in reqs}
def why_not(self, globs: Globals, subject: Data) -> str:
if not isinstance(subject, list):
return 'is not a list'
actual = set()
for elt in subject:
if not isinstance(elt, dict):
return f'member {elt} is not a dict'
try:
tup = SupersetOfDicts.dict_to_tuple(elt, self.field_names)
except FieldsMismatchError:
return f'member {elt!r} does not have the right set of field names {self.field_names}'
actual.add(tup)
missing = self.reqs - actual
if not missing:
return ''
return f'lacks {self.field_names} combinations {missing}'
class KeySuperset(TupleTest):
"""Tests that there is one Keys dict containing at least certain members"""
def __init__(self, sig_type: str, keys: typing.Iterable[Signature]):
super().__init__(And(
FieldTest('SignatureType', StringEqual(sig_type)),
FieldTest('Keys',
SupersetOfDicts(keys, ('SignatureOwner', 'SignatureData')))
))
|
{
"content_hash": "9b2bcc984771d5bbe4b7db6f1bd1429f",
"timestamp": "",
"source": "github",
"line_count": 596,
"max_line_length": 147,
"avg_line_length": 35.85906040268456,
"alnum_prop": 0.6033595358412877,
"repo_name": "mit-ll/python-keylime",
"id": "30a92ac33d7d4fd170905a36b9a1e27cfca578b0",
"size": "21372",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keylime/elchecking/tests.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "3128"
},
{
"name": "CSS",
"bytes": "4767"
},
{
"name": "JavaScript",
"bytes": "18188"
},
{
"name": "Python",
"bytes": "617887"
},
{
"name": "Shell",
"bytes": "51983"
}
],
"symlink_target": ""
}
|
"""
tests specific to "pip install --user"
"""
import os
import textwrap
import pytest
from os.path import curdir, isdir, isfile
from pip.compat import uses_pycache, cache_from_source
from tests.lib.local_repos import local_checkout
from tests.lib import pyversion
def _patch_dist_in_site_packages(script):
sitecustomize_path = script.lib_path.join("sitecustomize.py")
sitecustomize_path.write(textwrap.dedent("""
def dist_in_site_packages(dist):
return False
from pip.req import req_install
req_install.dist_in_site_packages = dist_in_site_packages
"""))
# Caught py32 with an outdated __pycache__ file after a sitecustomize
# update (after python should have updated it) so will delete the cache
# file to be sure
# See: https://github.com/pypa/pip/pull/893#issuecomment-16426701
if uses_pycache:
cache_path = cache_from_source(sitecustomize_path)
if os.path.isfile(cache_path):
os.remove(cache_path)
class Tests_UserSite:
@pytest.mark.network
def test_reset_env_system_site_packages_usersite(self, script, virtualenv):
"""
reset_env(system_site_packages=True) produces env where a --user
install can be found using pkg_resources
"""
virtualenv.system_site_packages = True
script.pip('install', '--user', 'INITools==0.2')
result = script.run(
'python', '-c',
"import pkg_resources; print(pkg_resources.get_distribution"
"('initools').project_name)",
)
project_name = result.stdout.strip()
assert (
'INITools' == project_name, "'%s' should be 'INITools'" %
project_name
)
@pytest.mark.network
def test_install_subversion_usersite_editable_with_distribute(
self, script, virtualenv, tmpdir):
"""
Test installing current directory ('.') into usersite after installing
distribute
"""
virtualenv.system_site_packages = True
result = script.pip(
'install', '--user', '-e',
'%s#egg=initools-dev' %
local_checkout(
'svn+http://svn.colorstudy.com/INITools/trunk',
tmpdir.join("cache"),
)
)
result.assert_installed('INITools', use_user_site=True)
def test_install_curdir_usersite(self, script, virtualenv, data):
"""
Test installing current directory ('.') into usersite
"""
virtualenv.system_site_packages = True
run_from = data.packages.join("FSPkg")
result = script.pip(
'install', '-vvv', '--user', curdir,
cwd=run_from,
expect_error=False,
)
fspkg_folder = script.user_site / 'fspkg'
egg_info_folder = (
script.user_site / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, result.stdout
assert egg_info_folder in result.files_created
def test_install_user_venv_nositepkgs_fails(self, script, data):
"""
user install in virtualenv (with no system packages) fails with message
"""
run_from = data.packages.join("FSPkg")
result = script.pip(
'install', '--user', curdir,
cwd=run_from,
expect_error=True,
)
assert (
"Can not perform a '--user' install. User site-packages are not "
"visible in this virtualenv." in result.stderr
)
@pytest.mark.network
def test_install_user_conflict_in_usersite(self, script, virtualenv):
"""
Test user install with conflict in usersite updates usersite.
"""
virtualenv.system_site_packages = True
script.pip('install', '--user', 'INITools==0.3', '--no-binary=:all:')
result2 = script.pip(
'install', '--user', 'INITools==0.1', '--no-binary=:all:')
# usersite has 0.1
egg_info_folder = (
script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion
)
initools_v3_file = (
# file only in 0.3
script.base_path / script.user_site / 'initools' /
'configparser.py'
)
assert egg_info_folder in result2.files_created, str(result2)
assert not isfile(initools_v3_file), initools_v3_file
@pytest.mark.network
def test_install_user_conflict_in_globalsite(self, script, virtualenv):
"""
Test user install with conflict in global site ignores site and
installs to usersite
"""
# the test framework only supports testing using virtualenvs
# the sys.path ordering for virtualenvs with --system-site-packages is
# this: virtualenv-site, user-site, global-site
# this test will use 2 modifications to simulate the
# user-site/global-site relationship
# 1) a monkey patch which will make it appear INITools==0.2 is not in
# the virtualenv site if we don't patch this, pip will return an
# installation error: "Will not install to the usersite because it
# will lack sys.path precedence..."
# 2) adding usersite to PYTHONPATH, so usersite as sys.path precedence
# over the virtualenv site
virtualenv.system_site_packages = True
script.environ["PYTHONPATH"] = script.base_path / script.user_site
_patch_dist_in_site_packages(script)
script.pip('install', 'INITools==0.2', '--no-binary=:all:')
result2 = script.pip(
'install', '--user', 'INITools==0.1', '--no-binary=:all:')
# usersite has 0.1
egg_info_folder = (
script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion
)
initools_folder = script.user_site / 'initools'
assert egg_info_folder in result2.files_created, str(result2)
assert initools_folder in result2.files_created, str(result2)
# site still has 0.2 (can't look in result1; have to check)
egg_info_folder = (
script.base_path / script.site_packages /
'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.base_path / script.site_packages / 'initools'
assert isdir(egg_info_folder)
assert isdir(initools_folder)
@pytest.mark.network
def test_upgrade_user_conflict_in_globalsite(self, script, virtualenv):
"""
Test user install/upgrade with conflict in global site ignores site and
installs to usersite
"""
# the test framework only supports testing using virtualenvs
# the sys.path ordering for virtualenvs with --system-site-packages is
# this: virtualenv-site, user-site, global-site
# this test will use 2 modifications to simulate the
# user-site/global-site relationship
# 1) a monkey patch which will make it appear INITools==0.2 is not in
# the virtualenv site if we don't patch this, pip will return an
# installation error: "Will not install to the usersite because it
# will lack sys.path precedence..."
# 2) adding usersite to PYTHONPATH, so usersite as sys.path precedence
# over the virtualenv site
virtualenv.system_site_packages = True
script.environ["PYTHONPATH"] = script.base_path / script.user_site
_patch_dist_in_site_packages(script)
script.pip('install', 'INITools==0.2', '--no-binary=:all:')
result2 = script.pip(
'install', '--user', '--upgrade', 'INITools', '--no-binary=:all:')
# usersite has 0.3.1
egg_info_folder = (
script.user_site / 'INITools-0.3.1-py%s.egg-info' % pyversion
)
initools_folder = script.user_site / 'initools'
assert egg_info_folder in result2.files_created, str(result2)
assert initools_folder in result2.files_created, str(result2)
# site still has 0.2 (can't look in result1; have to check)
egg_info_folder = (
script.base_path / script.site_packages /
'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.base_path / script.site_packages / 'initools'
assert isdir(egg_info_folder), result2.stdout
assert isdir(initools_folder)
@pytest.mark.network
def test_install_user_conflict_in_globalsite_and_usersite(
self, script, virtualenv):
"""
Test user install with conflict in globalsite and usersite ignores
global site and updates usersite.
"""
# the test framework only supports testing using virtualenvs.
# the sys.path ordering for virtualenvs with --system-site-packages is
# this: virtualenv-site, user-site, global-site.
# this test will use 2 modifications to simulate the
# user-site/global-site relationship
# 1) a monkey patch which will make it appear INITools==0.2 is not in
# the virtualenv site if we don't patch this, pip will return an
# installation error: "Will not install to the usersite because it
# will lack sys.path precedence..."
# 2) adding usersite to PYTHONPATH, so usersite as sys.path precedence
# over the virtualenv site
virtualenv.system_site_packages = True
script.environ["PYTHONPATH"] = script.base_path / script.user_site
_patch_dist_in_site_packages(script)
script.pip('install', 'INITools==0.2', '--no-binary=:all:')
script.pip('install', '--user', 'INITools==0.3', '--no-binary=:all:')
result3 = script.pip(
'install', '--user', 'INITools==0.1', '--no-binary=:all:')
# usersite has 0.1
egg_info_folder = (
script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion
)
initools_v3_file = (
# file only in 0.3
script.base_path / script.user_site / 'initools' /
'configparser.py'
)
assert egg_info_folder in result3.files_created, str(result3)
assert not isfile(initools_v3_file), initools_v3_file
# site still has 0.2 (can't just look in result1; have to check)
egg_info_folder = (
script.base_path / script.site_packages /
'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.base_path / script.site_packages / 'initools'
assert isdir(egg_info_folder)
assert isdir(initools_folder)
@pytest.mark.network
def test_install_user_in_global_virtualenv_with_conflict_fails(
self, script, virtualenv):
"""
Test user install in --system-site-packages virtualenv with conflict in
site fails.
"""
virtualenv.system_site_packages = True
script.pip('install', 'INITools==0.2')
result2 = script.pip(
'install', '--user', 'INITools==0.1',
expect_error=True,
)
resultp = script.run(
'python', '-c',
"import pkg_resources; print(pkg_resources.get_distribution"
"('initools').location)",
)
dist_location = resultp.stdout.strip()
assert (
"Will not install to the user site because it will lack sys.path "
"precedence to %s in %s" %
('INITools', dist_location) in result2.stderr
)
|
{
"content_hash": "7feca14c185f24a9a84bcf8055e63b45",
"timestamp": "",
"source": "github",
"line_count": 295,
"max_line_length": 79,
"avg_line_length": 39.061016949152545,
"alnum_prop": 0.6039225896034018,
"repo_name": "jythontools/pip",
"id": "eda5fb8fa015438ed5491706ad6768efb8ff9a9d",
"size": "11523",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": "tests/functional/test_install_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1372"
},
{
"name": "Python",
"bytes": "2279432"
},
{
"name": "Shell",
"bytes": "2201"
}
],
"symlink_target": ""
}
|
"""Helper utilities module to compute various statistics for the current AOI.
:copyright: (c) 2013 by Tim Sutton
:license: GPLv3, see LICENSE for more details.
"""
import os
import sys
import getpass
from tempfile import mkstemp
import xml
import time
from datetime import date, timedelta
import zipfile
from reporter import config
from reporter.osm_node_parser import OsmNodeParser
from reporter.osm_way_parser import OsmParser
from reporter.queries import RESOURCES_MAP
from reporter import LOGGER
def overpass_resource_base_path(feature_type):
"""Get the overpass resource base path according to the feature we extract.
:param feature_type: The type of feature :
buildings, building-points, roads, potential-idp, boundary-[1,11]
:type feature_type: str
:return The resource folder.
:rtype str
"""
return os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'resources',
'overpass',
RESOURCES_MAP[feature_type],
RESOURCES_MAP[feature_type]))
def shapefile_resource_base_path(feature_type):
"""Get the shapefile resource base path according to the feature we extract.
:param feature_type: The type of feature :
buildings, building-points, roads, potential-idp, boundary-[1,11]
:type feature_type: str
:return The resource folder.
:rtype str
"""
return os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'resources',
'shapefile',
RESOURCES_MAP[feature_type],
RESOURCES_MAP[feature_type]))
def generic_shapefile_base_path():
"""Get the generic shapefile resource base path.
:return The generic resource folder.
:rtype str
"""
return os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'resources',
'shapefile',
'generic',
'generic'))
def get_totals(sorted_user_list):
"""Given a sorted user list, get the totals for ways and nodes.
:param sorted_user_list: User dicts sorted by number of ways.
:type sorted_user_list: list
:returns: Two-tuple (int, int) containing way count, node count.
:rtype: (int, int)
"""
way_count = 0
node_count = 0
for user in sorted_user_list:
way_count += user['ways']
node_count += user['nodes']
return node_count, way_count
def split_bbox(bbox):
"""Split a bounding box into its parts.
:param bbox: String describing a bbox e.g. '106.78674459457397,
-6.141301491467023,106.80691480636597,-6.133834354201348'
:type bbox: str
:returns: A dict with keys: 'southwest_lng, southwest_lat, northeast_lng,
northeast_lat'
:rtype: dict
"""
values = bbox.split(',')
if not len(values) == 4:
raise ValueError('Invalid bbox')
# pylint: disable=W0141
# next line should probably use list comprehension rather
# http://pylint-messages.wikidot.com/messages:w0141
values = map(float, values)
# pylint: enable=W0141
names = ['SW_lng', 'SW_lat', 'NE_lng', 'NE_lat']
coordinates = dict(zip(names, values))
return coordinates
def split_polygon(polygon):
"""Split polygon array to string.
:param polygon: list of array describing polygon area e.g.
'[[28.01513671875,-25.77516058680343],[28.855590820312504,-25.567220388070023],
[29.168701171875004,-26.34265280938059]]
:type polygon: list
:returns: A string of polygon e.g. 50.7 7.1 50.7 7.12 50.71 7.11
:rtype: str
"""
if len(polygon) < 3:
raise ValueError(
'At least 3 lat/lon float value pairs must be provided')
polygon_string = ''
for poly in polygon:
polygon_string += ' '.join(map(str, poly))
polygon_string += ' '
return polygon_string.strip()
def osm_object_contributions(
osm_file,
tag_name,
date_start=None,
date_end=None):
"""Compile a summary of user contributions for the selected osm data type.
:param osm_file: A file object reading from a .osm file.
:type osm_file: file, FileIO
:param tag_name: The tag name we want to filter on.
:type tag_name: str
:param date_start: The start date we want to filter
:type date_start: float
:param date_end: The end date we want to filter
:type date_end: float
:returns: A list of dicts where items in the list are sorted from highest
contributor (based on number of ways) down to lowest. Each element
in the list is a dict in the form: {
'user': <user>,
'ways': <way count>,
'nodes': <node count>,
'timeline': <timelinedict>,
'best': <most ways in a single day>,
'worst': <least ways in single day>,
'average': <average ways across active days>,
'crew': <bool> }
where crew is used to designate users who are part of an active
data gathering campaign.
The timeline dict will contain a collection of dates and
the total number of ways created on that date e.g.
{
u'2010-12-09': 10,
u'2012-07-10': 14
}
:rtype: list
"""
parser = OsmParser(
start_date=date_start,
end_date=date_end)
try:
xml.sax.parse(osm_file, parser)
except xml.sax.SAXParseException:
LOGGER.exception('Failed to parse OSM xml.')
raise
way_count_dict = parser.wayCountDict
node_count_dict = parser.nodeCountDict
timelines = parser.userDayCountDict
# Convert to a list of dicts so we can sort it.
crew_list = config.CREW
user_list = []
for key, value in way_count_dict.items():
start_date, end_date = date_range(timelines[key])
start_date = time.strftime('%d-%m-%Y', start_date.timetuple())
end_date = time.strftime('%d-%m-%Y', end_date.timetuple())
user_timeline = timelines[key]
node_count = 0
if key in node_count_dict:
node_count = node_count_dict[key]
record = {
'name': key,
'ways': value,
'nodes': node_count,
'timeline': interpolated_timeline(user_timeline),
'start': start_date,
'end': end_date,
'activeDays': len(user_timeline),
'best': best_active_day(user_timeline),
'worst': worst_active_day(user_timeline),
'average': average_for_active_days(user_timeline)
}
user_list.append(record)
for key, value in node_count_dict.items():
start_date, end_date = date_range(timelines[key])
start_date = time.strftime('%d-%m-%Y', start_date.timetuple())
end_date = time.strftime('%d-%m-%Y', end_date.timetuple())
user_timeline = timelines[key]
record = {
'name': key,
'ways': 0,
'nodes': value,
'timeline': interpolated_timeline(user_timeline),
'start': start_date,
'end': end_date,
'activeDays': len(user_timeline),
'best': best_active_day(user_timeline),
'worst': worst_active_day(user_timeline),
'average': average_for_active_days(user_timeline)
}
user_list.append(record)
# Sort it
sorted_user_list = sorted(
user_list, key=lambda d: (
-d['ways'],
d['nodes'],
d['name'],
d['timeline'],
d['start'],
d['end'],
d['activeDays'],
d['best'],
d['worst'],
d['average']))
return sorted_user_list
def date_range(timeline):
"""Given a timeline, determine the start and end dates.
The timeline may be sparse (containing fewer entries than all the dates
between the min and max dates) and since it is a dict,
the dates may be in any order.
:param timeline: A dictionary of non-sequential dates (in YYYY-MM-DD) as
keys and values (representing ways collected on that day).
:type timeline: dict
:returns: A tuple containing two dates:
* start_date - a date object representing the earliest date in the
time line.
* end_date - a date object representing the newest date in the time
line.
:rtype: (date, date)
"""
start_date = None
end_date = None
for next_date in timeline.keys():
year, month, day = next_date.split('-')
message = 'Date: %s' % next_date
LOGGER.info(message)
timeline_date = date(int(year), int(month), int(day))
if start_date is None:
start_date = timeline_date
if end_date is None:
end_date = timeline_date
if timeline_date < start_date:
start_date = timeline_date
if timeline_date > end_date:
end_date = timeline_date
return start_date, end_date
def average_for_active_days(timeline):
"""Compute the average activity per active day in a sparse timeline.
:param timeline: A dictionary of non-sequential dates (in YYYY-MM-DD) as
keys and values (representing ways collected on that day).
:type timeline: dict
:returns: Number of entities captured per day rounded to the nearest int.
:rtype: int
"""
count = 0
total = 0
for value in list(timeline.values()):
if value > 0:
count += 1
total += value
# Python 3 seems to automagically turn integer maths into float if needed
average = int(total / count)
return average
def best_active_day(timeline):
"""Compute the best activity for a single active day in a sparse timeline.
:param timeline: A dictionary of non-sequential dates (in YYYY-MM-DD) as
keys and values (representing ways collected on that day).
:type timeline: dict
:returns: Number of entities captured for the user's best day.
:rtype: int
"""
best = 0
for value in list(timeline.values()):
if value > best:
best = value
return best
def worst_active_day(timeline):
"""Compute the worst activity for a single active day in a sparse timeline.
:param timeline: A dictionary of non-sequential dates (in YYYY-MM-DD) as
keys and values (representing ways collected on that day).
:type timeline: dict
:returns: Number of entities captured for the user's worst day.
:rtype: int
"""
if len(timeline) < 1:
return 0
worst = list(timeline.values())[0]
for value in list(timeline.values()):
if value == 0: # should never be but just in case
continue
if value < worst:
worst = value
return worst
def interpolated_timeline(timeline):
"""Interpolate a timeline given a sparse timeline.
A sparse timelines is a sequence of dates containing no days of zero
activity. An interpolated timeline is a sequence of dates where there is
an entry per day in the date range regardless of whether there was any
activity or not.
:param timeline: A dictionary of non-sequential dates (in YYYY-MM-DD) as
keys and values (representing ways collected on that day).
:type timeline: dict
:returns: An interpolated list where each date in the original input
date is present, and all days where no total was provided are added
to include that day.
:rtype: list
Given an input looking like this::
{
{u'2012-09-24': 1},
{u'2012-09-21': 10},
{u'2012-09-25': 5},
}
The returned list will be in the form::
[
[Date(2012,09,21), 10],
[Date(2012,09,22), 0],
[Date(2012,09,23), 0],
[Date(2012,09,24), 1],
[Date(2012,09,25), 5],
]
"""
# Work out the earliest and latest day
start_date, end_date = date_range(timeline)
# Loop through them, adding an entry for each day
time_line = '['
for current_date in date_range_iterator(start_date, end_date):
date_string = time.strftime('%Y-%m-%d', current_date.timetuple())
if date_string in timeline:
value = timeline[date_string]
else:
value = 0
if value == 0:
continue
if time_line != '[':
time_line += ','
time_line += '["%s",%i]' % (date_string, value)
time_line += ']'
return time_line
def date_range_iterator(start_date, end_date):
"""Given two dates return a collection of dates between start and end.
:param start_date: Date representing the start date.
:type start_date: date
:param end_date: Date representing the end date.
:type end_date: date
:returns: Iterable collection yielding dates.
:rtype: iterable
"""
for n in range(int((end_date - start_date).days) + 1):
yield start_date + timedelta(n)
def osm_nodes_by_user(file_handle, username):
"""Obtain the nodes collected by a single user from an OSM file.
:param file_handle: File handle to an open OSM XML document.
:type file_handle: file
:param username: Name of the user for whom nodes should be collected.
:type username: str
:returns: A list of nodes for the given user.
:rtype: list
"""
parser = OsmNodeParser(username)
xml.sax.parse(file_handle, parser)
return parser.nodes
def temp_dir(sub_dir='work'):
"""Obtain the temporary working directory for the operating system.
An osm-reporter subdirectory will automatically be created under this.
.. note:: You can use this together with unique_filename to create
a file in a temporary directory under the inasafe workspace. e.g.
tmpdir = temp_dir('testing')
tmpfile = unique_filename(dir=tmpdir)
print tmpfile
/tmp/osm-reporter/23-08-2012/timlinux/testing/tmpMRpF_C
If you specify OSM_REPORTER_WORK_DIR as an environment var, it will be
used in preference to the system temp directory.
.. note:: This function was taken from InaSAFE (http://inasafe.org) with
minor adaptions.
:param sub_dir: Optional argument which will cause an additional
subirectory to be created e.g. ``/tmp/inasafe/foo/``.
:type sub_dir: str
:returns: Path to the output clipped layer (placed in the system temp dir).
:rtype: str
"""
user = getpass.getuser().replace(' ', '_')
current_date = date.today()
date_string = current_date.isoformat()
if 'OSM_REPORTER_WORK_DIR' in os.environ:
new_directory = os.environ['OSM_REPORTER_WORK_DIR']
else:
# Following 4 lines are a workaround for tempfile.tempdir()
# unreliabilty
handle, filename = mkstemp()
os.close(handle)
new_directory = os.path.dirname(filename)
os.remove(filename)
path = os.path.join(
new_directory, 'osm-reporter', date_string, user, sub_dir)
if not os.path.exists(path):
# Ensure that the dir is world writable
# Umask sets the new mask and returns the old
old_mask = os.umask(0000)
try:
os.makedirs(path, 0o0777)
except OSError:
# one of the directories in the path already exists maybe
pass
# Reinstate the old mask for tmp
os.umask(old_mask)
if not os.path.exists(path):
raise Exception('Could not create working directory', path)
return path
def unique_filename(**kwargs):
"""Create new filename guaranteed not to exist previously
:param kwargs: A dictionary of keyword arguments which are passed on to
``mkstemp(**kwargs)``
.. note:: This function was taken from InaSAFE (http://inasafe.org) with
minor adaptions.
Use mkstemp to create the file, then remove it and return the name
If dir is specified, the tempfile will be created in the path specified
otherwise the file will be created in a directory following this scheme:
:file:`/tmp/osm-reporter/<dd-mm-yyyy>/<user>/impacts'
See http://docs.python.org/library/tempfile.html for details.
Example usage::
tempdir = temp_dir(sub_dir='test')
filename = unique_filename(suffix='.keywords', dir=tempdir)
print filename
/tmp/osm-reporter/23-08-2012/timlinux/test/tmpyeO5VR.keywords
Or with no preferred subdir, a default subdir of 'impacts' is used::
filename = unique_filename(suffix='.shp')
print filename
/tmp/osm-reporter/23-08-2012/timlinux/impacts/tmpoOAmOi.shp
"""
if 'dir' not in kwargs:
path = temp_dir('impacts')
kwargs['dir'] = path
else:
path = temp_dir(kwargs['dir'])
kwargs['dir'] = path
if not os.path.exists(kwargs['dir']):
# Ensure that the dir mask won't conflict with the mode
# Umask sets the new mask and returns the old
umask = os.umask(0000)
# Ensure that the dir is world writable by explictly setting mode
os.makedirs(kwargs['dir'], 0o0777)
# Reinstate the old mask for tmp dir
os.umask(umask)
# Now we have the working dir set up go on and return the filename
handle, filename = mkstemp(**kwargs)
# Need to close it using the filehandle first for windows!
os.close(handle)
try:
os.remove(filename)
except OSError:
pass
return filename
def zip_shp(shp_path, extra_ext=None, remove_file=False):
"""Zip shape file and its gang (.shx, .dbf, .prj).
.. note:: This function was taken from InaSAFE (http://inasafe.org) with
minor adaptions.
:param shp_path: Path to the main shape file.
:type shp_path: str
:param extra_ext: List of extra extensions (as strings) related to
shapefile that should be packaged up.
:type extra_ext: list
:param remove_file: bool - whether the original shp files should be
removed after zipping is complete. Defaults to False.
:type remove_file: bool
:returns: Full path to the created shapefile.
:rtype: str
"""
# go to the directory
my_cwd = os.getcwd()
shp_dir, shp_name = os.path.split(shp_path)
os.chdir(shp_dir)
shp_base_name, _ = os.path.splitext(shp_name)
extensions = ['.shp', '.shx', '.dbf', '.prj']
if extra_ext is not None:
extensions.extend(extra_ext)
# zip files
zip_filename = shp_base_name + '.zip'
zip_object = zipfile.ZipFile(zip_filename, 'w')
for ext in extensions:
if os.path.isfile(shp_base_name + ext):
zip_object.write(shp_base_name + ext)
zip_object.close()
if remove_file:
for ext in extensions:
if os.path.isfile(shp_base_name + ext):
os.remove(shp_base_name + ext)
os.chdir(my_cwd)
return os.path.join(shp_dir, zip_filename)
def which(name, flags=os.X_OK):
"""Search PATH for executable files with the given name.
..note:: This function was taken verbatim from the twisted framework,
licence available here:
http://twistedmatrix.com/trac/browser/tags/releases/twisted-8.2.0/LICENSE
On newer versions of MS-Windows, the PATHEXT environment variable will be
set to the list of file extensions for files considered executable. This
will normally include things like ".EXE". This fuction will also find files
with the given name ending with any of these extensions.
On MS-Windows the only flag that has any meaning is os.F_OK. Any other
flags will be ignored.
:type name: C{str}
:param name: The name for which to search.
:type flags: C{int}
:param flags: Arguments to L{os.access}.
:rtype: C{list}
:param: A list of the full paths to files found, in the
order in which they were found.
"""
if os.path.exists('/usr/bin/%s' % name):
return ['/usr/bin/%s' % name]
if os.path.exists('/usr/local/bin/%s' % name):
return ['/usr/local/bin/%s' % name]
result = []
# pylint: disable=W0141
extensions = filter(None, os.environ.get('PATHEXT', '').split(os.pathsep))
# pylint: enable=W0141
path = os.environ.get('PATH', None)
# In c6c9b26 we removed this hard coding for issue #529 but I am
# adding it back here in case the user's path does not include the
# gdal binary dir on OSX but it is actually there. (TS)
if sys.platform == 'darwin': # Mac OS X
gdal_prefix = (
'/Library/Frameworks/GDAL.framework/'
'Versions/1.10/Programs/')
path = '%s:%s' % (path, gdal_prefix)
message = 'Search path: %s' % path
LOGGER.debug(message)
if path is None:
return []
for p in path.split(os.pathsep):
p = os.path.join(p, name)
if os.access(p, flags):
result.append(p)
for e in extensions:
pext = p + e
if os.access(pext, flags):
result.append(pext)
return result
def short_version(version):
"""Get a shorter version, only with the major and minor version.
:param version: The version.
:type version: str
:return 'major.minor' version number.
:rtype float
"""
return float('.'.join(version.split('.')[0:2]))
|
{
"content_hash": "c6a75fc67e78011e7e990c0c695c8152",
"timestamp": "",
"source": "github",
"line_count": 676,
"max_line_length": 83,
"avg_line_length": 31.51775147928994,
"alnum_prop": 0.6194499202102695,
"repo_name": "meomancer/field-campaigner",
"id": "56f5a68aec2b27d72f1afba17604c1b3256cf4f4",
"size": "21321",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "flask_project/reporter/utilities.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "66147"
},
{
"name": "Dockerfile",
"bytes": "1005"
},
{
"name": "HTML",
"bytes": "211354"
},
{
"name": "JavaScript",
"bytes": "330296"
},
{
"name": "Makefile",
"bytes": "5094"
},
{
"name": "Python",
"bytes": "245499"
},
{
"name": "QML",
"bytes": "235701"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import argparse
import sys
import gzip
import os
import csv
def linecount(fname):
i = 0
if os.path.isfile(fname):
f = gzip.open(fname) if fname.endswith('.gz') else open(fname)
for i, l in enumerate(f):
pass
return i + 1
# Parse command line
parser = argparse.ArgumentParser(description='Key Quality Control Metrics')
parser.add_argument('-p', '--prefix', metavar='prefix', required=True, dest='prefix', help='output prefix used in ATAC-Seq pipeline (required)')
args = parser.parse_args()
# Collect QC statistics
qc = dict()
if args.prefix:
# Unfiltered BAM stats
filename = args.prefix + ".bamStats.unfiltered.tsv.gz"
if os.path.isfile(filename):
with gzip.open(filename, mode="rt") as f:
header = dict()
headerCM = dict()
f_reader = csv.reader(f, delimiter="\t")
for row in f_reader:
if row[0].startswith('ME'):
if not len(header.keys()):
for idx, key in enumerate(row):
header[key] = idx
else:
qc['UnmappedFraction'] = row[header['UnmappedFraction']]
qc['DuplicateFraction'] = row[header['DuplicateFraction']]
qc['MappedSameChrFraction'] = row[header['MappedSameChrFraction']]
if row[0].startswith('CM'):
if not len(headerCM.keys()):
for idx, key in enumerate(row):
headerCM[key] = idx
else:
if (row[headerCM['Chrom']] == "M") or (row[headerCM['Chrom']] == "chrM") or (row[headerCM['Chrom']] == "MT") or (row[headerCM['Chrom']] == "chrMT"):
qc['FractionChrM'] = row[headerCM['MappedFraction']]
# Filtered BAM stats
filename = args.prefix + ".bamStats.promoters.tsv.gz"
if os.path.isfile(filename):
with gzip.open(filename, mode="rt") as f:
header = dict()
f_reader = csv.reader(f, delimiter="\t")
for row in f_reader:
if row[0].startswith('ME'):
if not len(header.keys()):
for idx, key in enumerate(row):
header[key] = idx
else:
qc['Sample'] = row[header['Sample']].replace(".final","")
qc['MappedReads'] = row[header['#Mapped']]
qc['ErrorRate'] = row[header['ErrorRate']]
qc['SDCoverage'] = row[header['SDCoverage']]
qc['BpCov1ToCovNRatio'] = row[header['BpCov1ToCovNRatio']]
qc['BpCov1ToCov2Ratio'] = row[header['BpCov1ToCov2Ratio']]
qc['TssEnrichment'] = row[header['EnrichmentOverBed']]
# Peak calling statistics
filename = args.prefix + ".unfiltered.peaks.gz"
if os.path.isfile(filename):
qc['UnfilteredPeaks'] = str(linecount(filename))
filename = args.prefix + ".peaks"
if os.path.isfile(filename):
qc['FilteredPeaks'] = str(linecount(filename))
qc['FractionPeaksRetained'] = str(float(qc['FilteredPeaks'])/float(qc['UnfilteredPeaks']))
filename = args.prefix + ".peaks.log"
if os.path.isfile(filename):
with open(filename, 'r') as f:
f_reader = csv.DictReader(f, delimiter="\t")
for row in f_reader:
frip = row['frip'].split(',')
if len(frip) == 2:
qc['FRiP'] = str(min(float(frip[0]), float(frip[1])))
else:
qc['FRiP'] = row['frip']
qc['PeakSaturation'] = str(min(float(row['recallRep1']), float(row['recallRep2'])))
# Output summary QC information
cols = sorted(qc.keys())
print('\t'.join(cols))
first = True
for c in cols:
if first:
first = False
else:
print("\t", end="")
print(qc[c], end="")
print()
|
{
"content_hash": "ae795e5a75d2b968b3b36e39b7d92f8e",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 172,
"avg_line_length": 41.80612244897959,
"alnum_prop": 0.5223334146936783,
"repo_name": "tobiasrausch/ATACseq",
"id": "17824483f83f30c1a3299f5533ca97fa24223db4",
"size": "4121",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/qc_globber.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "609"
},
{
"name": "Makefile",
"bytes": "1227"
},
{
"name": "Python",
"bytes": "4121"
},
{
"name": "R",
"bytes": "8496"
},
{
"name": "Shell",
"bytes": "20981"
}
],
"symlink_target": ""
}
|
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import StringIO
import sys, subprocess
from mock.mock import MagicMock, patch, ANY
import mock.mock
import unittest
import logging
import signal
import ConfigParser
import ssl
import os
import tempfile
from ambari_commons import OSCheck
from only_for_platform import os_distro_value
with patch("platform.linux_distribution", return_value = ('Suse','11','Final')):
from ambari_agent import NetUtil
from ambari_agent.security import CertificateManager
from ambari_agent import ProcessHelper, main
from ambari_agent.AmbariConfig import AmbariConfig
from ambari_agent.Controller import Controller
from ambari_agent import security
aa = mock.mock.mock_open()
class TestSecurity(unittest.TestCase):
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
def setUp(self):
# disable stdout
out = StringIO.StringIO()
sys.stdout = out
# Create config
self.config = AmbariConfig()
self.config.set('security', 'ssl_verify_cert', '0')
# Instantiate CachedHTTPSConnection (skip connect() call)
with patch.object(security.VerifiedHTTPSConnection, "connect"):
self.cachedHTTPSConnection = security.CachedHTTPSConnection(self.config, "example.com")
def tearDown(self):
# enable stdout
sys.stdout = sys.__stdout__
### VerifiedHTTPSConnection ###
@patch.object(security.CertificateManager, "initSecurity")
@patch("socket.create_connection")
@patch("ssl.wrap_socket")
def test_VerifiedHTTPSConnection_connect(self, wrap_socket_mock,
create_connection_mock,
init_security_mock):
init_security_mock.return_value = None
self.config.set('security', 'keysdir', '/dummy-keysdir')
connection = security.VerifiedHTTPSConnection("example.com",
self.config.get('server', 'secured_url_port'), self.config)
connection._tunnel_host = False
connection.sock = None
connection.connect()
self.assertTrue(wrap_socket_mock.called)
### VerifiedHTTPSConnection with no certificates creation
@patch.object(security.CertificateManager, "initSecurity")
@patch("socket.create_connection")
@patch("ssl.wrap_socket")
def test_Verified_HTTPSConnection_non_secure_connect(self, wrap_socket_mock,
create_connection_mock,
init_security_mock):
connection = security.VerifiedHTTPSConnection("example.com",
self.config.get('server', 'secured_url_port'), self.config)
connection._tunnel_host = False
connection.sock = None
connection.connect()
self.assertFalse(init_security_mock.called)
### VerifiedHTTPSConnection with two-way SSL authentication enabled
@patch.object(security.CertificateManager, "initSecurity")
@patch("socket.create_connection")
@patch("ssl.wrap_socket")
def test_Verified_HTTPSConnection_two_way_ssl_connect(self, wrap_socket_mock,
create_connection_mock,
init_security_mock):
wrap_socket_mock.side_effect=ssl.SSLError()
connection = security.VerifiedHTTPSConnection("example.com",
self.config.get('server', 'secured_url_port'), self.config)
self.config.isTwoWaySSLConnection = MagicMock(return_value=True)
connection._tunnel_host = False
connection.sock = None
try:
connection.connect()
except ssl.SSLError:
pass
self.assertTrue(init_security_mock.called)
### CachedHTTPSConnection ###
@patch.object(security.VerifiedHTTPSConnection, "connect")
def test_CachedHTTPSConnection_connect(self, vhc_connect_mock):
self.config.set('server', 'hostname', 'dummy.server.hostname')
self.config.set('server', 'secured_url_port', '443')
# Testing not connected case
self.cachedHTTPSConnection.connected = False
self.cachedHTTPSConnection.connect()
self.assertTrue(vhc_connect_mock.called)
vhc_connect_mock.reset_mock()
# Testing already connected case
self.cachedHTTPSConnection.connect()
self.assertFalse(vhc_connect_mock.called)
@patch.object(security.CachedHTTPSConnection, "connect")
def test_forceClear(self, connect_mock):
# Testing if httpsconn instance changed
old = self.cachedHTTPSConnection.httpsconn
self.cachedHTTPSConnection.forceClear()
self.assertNotEqual(old, self.cachedHTTPSConnection.httpsconn)
@patch.object(security.CachedHTTPSConnection, "connect")
def test_request(self, connect_mock):
httpsconn_mock = MagicMock(create = True)
self.cachedHTTPSConnection.httpsconn = httpsconn_mock
dummy_request = MagicMock(create = True)
dummy_request.get_method.return_value = "dummy_get_method"
dummy_request.get_full_url.return_value = "dummy_full_url"
dummy_request.get_data.return_value = "dummy_get_data"
dummy_request.headers = "dummy_headers"
responce_mock = MagicMock(create = True)
responce_mock.read.return_value = "dummy responce"
httpsconn_mock.getresponse.return_value = responce_mock
# Testing normal case
responce = self.cachedHTTPSConnection.request(dummy_request)
self.assertEqual(responce, responce_mock.read.return_value)
httpsconn_mock.request.assert_called_once_with(
dummy_request.get_method.return_value,
dummy_request.get_full_url.return_value,
dummy_request.get_data.return_value,
dummy_request.headers)
# Testing case of exception
try:
def side_eff():
raise Exception("Dummy exception")
httpsconn_mock.read.side_effect = side_eff
responce = self.cachedHTTPSConnection.request(dummy_request)
self.fail("Should raise IOError")
except Exception, err:
# Expected
pass
### CertificateManager ###
@patch("ambari_agent.hostname.hostname")
def test_getAgentKeyName(self, hostname_mock):
hostname_mock.return_value = "dummy.hostname"
self.config.set('security', 'keysdir', '/dummy-keysdir')
man = CertificateManager(self.config, "active_server")
res = man.getAgentKeyName()
self.assertEquals(res, os.path.abspath("/dummy-keysdir/dummy.hostname.key"))
@patch("ambari_agent.hostname.hostname")
def test_getAgentCrtName(self, hostname_mock):
hostname_mock.return_value = "dummy.hostname"
self.config.set('security', 'keysdir', '/dummy-keysdir')
man = CertificateManager(self.config, "active_server")
res = man.getAgentCrtName()
self.assertEquals(res, os.path.abspath("/dummy-keysdir/dummy.hostname.crt"))
@patch("ambari_agent.hostname.hostname")
def test_getAgentCrtReqName(self, hostname_mock):
hostname_mock.return_value = "dummy.hostname"
self.config.set('security', 'keysdir', '/dummy-keysdir')
man = CertificateManager(self.config, "active_server")
res = man.getAgentCrtReqName()
self.assertEquals(res, os.path.abspath("/dummy-keysdir/dummy.hostname.csr"))
def test_getSrvrCrtName(self):
self.config.set('security', 'keysdir', '/dummy-keysdir')
man = CertificateManager(self.config, "active_server")
res = man.getSrvrCrtName()
self.assertEquals(res, os.path.abspath("/dummy-keysdir/ca.crt"))
@patch("os.path.exists")
@patch.object(security.CertificateManager, "loadSrvrCrt")
@patch.object(security.CertificateManager, "getAgentKeyName")
@patch.object(security.CertificateManager, "genAgentCrtReq")
@patch.object(security.CertificateManager, "getAgentCrtName")
@patch.object(security.CertificateManager, "reqSignCrt")
def test_checkCertExists(self, reqSignCrt_mock, getAgentCrtName_mock,
genAgentCrtReq_mock, getAgentKeyName_mock,
loadSrvrCrt_mock, exists_mock):
self.config.set('security', 'keysdir', '/dummy-keysdir')
getAgentKeyName_mock.return_value = "dummy AgentKeyName"
getAgentCrtName_mock.return_value = "dummy AgentCrtName"
man = CertificateManager(self.config, "active_server")
# Case when all files exist
exists_mock.side_effect = [True, True, True]
man.checkCertExists()
self.assertFalse(loadSrvrCrt_mock.called)
self.assertFalse(genAgentCrtReq_mock.called)
self.assertFalse(reqSignCrt_mock.called)
# Absent server cert
exists_mock.side_effect = [False, True, True]
man.checkCertExists()
self.assertTrue(loadSrvrCrt_mock.called)
self.assertFalse(genAgentCrtReq_mock.called)
self.assertFalse(reqSignCrt_mock.called)
loadSrvrCrt_mock.reset_mock()
# Absent agent key
exists_mock.side_effect = [True, False, True]
man.checkCertExists()
self.assertFalse(loadSrvrCrt_mock.called)
self.assertTrue(genAgentCrtReq_mock.called)
self.assertFalse(reqSignCrt_mock.called)
genAgentCrtReq_mock.reset_mock()
# Absent agent cert
exists_mock.side_effect = [True, True, False]
man.checkCertExists()
self.assertFalse(loadSrvrCrt_mock.called)
self.assertFalse(genAgentCrtReq_mock.called)
self.assertTrue(reqSignCrt_mock.called)
reqSignCrt_mock.reset_mock()
@patch("urllib2.OpenerDirector.open")
@patch.object(security.CertificateManager, "getSrvrCrtName")
def test_loadSrvrCrt(self, getSrvrCrtName_mock, urlopen_mock):
read_mock = MagicMock(create=True)
read_mock.read.return_value = "dummy_cert"
urlopen_mock.return_value = read_mock
_, tmpoutfile = tempfile.mkstemp()
getSrvrCrtName_mock.return_value = tmpoutfile
man = CertificateManager(self.config, "active_server")
man.loadSrvrCrt()
# Checking file contents
saved = open(tmpoutfile, 'r').read()
self.assertEqual(saved, read_mock.read.return_value)
try:
os.unlink(tmpoutfile)
except:
pass
@patch("ambari_agent.hostname.hostname")
@patch('__builtin__.open', create=True, autospec=True)
@patch.dict('os.environ', {'DUMMY_PASSPHRASE': 'dummy-passphrase'})
@patch('ambari_simplejson.dumps')
@patch('urllib2.Request')
@patch("urllib2.OpenerDirector.open")
@patch('ambari_simplejson.loads')
def test_reqSignCrt(self, loads_mock, urlopen_mock, request_mock, dumps_mock, open_mock, hostname_mock):
self.config.set('security', 'keysdir', '/dummy-keysdir')
self.config.set('security', 'passphrase_env_var_name', 'DUMMY_PASSPHRASE')
man = CertificateManager(self.config, "active_server")
hostname_mock.return_value = "dummy-hostname"
open_mock.return_value.read.return_value = "dummy_request"
urlopen_mock.return_value.read.return_value = "dummy_server_request"
loads_mock.return_value = {
'result': 'OK',
'signedCa': 'dummy-crt'
}
# Test normal server interaction
man.reqSignCrt()
self.assertEqual(dumps_mock.call_args[0][0], {
'csr' : 'dummy_request',
'passphrase' : 'dummy-passphrase'
})
self.assertEqual(open_mock.return_value.write.call_args[0][0], 'dummy-crt')
# Test negative server reply
dumps_mock.reset_mock()
open_mock.return_value.write.reset_mock()
loads_mock.return_value = {
'result': 'FAIL',
'signedCa': 'fail-crt'
}
# If certificate signing failed, then exception must be raised
try:
man.reqSignCrt()
self.fail()
except ssl.SSLError:
pass
self.assertFalse(open_mock.return_value.write.called)
# Test connection fail
dumps_mock.reset_mock()
open_mock.return_value.write.reset_mock()
try:
man.reqSignCrt()
self.fail("Expected exception here")
except Exception, err:
# expected
pass
# Test malformed JSON response
open_mock.return_value.write.reset_mock()
loads_mock.side_effect = Exception()
try:
man.reqSignCrt()
self.fail("Expected exception here")
except ssl.SSLError:
pass
self.assertFalse(open_mock.return_value.write.called)
@patch("subprocess.Popen")
@patch("subprocess.Popen.communicate")
@patch.object(os, "chmod")
def test_genAgentCrtReq(self, chmod_mock, communicate_mock, popen_mock):
man = CertificateManager(self.config, "active_server")
p = MagicMock(spec=subprocess.Popen)
p.communicate = communicate_mock
popen_mock.return_value = p
man.genAgentCrtReq('/dummy-keysdir/hostname.key')
self.assertTrue(chmod_mock.called)
self.assertTrue(popen_mock.called)
self.assertTrue(communicate_mock.called)
@patch("ambari_agent.hostname.hostname")
@patch('__builtin__.open', create=True, autospec=True)
@patch("urllib2.OpenerDirector.open")
@patch.dict('os.environ', {'DUMMY_PASSPHRASE': 'dummy-passphrase'})
def test_reqSignCrt_malformedJson(self, urlopen_mock, open_mock, hostname_mock):
hostname_mock.return_value = "dummy-hostname"
open_mock.return_value.read.return_value = "dummy_request"
self.config.set('security', 'keysdir', '/dummy-keysdir')
self.config.set('security', 'passphrase_env_var_name', 'DUMMY_PASSPHRASE')
man = CertificateManager(self.config, "active_server")
# test valid JSON response
urlopen_mock.return_value.read.return_value = '{"result": "OK", "signedCa":"dummy"}'
try:
man.reqSignCrt()
except ssl.SSLError:
self.fail("Unexpected exception!")
open_mock.return_value.write.assert_called_with(u'dummy')
# test malformed JSON response
open_mock.return_value.write.reset_mock()
urlopen_mock.return_value.read.return_value = '{malformed_object}'
try:
man.reqSignCrt()
self.fail("Expected exception!")
except ssl.SSLError:
pass
self.assertFalse(open_mock.return_value.write.called)
@patch.object(security.CertificateManager, "checkCertExists")
def test_initSecurity(self, checkCertExists_method):
man = CertificateManager(self.config, "active_server")
man.initSecurity()
self.assertTrue(checkCertExists_method.called)
|
{
"content_hash": "f9e71911772d241cb6c9ff87bd74df96",
"timestamp": "",
"source": "github",
"line_count": 392,
"max_line_length": 106,
"avg_line_length": 37.25765306122449,
"alnum_prop": 0.7053748716193085,
"repo_name": "radicalbit/ambari",
"id": "c9a7fbe8659b362ccdb9464aba4a3e5f10fb9bb7",
"size": "14628",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "ambari-agent/src/test/python/ambari_agent/TestSecurity.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "42212"
},
{
"name": "C",
"bytes": "331204"
},
{
"name": "C#",
"bytes": "182799"
},
{
"name": "C++",
"bytes": "257"
},
{
"name": "CSS",
"bytes": "1287531"
},
{
"name": "CoffeeScript",
"bytes": "4323"
},
{
"name": "FreeMarker",
"bytes": "2654"
},
{
"name": "Groovy",
"bytes": "88056"
},
{
"name": "HTML",
"bytes": "5098825"
},
{
"name": "Java",
"bytes": "29006663"
},
{
"name": "JavaScript",
"bytes": "17274453"
},
{
"name": "Makefile",
"bytes": "11111"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLSQL",
"bytes": "2160"
},
{
"name": "PLpgSQL",
"bytes": "314333"
},
{
"name": "PowerShell",
"bytes": "2087991"
},
{
"name": "Python",
"bytes": "14584206"
},
{
"name": "R",
"bytes": "1457"
},
{
"name": "Roff",
"bytes": "13935"
},
{
"name": "Ruby",
"bytes": "14478"
},
{
"name": "SQLPL",
"bytes": "2117"
},
{
"name": "Shell",
"bytes": "741459"
},
{
"name": "Vim script",
"bytes": "5813"
}
],
"symlink_target": ""
}
|
import saga
ctx = saga.Context("ssh")
ctx.user_id = "molu8455"
session = saga.Session()
session.add_context(ctx)
outfile_src = 'sftp://login/lustre/janus_scratch/molu8455/webapi/rengers.out'
handle = saga.filesystem.File(url=outfile_src, session=session)
print handle.get_size()
print handle.is_file()
#data = handle.read(10, ttype=saga.task.SYNC)
data = handle.read()
print data
handle.write(data+'monte')
data = handle.read()
print data
handle.close()
|
{
"content_hash": "657cb3481ed1fed24a42bfe113b715cf",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 77,
"avg_line_length": 17.846153846153847,
"alnum_prop": 0.7306034482758621,
"repo_name": "srinathv/myhat",
"id": "123c022a024b7d76e6f089cd21ae507409d3629d",
"size": "464",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test_fileio.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1246"
},
{
"name": "Python",
"bytes": "21529"
}
],
"symlink_target": ""
}
|
import os
import csv
from django.utils.dateparse import parse_datetime
from django.core.urlresolvers import reverse
from tempfile import NamedTemporaryFile
from odk_logger.models.xform import XForm
from main.tests.test_base import MainTestCase
from odk_logger.xform_instance_parser import xform_instance_to_dict
from odk_viewer.pandas_mongo_bridge import *
from common_tags import NA_REP
def xls_filepath_from_fixture_name(fixture_name):
"""
Return an xls file path at tests/fixtures/[fixture]/fixture.xls
"""
#TODO: currently this only works for fixtures in this app because of __file__
return os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"fixtures", fixture_name, fixture_name + ".xls"
)
def xml_inst_filepath_from_fixture_name(fixture_name, instance_name):
return os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"fixtures", fixture_name, "instances",
fixture_name + "_" + instance_name + ".xml"
)
class TestPandasMongoBridge(MainTestCase):
def setUp(self):
self._create_user_and_login()
self._submission_time=parse_datetime('2013-02-18 15:54:01Z')
def _publish_xls_fixture_set_xform(self, fixture):
"""
Publish an xls file at tests/fixtures/[fixture]/fixture.xls
"""
xls_file_path = xls_filepath_from_fixture_name(fixture)
count = XForm.objects.count()
response = self._publish_xls_file(xls_file_path)
self.assertEqual(XForm.objects.count(), count + 1)
self.xform = XForm.objects.all().reverse()[0]
def _submit_fixture_instance(
self, fixture, instance, submission_time=None):
"""
Submit an instance at
tests/fixtures/[fixture]/instances/[fixture]_[instance].xml
"""
xml_submission_file_path = xml_inst_filepath_from_fixture_name(fixture,
instance)
self._make_submission(
xml_submission_file_path, forced_submission_time=submission_time)
self.assertEqual(self.response.status_code, 201)
def _publish_single_level_repeat_form(self):
self._publish_xls_fixture_set_xform("new_repeats")
self.survey_name = u"new_repeats"
def _publish_nested_repeats_form(self):
self._publish_xls_fixture_set_xform("nested_repeats")
self.survey_name = u"nested_repeats"
def _publish_grouped_gps_form(self):
self._publish_xls_fixture_set_xform("grouped_gps")
self.survey_name = u"grouped_gps"
def _xls_data_for_dataframe(self):
xls_df_builder = XLSDataFrameBuilder(self.user.username,
self.xform.id_string)
cursor = xls_df_builder._query_mongo()
return xls_df_builder._format_for_dataframe(cursor)
def _csv_data_for_dataframe(self):
csv_df_builder = CSVDataFrameBuilder(self.user.username,
self.xform.id_string)
cursor = csv_df_builder._query_mongo()
return csv_df_builder._format_for_dataframe(cursor)
def test_generated_sections(self):
self._publish_single_level_repeat_form()
self._submit_fixture_instance("new_repeats", "01")
xls_df_builder = XLSDataFrameBuilder(self.user.username,
self.xform.id_string)
expected_section_keys = [self.survey_name, u"kids_details"]
section_keys = xls_df_builder.sections.keys()
self.assertEqual(sorted(expected_section_keys), sorted(section_keys))
def test_row_counts(self):
"""
Test the number of rows in each sheet
We expect a single row in the main new_repeats sheet and 2 rows in the
kids details sheet one for each repeat
"""
self._publish_single_level_repeat_form()
self._submit_fixture_instance("new_repeats", "01")
data = self._xls_data_for_dataframe()
self.assertEqual(len(data[self.survey_name]), 1)
self.assertEqual(len(data[u"kids_details"]), 2)
def test_xls_columns(self):
"""
Test that our expected columns are in the data
"""
self._publish_single_level_repeat_form()
self._submit_fixture_instance("new_repeats", "01")
data = self._xls_data_for_dataframe()
# columns in the default sheet
expected_default_columns = [
u"gps",
u"_gps_latitude",
u"_gps_longitude",
u"_gps_altitude",
u"_gps_precision",
u"web_browsers/firefox",
u"web_browsers/safari",
u"web_browsers/ie",
u"info/age",
u"web_browsers/chrome",
u"kids/has_kids",
u"info/name",
u"meta/instanceID"
] + AbstractDataFrameBuilder.ADDITIONAL_COLUMNS +\
XLSDataFrameBuilder.EXTRA_COLUMNS
# get the header
default_columns = [k for k in data[self.survey_name][0]]
self.assertEqual(sorted(expected_default_columns),
sorted(default_columns))
# columns in the kids_details sheet
expected_kids_details_columns = [
u"kids/kids_details/kids_name",
u"kids/kids_details/kids_age"
] + AbstractDataFrameBuilder.ADDITIONAL_COLUMNS +\
XLSDataFrameBuilder.EXTRA_COLUMNS
kids_details_columns = [k for k in data[u"kids_details"][0]]
self.assertEqual(sorted(expected_kids_details_columns),
sorted(kids_details_columns))
def test_xls_columns_for_gps_within_groups(self):
"""
Test that a valid xpath is generated for extra gps fields that are NOT
top level
"""
self._publish_grouped_gps_form()
self._submit_fixture_instance("grouped_gps", "01")
data = self._xls_data_for_dataframe()
# columns in the default sheet
expected_default_columns = [
u"gps_group/gps",
u"gps_group/_gps_latitude",
u"gps_group/_gps_longitude",
u"gps_group/_gps_altitude",
u"gps_group/_gps_precision",
u"web_browsers/firefox",
u"web_browsers/safari",
u"web_browsers/ie",
u"web_browsers/chrome",
u"meta/instanceID"
] + AbstractDataFrameBuilder.ADDITIONAL_COLUMNS +\
XLSDataFrameBuilder.EXTRA_COLUMNS
default_columns = [k for k in data[self.survey_name][0]]
self.assertEqual(sorted(expected_default_columns),
sorted(default_columns))
def test_xlsx_output_when_data_exceeds_limits(self):
self._publish_xls_fixture_set_xform("xlsx_output")
self._submit_fixture_instance("xlsx_output", "01")
xls_builder = XLSDataFrameBuilder(username=self.user.username,
id_string=self.xform.id_string)
self.assertEqual(xls_builder.exceeds_xls_limits, True)
# test that the view returns an xlsx file instead
url = reverse('xls_export',
kwargs={
'username': self.user.username,
'id_string': self.xform.id_string
})
self.response = self.client.get(url)
self.assertEqual(self.response.status_code, 200)
self.assertEqual(self.response["content-type"],\
'application/vnd.openxmlformats')
def test_xlsx_export_for_repeats(self):
"""
Make sure exports run fine when the xlsx file has multiple sheets
"""
self._publish_xls_fixture_set_xform("new_repeats")
self._submit_fixture_instance("new_repeats", "01")
xls_builder = XLSDataFrameBuilder(username=self.user.username,
id_string=self.xform.id_string)
# test that the view returns an xlsx file instead
url = reverse('xls_export',
kwargs={
'username': self.user.username,
'id_string': self.xform.id_string
}
)
params = {
'xlsx': 'true' # force xlsx
}
self.response = self.client.get(url, params)
self.assertEqual(self.response.status_code, 200)
self.assertEqual(self.response["content-type"],\
'application/vnd.openxmlformats')
def test_csv_dataframe_export_to(self):
self._publish_nested_repeats_form()
self._submit_fixture_instance(
"nested_repeats", "01", submission_time=self._submission_time)
self._submit_fixture_instance(
"nested_repeats", "02", submission_time=self._submission_time)
csv_df_builder = CSVDataFrameBuilder(self.user.username,
self.xform.id_string)
temp_file = NamedTemporaryFile(suffix=".csv", delete=False)
csv_df_builder.export_to(temp_file.name)
csv_fixture_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"fixtures", "nested_repeats", "nested_repeats.csv"
)
temp_file.close()
fixture, output = '', ''
with open(csv_fixture_path) as f:
fixture = f.read()
with open(temp_file.name) as f:
output = f.read()
os.unlink(temp_file.name)
self.assertEqual(fixture, output)
def test_csv_columns_for_gps_within_groups(self):
self._publish_grouped_gps_form()
self._submit_fixture_instance("grouped_gps", "01")
data = self._csv_data_for_dataframe()
columns = data[0].keys()
expected_columns = [
u'gps_group/gps',
u'gps_group/_gps_latitude',
u'gps_group/_gps_longitude',
u'gps_group/_gps_altitude',
u'gps_group/_gps_precision',
u'_tags',
u'web_browsers/firefox',
u'web_browsers/chrome',
u'web_browsers/ie',
u'web_browsers/safari',
] + AbstractDataFrameBuilder.ADDITIONAL_COLUMNS +\
AbstractDataFrameBuilder.IGNORED_COLUMNS
try:
expected_columns.remove(u'_deleted_at')
except ValueError:
pass
self.maxDiff = None
self.assertEqual(sorted(expected_columns), sorted(columns))
def test_format_mongo_data_for_csv(self):
self.maxDiff = None
self._publish_single_level_repeat_form()
self._submit_fixture_instance("new_repeats", "01")
dd = self.xform.data_dictionary()
columns = dd.get_keys()
data_0 = self._csv_data_for_dataframe()[0]
# remove AbstractDataFrameBuilder.INTERNAL_FIELDS
for key in AbstractDataFrameBuilder.IGNORED_COLUMNS:
if data_0.has_key(key):
data_0.pop(key)
for key in AbstractDataFrameBuilder.ADDITIONAL_COLUMNS:
if data_0.has_key(key):
data_0.pop(key)
expected_data_0 = {
u'gps': u'-1.2627557 36.7926442 0.0 30.0',
u'_gps_latitude': u'-1.2627557',
u'_gps_longitude': u'36.7926442',
u'_gps_altitude': u'0.0',
u'_gps_precision': u'30.0',
u'_tags': u'',
u'kids/has_kids': u'1',
u'info/age': u'80',
u'kids/kids_details[1]/kids_name': u'Abel',
u'kids/kids_details[1]/kids_age': u'50',
u'kids/kids_details[2]/kids_name': u'Cain',
u'kids/kids_details[2]/kids_age': u'76',
u'web_browsers/chrome': True,
u'web_browsers/ie': True,
u'web_browsers/safari': False,
u'web_browsers/firefox': False,
u'info/name': u'Adam',
}
self.assertEqual(expected_data_0, data_0)
def test_split_select_multiples(self):
self._publish_nested_repeats_form()
dd = self.xform.data_dictionary()
self._submit_fixture_instance("nested_repeats", "01")
csv_df_builder = CSVDataFrameBuilder(self.user.username,
self.xform.id_string)
cursor = csv_df_builder._query_mongo()
record = cursor[0]
select_multiples = CSVDataFrameBuilder._collect_select_multiples(dd)
result = CSVDataFrameBuilder._split_select_multiples(record,
select_multiples)
expected_result = {
u'web_browsers/ie': True,
u'web_browsers/safari': True,
u'web_browsers/firefox': False,
u'web_browsers/chrome': False
}
# build a new dictionary only composed of the keys we want to use in
# the comparison
result = dict([(key, result[key]) for key in result.keys() if key in \
expected_result.keys()])
self.assertEqual(expected_result, result)
def test_split_select_multiples_within_repeats(self):
self.maxDiff = None
record = {
'name': 'Tom',
'age': 23,
'browser_use': [
{
'browser_use/year': '2010',
'browser_use/browsers': 'firefox safari'
},
{
'browser_use/year': '2011',
'browser_use/browsers': 'firefox chrome'
}
]
}
expected_result = {
'name': 'Tom',
'age': 23,
'browser_use': [
{
'browser_use/year': '2010',
'browser_use/browsers/firefox': True,
'browser_use/browsers/safari': True,
'browser_use/browsers/ie': False,
'browser_use/browsers/chrome': False
},
{
'browser_use/year': '2011',
'browser_use/browsers/firefox': True,
'browser_use/browsers/safari': False,
'browser_use/browsers/ie': False,
'browser_use/browsers/chrome': True
}
]
}
select_multiples = {
'browser_use/browsers':
[
'browser_use/browsers/firefox',
'browser_use/browsers/safari',
'browser_use/browsers/ie',
'browser_use/browsers/chrome'
]
}
result = CSVDataFrameBuilder._split_select_multiples(record,
select_multiples)
self.assertEqual(expected_result, result)
def test_split_gps_fields(self):
record = {
'gps': '5 6 7 8'
}
gps_fields = ['gps']
expected_result = {
'gps': '5 6 7 8',
'_gps_latitude': '5',
'_gps_longitude': '6',
'_gps_altitude': '7',
'_gps_precision': '8',
}
AbstractDataFrameBuilder._split_gps_fields(record, gps_fields)
self.assertEqual(expected_result, record)
def test_split_gps_fields_within_repeats(self):
record = \
{
'a_repeat':
[
{
'a_repeat/gps': '1 2 3 4'
},
{
'a_repeat/gps': '5 6 7 8'
}
]
}
gps_fields = ['a_repeat/gps']
expected_result = \
{
'a_repeat':
[
{
'a_repeat/gps': '1 2 3 4',
'a_repeat/_gps_latitude': '1',
'a_repeat/_gps_longitude': '2',
'a_repeat/_gps_altitude': '3',
'a_repeat/_gps_precision': '4',
},
{
'a_repeat/gps': '5 6 7 8',
'a_repeat/_gps_latitude': '5',
'a_repeat/_gps_longitude': '6',
'a_repeat/_gps_altitude': '7',
'a_repeat/_gps_precision': '8',
}
]
}
AbstractDataFrameBuilder._split_gps_fields(record, gps_fields)
self.assertEqual(expected_result, record)
def test_unicode_export(self):
unicode_char = unichr(40960)
# fake data
data = [{"key": unicode_char}]
columns = ["key"]
# test xls
xls_df_writer = XLSDataFrameWriter(data, columns)
temp_file = NamedTemporaryFile(suffix=".xls")
excel_writer = ExcelWriter(temp_file.name)
passed = False
try:
xls_df_writer.write_to_excel(excel_writer, "default")
passed = True
except UnicodeEncodeError:
pass
finally:
temp_file.close()
self.assertTrue(passed)
# test csv
passed = False
csv_df_writer = CSVDataFrameWriter(data, columns)
temp_file = NamedTemporaryFile(suffix=".csv")
try:
csv_df_writer.write_to_csv(temp_file)
passed = True
except UnicodeEncodeError:
pass
finally:
temp_file.close()
temp_file.close()
self.assertTrue(passed)
def test_repeat_child_name_matches_repeat(self):
"""
ParsedInstance.to_dict creates a list within a repeat if a child has the same name as the repeat
This test makes sure that doesnt happen
"""
self.maxDiff = None
fixture = "repeat_child_name_matches_repeat"
# publish form so we have a dd to pass to xform inst. parser
self._publish_xls_fixture_set_xform(fixture)
submission_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"fixtures", fixture, fixture + ".xml"
)
# get submission xml str
with open(submission_path, "r") as f:
xml_str = f.read()
dict = xform_instance_to_dict(xml_str, self.xform.data_dictionary())
expected_dict = {
u'test_item_name_matches_repeat': {
u'formhub': {
u'uuid': u'c911d71ce1ac48478e5f8bac99addc4e'
},
u'gps':
[
{
u'info': u'Yo',
u'gps': u'-1.2625149 36.7924478 0.0 30.0'
},
{
u'info': u'What',
u'gps': u'-1.2625072 36.7924328 0.0 30.0'
}
]
}
}
self.assertEqual(dict, expected_dict)
def test_remove_dups_from_list_maintain_order(self):
l = ["a", "z", "b", "y", "c", "b", "x"]
result = remove_dups_from_list_maintain_order(l)
expected_result = ["a", "z", "b", "y", "c", "x"]
self.assertEqual(result, expected_result)
def test_valid_sheet_name(self):
sheet_names = ["sheet_1", "sheet_2"]
desired_sheet_name = "sheet_3"
expected_sheet_name = "sheet_3"
generated_sheet_name = get_valid_sheet_name(desired_sheet_name,
sheet_names)
self.assertEqual(generated_sheet_name, expected_sheet_name)
def test_invalid_sheet_name(self):
sheet_names = ["sheet_1", "sheet_2"]
desired_sheet_name = "sheet_3_with_more_than_max_expected_length"
expected_sheet_name = "sheet_3_with_more_than_max_exp"
generated_sheet_name = get_valid_sheet_name(desired_sheet_name,
sheet_names)
self.assertEqual(generated_sheet_name, expected_sheet_name)
def test_duplicate_sheet_name(self):
sheet_names = ["sheet_2_with_duplicate_sheet_n",
"sheet_2_with_duplicate_sheet_1"]
duplicate_sheet_name = "sheet_2_with_duplicate_sheet_n"
expected_sheet_name = "sheet_2_with_duplicate_sheet_2"
generated_sheet_name = get_valid_sheet_name(duplicate_sheet_name,
sheet_names)
self.assertEqual(generated_sheet_name, expected_sheet_name)
def test_query_mongo(self):
"""
Test querying for record count and records using AbstractDataFrameBuilder._query_mongo
"""
self._publish_single_level_repeat_form()
# submit 3 instances
for i in range(3):
self._submit_fixture_instance("new_repeats", "01")
df_builder = XLSDataFrameBuilder(self.user.username,
self.xform.id_string)
record_count = df_builder._query_mongo(count=True)
self.assertEqual(record_count, 3)
cursor = df_builder._query_mongo()
records = [record for record in cursor]
self.assertTrue(len(records), 3)
# test querying using limits
cursor = df_builder._query_mongo(start=2, limit=2)
records = [record for record in cursor]
self.assertTrue(len(records), 1)
def test_prefix_from_xpath(self):
xpath = "parent/child/grandhild"
prefix = get_prefix_from_xpath(xpath)
self.assertEqual(prefix, 'parent/child/')
xpath = "parent/child"
prefix = get_prefix_from_xpath(xpath)
self.assertEqual(prefix, 'parent/')
xpath = "parent"
prefix = get_prefix_from_xpath(xpath)
self.assertTrue(prefix is None)
def test_csv_export_with_df_size_limit(self):
"""
To fix pandas limitation of 30k rows on csv export, we specify a max
number of records in a dataframe on export - lets test it
"""
self._publish_single_level_repeat_form()
# submit 7 instances
for i in range(4):
self._submit_fixture_instance("new_repeats", "01")
self._submit_fixture_instance("new_repeats", "02")
for i in range(2):
self._submit_fixture_instance("new_repeats", "01")
csv_df_builder = CSVDataFrameBuilder(self.user.username,
self.xform.id_string)
record_count = csv_df_builder._query_mongo(count=True)
self.assertEqual(record_count, 7)
temp_file = NamedTemporaryFile(suffix=".csv", delete=False)
csv_df_builder.export_to(temp_file.name, data_frame_max_size=3)
csv_file = open(temp_file.name)
csv_reader = csv.reader(csv_file)
header = csv_reader.next()
self.assertEqual(
len(header), 17 + len(AbstractDataFrameBuilder.ADDITIONAL_COLUMNS))
rows = []
for row in csv_reader:
rows.append(row)
self.assertEqual(len(rows), 7)
self.assertEqual(rows[4][5], NA_REP)
# close and delete file
csv_file.close()
os.unlink(temp_file.name)
def test_csv_column_indices_in_groups_within_repeats(self):
self._publish_xls_fixture_set_xform("groups_in_repeats")
self._submit_fixture_instance("groups_in_repeats", "01")
dd = self.xform.data_dictionary()
columns = dd.get_keys()
data_0 = self._csv_data_for_dataframe()[0]
# remove dynamic fields
ignore_list = [
'_uuid', 'meta/instanceID', 'formhub/uuid', '_submission_time',
'_id', '_bamboo_dataset_id']
for item in ignore_list:
data_0.pop(item)
expected_data_0 = {
# u'_id': 1,
# u'_uuid': u'ba6bc9d7-b46a-4d25-955e-99ec94e7b2f6',
# u'_deleted_at': None,
u'_xform_id_string': u'groups_in_repeats',
u'_status': u'submitted_via_web',
u'_tags': u'',
# u'_bamboo_dataset_id': u'',
# u'_submission_time': u'2013-03-20T10:50:08',
u'name': u'Abe',
u'age': u'88',
u'has_children': u'1',
# u'meta/instanceID': u'uuid:ba6bc9d7-b46a-4d25-955e-99ec94e7b2f6',
# u'formhub/uuid': u'1c491d705d514354acd4a9e34fe7526d',
u'_attachments': [],
u'children[1]/childs_info/name': u'Cain',
u'children[2]/childs_info/name': u'Abel',
u'children[1]/childs_info/age': u'56',
u'children[2]/childs_info/age': u'48',
u'children[1]/immunization/immunization_received/polio_1': True,
u'children[1]/immunization/immunization_received/polio_2': False,
u'children[2]/immunization/immunization_received/polio_1': True,
u'children[2]/immunization/immunization_received/polio_2': True,
u'web_browsers/chrome': True,
u'web_browsers/firefox': False,
u'web_browsers/ie': False,
u'web_browsers/safari': False,
u'gps': u'-1.2626156 36.7923571 0.0 30.0',
u'_geolocation': [u'-1.2626156', u'36.7923571'],
u'_gps_latitude': u'-1.2626156',
u'_gps_longitude': u'36.7923571',
u'_gps_altitude': u'0.0',
u'_gps_precision': u'30.0',
}
self.maxDiff = None
self.assertEqual(data_0, expected_data_0)
# todo: test nested repeats as well on xls
def test_xls_groups_within_repeats(self):
self._publish_xls_fixture_set_xform("groups_in_repeats")
self._submit_fixture_instance("groups_in_repeats", "01")
dd = self.xform.data_dictionary()
columns = dd.get_keys()
data = self._xls_data_for_dataframe()
# remove dynamic fields
ignore_list = [
'_uuid', 'meta/instanceID', 'formhub/uuid', '_submission_time',
'_id', '_bamboo_dataset_id']
for item in ignore_list:
# pop unwanted keys from main section
for d in data["groups_in_repeats"]:
if d.has_key(item):
d.pop(item)
# pop unwanted keys from children's section
for d in data["children"]:
if d.has_key(item):
d.pop(item)
# todo: add _id to xls export
expected_data = {
u"groups_in_repeats":
[
{
# u'_submission_time': u'2013-03-21T02:57:37',
u'picture': None,
u'has_children': u'1',
u'name': u'Abe',
u'age': u'88',
u'web_browsers/chrome': True,
u'web_browsers/safari': False,
u'web_browsers/ie': False,
u'web_browsers/firefox': False,
u'gps': u'-1.2626156 36.7923571 0.0 30.0',
u'_gps_latitude': u'-1.2626156',
u'_gps_longitude': u'36.7923571',
u'_gps_altitude': u'0.0',
u'_gps_precision': u'30.0',
# u'meta/instanceID': u'uuid:ba6bc9d7-b46a-4d25-955e-99ec94e7b2f6',
# u'_uuid': u'ba6bc9d7-b46a-4d25-955e-99ec94e7b2f6',
u'_index': 1,
u'_parent_table_name': None,
u'_parent_index': -1
}
]
,
u"children":
[
{
u'children/childs_info/name': u'Cain',
u'children/childs_info/age': u'56',
u'children/immunization/immunization_received/polio_1': True,
u'children/immunization/immunization_received/polio_2': False,
u'_index': 1,
u'_parent_table_name': u'groups_in_repeats',
u'_parent_index': 1,
# u'_submission_time': None,
# u'_uuid': None,
},
{
u'children/childs_info/name': u'Able',
u'children/childs_info/age': u'48',
u'children/immunization/immunization_received/polio_1': True,
u'children/immunization/immunization_received/polio_2': True,
u'_index': 2,
u'_parent_table_name': u'groups_in_repeats',
u'_parent_index': 1,
# u'_submission_time': None,
# u'_uuid': None,
}
]
}
self.maxDiff = None
self.assertEqual(
data["groups_in_repeats"][0], expected_data["groups_in_repeats"][0])
# each of the children should have children/... keys, we can guratnee the order so we cant check the values, just make sure they are not none
self.assertEqual(len(data["children"]), 2)
for child in data["children"]:
self.assertTrue(child.has_key("children/childs_info/name"))
self.assertIsNotNone(child["children/childs_info/name"])
self.assertTrue(child.has_key("children/childs_info/age"))
self.assertIsNotNone(child["children/childs_info/name"])
self.assertTrue(child.has_key("children/immunization/immunization_received/polio_1"))
self.assertEqual(type(child["children/immunization/immunization_received/polio_1"]), bool)
self.assertTrue(child.has_key("children/immunization/immunization_received/polio_2"))
self.assertEqual(type(child["children/immunization/immunization_received/polio_2"]), bool)
|
{
"content_hash": "b310d3141894023996189081bcf17840",
"timestamp": "",
"source": "github",
"line_count": 716,
"max_line_length": 149,
"avg_line_length": 40.39664804469274,
"alnum_prop": 0.5484372839164707,
"repo_name": "SEL-Columbia/formhub",
"id": "4e0bf7594c751d8123e17f4e051a51be7f42d389",
"size": "28924",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "odk_viewer/tests/test_pandas_mongo_bridge.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "295007"
},
{
"name": "HTML",
"bytes": "1669852"
},
{
"name": "JavaScript",
"bytes": "2294844"
},
{
"name": "Makefile",
"bytes": "8446"
},
{
"name": "Python",
"bytes": "1543287"
},
{
"name": "Shell",
"bytes": "11919"
}
],
"symlink_target": ""
}
|
"""Taiga integration for Zulip.
Tips for notification output:
*Text formatting*: if there has been a change of a property, the new
value should always be in bold; otherwise the subject of US/task
should be in bold.
"""
from typing import Any, Dict, List, Mapping, Optional, Tuple
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile
@api_key_only_webhook_view('Taiga')
@has_request_variables
def api_taiga_webhook(request: HttpRequest, user_profile: UserProfile,
message: Dict[str, Any]=REQ(argument_type='body')) -> HttpResponse:
parsed_events = parse_message(message)
content_lines = []
for event in parsed_events:
content_lines.append(generate_content(event) + '\n')
content = "".join(sorted(content_lines))
topic = 'General'
if message["data"].get("milestone") is not None:
if message["data"]["milestone"].get("name") is not None:
topic = message["data"]["milestone"]["name"]
check_send_webhook_message(request, user_profile, topic, content)
return json_success()
templates = {
'epic': {
'create': '[{user}]({user_link}) created epic {subject}.',
'set_assigned_to': '[{user}]({user_link}) assigned epic {subject} to {new}.',
'unset_assigned_to': '[{user}]({user_link}) unassigned epic {subject}.',
'changed_assigned_to': '[{user}]({user_link}) reassigned epic {subject}'
' from {old} to {new}.',
'blocked': '[{user}]({user_link}) blocked epic {subject}.',
'unblocked': '[{user}]({user_link}) unblocked epic {subject}.',
'changed_status': '[{user}]({user_link}) changed status of epic {subject}'
' from {old} to {new}.',
'renamed': '[{user}]({user_link}) renamed epic from **{old}** to **{new}**.',
'description_diff': '[{user}]({user_link}) updated description of epic {subject}.',
'commented': '[{user}]({user_link}) commented on epic {subject}.',
'delete': '[{user}]({user_link}) deleted epic {subject}.',
},
'relateduserstory': {
'create': ('[{user}]({user_link}) added a related user story '
'{userstory_subject} to the epic {epic_subject}.'),
'delete': ('[{user}]({user_link}) removed a related user story ' +
'{userstory_subject} from the epic {epic_subject}.'),
},
'userstory': {
'create': '[{user}]({user_link}) created user story {subject}.',
'set_assigned_to': '[{user}]({user_link}) assigned user story {subject} to {new}.',
'unset_assigned_to': '[{user}]({user_link}) unassigned user story {subject}.',
'changed_assigned_to': '[{user}]({user_link}) reassigned user story {subject}'
' from {old} to {new}.',
'points': '[{user}]({user_link}) changed estimation of user story {subject}.',
'blocked': '[{user}]({user_link}) blocked user story {subject}.',
'unblocked': '[{user}]({user_link}) unblocked user story {subject}.',
'set_milestone': '[{user}]({user_link}) added user story {subject} to sprint {new}.',
'unset_milestone': '[{user}]({user_link}) removed user story {subject} from sprint {old}.',
'changed_milestone': '[{user}]({user_link}) changed sprint of user story {subject} from {old}'
' to {new}.',
'changed_status': '[{user}]({user_link}) changed status of user story {subject}'
' from {old} to {new}.',
'closed': '[{user}]({user_link}) closed user story {subject}.',
'reopened': '[{user}]({user_link}) reopened user story {subject}.',
'renamed': '[{user}]({user_link}) renamed user story from {old} to **{new}**.',
'description_diff': '[{user}]({user_link}) updated description of user story {subject}.',
'commented': '[{user}]({user_link}) commented on user story {subject}.',
'delete': '[{user}]({user_link}) deleted user story {subject}.',
'due_date': '[{user}]({user_link}) changed due date of user story {subject}'
' from {old} to {new}.',
'set_due_date': '[{user}]({user_link}) set due date of user story {subject}'
' to {new}.',
},
'milestone': {
'create': '[{user}]({user_link}) created sprint {subject}.',
'renamed': '[{user}]({user_link}) renamed sprint from {old} to **{new}**.',
'estimated_start': '[{user}]({user_link}) changed estimated start of sprint {subject}'
' from {old} to {new}.',
'estimated_finish': '[{user}]({user_link}) changed estimated finish of sprint {subject}'
' from {old} to {new}.',
'set_estimated_start': '[{user}]({user_link}) changed estimated start of sprint {subject}'
' to {new}.',
'set_estimated_finish': '[{user}]({user_link}) set estimated finish of sprint {subject}'
' to {new}.',
'delete': '[{user}]({user_link}) deleted sprint {subject}.',
},
'task': {
'create': '[{user}]({user_link}) created task {subject}.',
'set_assigned_to': '[{user}]({user_link}) assigned task {subject} to {new}.',
'unset_assigned_to': '[{user}]({user_link}) unassigned task {subject}.',
'changed_assigned_to': '[{user}]({user_link}) reassigned task {subject}'
' from {old} to {new}.',
'blocked': '[{user}]({user_link}) blocked task {subject}.',
'unblocked': '[{user}]({user_link}) unblocked task {subject}.',
'changed_status': '[{user}]({user_link}) changed status of task {subject}'
' from {old} to {new}.',
'renamed': '[{user}]({user_link}) renamed task {old} to **{new}**.',
'description_diff': '[{user}]({user_link}) updated description of task {subject}.',
'set_milestone': '[{user}]({user_link}) added task {subject} to sprint {new}.',
'commented': '[{user}]({user_link}) commented on task {subject}.',
'delete': '[{user}]({user_link}) deleted task {subject}.',
'changed_us': '[{user}]({user_link}) moved task {subject} from user story {old} to {new}.',
'due_date': '[{user}]({user_link}) changed due date of task {subject}'
' from {old} to {new}.',
'set_due_date': '[{user}]({user_link}) set due date of task {subject}'
' to {new}.',
},
'issue': {
'create': '[{user}]({user_link}) created issue {subject}.',
'set_assigned_to': '[{user}]({user_link}) assigned issue {subject} to {new}.',
'unset_assigned_to': '[{user}]({user_link}) unassigned issue {subject}.',
'changed_assigned_to': '[{user}]({user_link}) reassigned issue {subject}'
' from {old} to {new}.',
'set_milestone': '[{user}]({user_link}) added issue {subject} to sprint {new}.',
'unset_milestone': '[{user}]({user_link}) detached issue {subject} from sprint {old}.',
'changed_priority': '[{user}]({user_link}) changed priority of issue '
'{subject} from {old} to {new}.',
'changed_severity': '[{user}]({user_link}) changed severity of issue '
'{subject} from {old} to {new}.',
'changed_status': '[{user}]({user_link}) changed status of issue {subject}'
' from {old} to {new}.',
'changed_type': '[{user}]({user_link}) changed type of issue {subject} from {old} to {new}.',
'renamed': '[{user}]({user_link}) renamed issue {old} to **{new}**.',
'description_diff': '[{user}]({user_link}) updated description of issue {subject}.',
'commented': '[{user}]({user_link}) commented on issue {subject}.',
'delete': '[{user}]({user_link}) deleted issue {subject}.',
'due_date': '[{user}]({user_link}) changed due date of issue {subject}'
' from {old} to {new}.',
'set_due_date': '[{user}]({user_link}) set due date of issue {subject}'
' to {new}.',
'blocked': '[{user}]({user_link}) blocked issue {subject}.',
'unblocked': '[{user}]({user_link}) unblocked issue {subject}.',
},
'webhook_test': {
'test': '[{user}]({user_link}) triggered a test of the Taiga integration.',
},
}
return_type = Tuple[Optional[Dict[str, Any]], Optional[Dict[str, Any]]]
def get_old_and_new_values(change_type: str,
message: Mapping[str, Any]) -> return_type:
""" Parses the payload and finds previous and current value of change_type."""
old = message["change"]["diff"][change_type].get("from")
new = message["change"]["diff"][change_type].get("to")
return old, new
def parse_comment(message: Mapping[str, Any]) -> Dict[str, Any]:
""" Parses the comment to issue, task or US. """
return {
'event': 'commented',
'type': message["type"],
'values': {
'user': get_owner_name(message),
'user_link': get_owner_link(message),
'subject': get_subject(message),
},
}
def parse_create_or_delete(message: Mapping[str, Any]) -> Dict[str, Any]:
""" Parses create or delete event. """
if message["type"] == 'relateduserstory':
return {
'type': message["type"],
'event': message["action"],
'values': {
'user': get_owner_name(message),
'user_link': get_owner_link(message),
'epic_subject': get_epic_subject(message),
'userstory_subject': get_userstory_subject(message),
},
}
return {
'type': message["type"],
'event': message["action"],
'values': {
'user': get_owner_name(message),
'user_link': get_owner_link(message),
'subject': get_subject(message),
},
}
def parse_change_event(change_type: str, message: Mapping[str, Any]) -> Optional[Dict[str, Any]]:
""" Parses change event. """
evt: Dict[str, Any] = {}
values: Dict[str, Any] = {
'user': get_owner_name(message),
'user_link': get_owner_link(message),
'subject': get_subject(message),
}
if change_type in ["description_diff", "points"]:
event_type = change_type
elif change_type in ["milestone", "assigned_to"]:
old, new = get_old_and_new_values(change_type, message)
if not old:
event_type = "set_" + change_type
values["new"] = new
elif not new:
event_type = "unset_" + change_type
values["old"] = old
else:
event_type = "changed_" + change_type
values.update({'old': old, 'new': new})
elif change_type == "is_blocked":
if message["change"]["diff"]["is_blocked"]["to"]:
event_type = "blocked"
else:
event_type = "unblocked"
elif change_type == "is_closed":
if message["change"]["diff"]["is_closed"]["to"]:
event_type = "closed"
else:
event_type = "reopened"
elif change_type == "user_story":
old, new = get_old_and_new_values(change_type, message)
event_type = "changed_us"
values.update({'old': old, 'new': new})
elif change_type in ["subject", 'name']:
event_type = 'renamed'
old, new = get_old_and_new_values(change_type, message)
values.update({'old': old, 'new': new})
elif change_type in ["estimated_finish", "estimated_start", "due_date"]:
old, new = get_old_and_new_values(change_type, message)
if not old:
event_type = "set_" + change_type
values["new"] = new
elif not old == new:
event_type = change_type
values.update({'old': old, 'new': new})
else:
# date hasn't changed
return None
elif change_type in ["priority", "severity", "type", "status"]:
event_type = 'changed_' + change_type
old, new = get_old_and_new_values(change_type, message)
values.update({'old': old, 'new': new})
else:
# we are not supporting this type of event
return None
evt.update({"type": message["type"], "event": event_type, "values": values})
return evt
def parse_webhook_test(message: Mapping[str, Any]) -> Dict[str, Any]:
return {
"type": "webhook_test",
"event": "test",
"values": {
"user": get_owner_name(message),
"user_link": get_owner_link(message),
"end_type": "test",
},
}
def parse_message(message: Mapping[str, Any]) -> List[Dict[str, Any]]:
""" Parses the payload by delegating to specialized functions. """
events = []
if message["action"] in ['create', 'delete']:
events.append(parse_create_or_delete(message))
elif message["action"] == 'change':
if message["change"]["diff"]:
for value in message["change"]["diff"]:
parsed_event = parse_change_event(value, message)
if parsed_event:
events.append(parsed_event)
if message["change"]["comment"]:
events.append(parse_comment(message))
elif message["action"] == "test":
events.append(parse_webhook_test(message))
return events
def generate_content(data: Mapping[str, Any]) -> str:
""" Gets the template string and formats it with parsed data. """
template = templates[data['type']][data['event']]
content = template.format(**data['values'])
return content
def get_owner_name(message: Mapping[str, Any]) -> str:
return message["by"]["full_name"]
def get_owner_link(message: Mapping[str, Any]) -> str:
return message['by']['permalink']
def get_subject(message: Mapping[str, Any]) -> str:
data = message["data"]
if 'permalink' in data:
return '[' + data.get('subject', data.get('name')) + ']' + '(' + data['permalink'] + ')'
return '**' + data.get('subject', data.get('name')) + '**'
def get_epic_subject(message: Mapping[str, Any]) -> str:
if 'permalink' in message['data']['epic']:
return ('[' + message['data']['epic']['subject'] + ']' +
'(' + message['data']['epic']['permalink'] + ')')
return '**' + message['data']['epic']['subject'] + '**'
def get_userstory_subject(message: Mapping[str, Any]) -> str:
if 'permalink' in message['data']['user_story']:
us_data = message['data']['user_story']
return '[' + us_data['subject'] + ']' + '(' + us_data['permalink'] + ')'
return '**' + message['data']['user_story']['subject'] + '**'
|
{
"content_hash": "f70dc962dab6c98d4ddd4bd774821426",
"timestamp": "",
"source": "github",
"line_count": 325,
"max_line_length": 102,
"avg_line_length": 45.23076923076923,
"alnum_prop": 0.565374149659864,
"repo_name": "timabbott/zulip",
"id": "d4f61a7ef910d585261a20f37e4cd651032c03be",
"size": "14700",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "zerver/webhooks/taiga/view.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "429356"
},
{
"name": "Dockerfile",
"bytes": "2939"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "844217"
},
{
"name": "JavaScript",
"bytes": "3259448"
},
{
"name": "Perl",
"bytes": "8594"
},
{
"name": "Puppet",
"bytes": "74427"
},
{
"name": "Python",
"bytes": "7825440"
},
{
"name": "Ruby",
"bytes": "6110"
},
{
"name": "Shell",
"bytes": "123706"
},
{
"name": "TSQL",
"bytes": "314"
},
{
"name": "TypeScript",
"bytes": "22102"
}
],
"symlink_target": ""
}
|
""" Processor class define for Scan and Session."""
import logging
import re
import os
import json
import itertools
import requests
from uuid import uuid4
from datetime import date
from . import XnatUtils, task
from . import assessor_utils
from . import processor_parser
from . import yaml_doc
from .errors import AutoProcessorError
from .dax_settings import DEFAULT_FS_DATATYPE, DEFAULT_DATATYPE
from .task import NeedInputsException
from .processors_v3 import Processor_v3, SgpProcessor
__copyright__ = 'Copyright 2013 Vanderbilt University. All Rights Reserved'
__all__ = ['Processor', 'AutoProcessor']
# Logger for logs
LOGGER = logging.getLogger('dax')
class Processor(object):
""" Base class for processor """
def __init__(self, walltime_str, memreq_mb, spider_path,
version=None, ppn=1, env=None, suffix_proc='',
xsitype='proc:genProcData',
job_template=None):
"""
Entry point of the Base class for processor.
:param walltime_str: Amount of walltime to request for the process
:param memreq_mb: Number of megabytes of memory to use
:param spider_path: Fully qualified path to the spider to run
:param version: Version of the spider
:param ppn: Number of processors per job to use.
:param env: Environment file to source.
:param suffix_proc: Processor suffix (if desired)
:param xsitype: the XNAT xsiType.
:return: None
"""
self.job_template = job_template
self.walltime_str = walltime_str # 00:00:00 format
self.memreq_mb = memreq_mb # memory required in megabytes
# default values:
self.version = "1.0.0"
# Suffix
if suffix_proc and suffix_proc[0] != '_':
self.suffix_proc = '_%s' % suffix_proc
else:
self.suffix_proc = suffix_proc
self.suffix_proc = re.sub('[^a-zA-Z0-9]', '_', self.suffix_proc)
self.name = None
self.spider_path = spider_path
self.ppn = ppn
if env:
self.env = env
else:
self.env = os.path.join(os.environ['HOME'], '.bashrc')
self.xsitype = xsitype
# getting name and version from spider_path
self.set_spider_settings(spider_path, version)
# if suffix_proc is empty, set it to "" for the spider call:
if not suffix_proc:
self.suffix_proc = ''
# get the spider_path right with the version:
def set_spider_settings(self, spider_path, version):
"""
Method to set the spider version, path, and name from filepath
:param spider_path: Fully qualified path and file of the spider
:param version: version of the spider
:return: None
"""
if version:
# get the proc_name
proc_name = os.path.basename(spider_path)[7:-3]
# remove any version if there is one
proc_name = re.split("/*_v[0-9]/*", proc_name)[0]
# setting the version and name of the spider
self.version = version
nformat = '''{procname}_v{version}{suffix}'''
self.name = nformat.format(procname=proc_name,
version=self.version.split('.')[0],
suffix=self.suffix_proc)
sformat = '''Spider_{procname}_v{version}.py'''
spider_name = sformat.format(procname=proc_name,
version=version.replace('.', '_'))
self.spider_path = os.path.join(os.path.dirname(spider_path),
spider_name)
else:
self.default_settings_spider(spider_path)
def default_settings_spider(self, spider_path):
"""
Get the default spider version and name
:param spider_path: Fully qualified path and file of the spider
:return: None
"""
# set spider path
self.spider_path = spider_path
# set the name and the version of the spider
if len(re.split("/*_v[0-9]/*", spider_path)) > 1:
basename = os.path.basename(spider_path)
self.version = basename[7:-3].split('_v')[-1].replace('_', '.')
spidername = basename[7:-3]
self.name = '''{procname}_v{version}{suffix}'''.format(
procname=re.split("/*_v[0-9]/*", spidername)[0],
version=self.version.split('.')[0], suffix=self.suffix_proc)
else:
self.name = os.path.basename(spider_path)[7:-3] + self.suffix_proc
def get_proctype(self):
"""
Return the processor name for this processor. Override this method if
you are inheriting from a non-yaml processor.
:return: the name of the processor type
"""
return None
def get_assessor_input_types(self):
"""
Enumerate the assessor input types for this. The default implementation
returns an empty collection; override this method if you are inheriting
from a non-yaml processor.
:return: a list of input assessor types
"""
return []
# should_run - is the object of the proper object type?
# e.g. is it a scan? and is it the required scan type?
# e.g. is it a T1?
# other arguments here, could be Proj/Subj/Sess/Scan/Assessor depending
# on processor type?
def should_run(self):
"""
Responsible for determining if the assessor should shouw up in session.
:raises: NotImplementedError if not overridden.
:return: None
"""
raise NotImplementedError()
def build_cmds(self, cobj, dir):
"""
Build the commands that will go in the PBS/SLURM script
:raises: NotImplementedError if not overridden from base class.
:return: None
"""
raise NotImplementedError()
def create_assessor(self, xnatsession, inputs, relabel=False):
attempts = 0
while attempts < 100:
guid = str(uuid4())
assessor = xnatsession.assessor(guid)
if not assessor.exists():
kwargs = {}
if self.xsitype.lower() == DEFAULT_FS_DATATYPE.lower():
fsversion = '{}/fsversion'.format(self.xsitype.lower())
kwargs[fsversion] = 0
elif self.xsitype.lower() == DEFAULT_DATATYPE.lower():
proctype = '{}/proctype'.format(self.xsitype.lower())
kwargs[proctype] = self.name
procversion = '{}/procversion'.format(self.xsitype.lower())
kwargs[procversion] = self.version
input_key = '{}/inputs'.format(self.xsitype.lower())
kwargs[input_key] = self._serialize_inputs(inputs)
if relabel:
_proj = assessor.parent().parent().parent().label()
_subj = assessor.parent().parent().label()
_sess = assessor.parent().label()
label = '-x-'.join([_proj, _subj, _sess, self.name, guid])
else:
label = guid
# Set creation date to today
date_key = '{}/date'.format(self.xsitype.lower())
date_val = str(date.today())
kwargs[date_key] = date_val
# Create the assessor
assessor.create(assessors=self.xsitype.lower(),
ID=guid, label=label,
**kwargs)
return assessor
attempts += 1
def _serialize_inputs(self, inputs):
return json.dumps(inputs)
def _deserialize_inputs(self, assessor):
return json.loads(
XnatUtils.parse_assessor_inputs(assessor.attrs.get('inputs')))
class AutoProcessor(Processor):
""" Auto Processor class for AutoSpider using YAML files"""
def __init__(self, xnat, yaml_source, user_inputs=None):
"""
Entry point for the auto processor
:param xnat: xnat context object (XnatUtils in production contexts)
:param yaml_source: dictionary containing source_type -> string,\
source_id -> string, document -> yaml document
:param user_inputs: a dictionary of user overrides to the yaml\
source document
:return: None
"""
if not xnat:
raise AutoProcessorError("Parameter 'xnat' must be provided")
if not yaml_source:
raise AutoProcessorError(
"Parameter 'yaml_source' must be provided")
self.xnat = xnat
self.user_overrides = dict()
self.extra_user_overrides = dict()
self._read_yaml(yaml_source)
# Edit the values from user inputs:
if user_inputs is not None:
self._edit_inputs(user_inputs, yaml_source)
self.parser = processor_parser.ProcessorParser(
yaml_source.contents, self.proctype)
# Set up attrs:
self.walltime_str = self.attrs.get('walltime')
self.memreq_mb = self.attrs.get('memory')
self.ppn = self.attrs.get('ppn', 1)
self.env = self.attrs.get('env', None)
self.xsitype = self.attrs.get('xsitype', 'proc:genProcData')
self.full_regex = self.attrs.get('fullregex', False)
self.suffix = self.attrs.get('suffix', None)
def _edit_inputs(self, user_inputs, yaml_source):
"""
Method to edit the inputs from the YAML file by the user inputs.
:param user_inputs: dictionary of tag, value. E.G:
user_inputs = {'default.spider_path': /.../Spider....py'}
"""
yaml_name = yaml_source.source_id
for key, val in list(user_inputs.items()):
LOGGER.debug('overriding:key={}, file={}'.format(key, yaml_name))
tags = key.split('.')
if key.startswith('inputs.default'):
# change value in inputs
if tags[-1] in list(self.user_overrides.keys()):
self.user_overrides[tags[-1]] = val
elif tags[-1] in list(self.extra_user_overrides.keys()):
self.extra_user_overrides[tags[-1]] = val
else:
msg = 'key not found in default inputs:key={}, file={}'
msg = msg.format(tags[-1], yaml_name)
LOGGER.error(msg)
raise AutoProcessorError(msg)
elif key.startswith('inputs.xnat'):
# change value in self.xnat_inputs
if tags[2] not in list(self.xnat_inputs.keys()):
msg = 'key not found in xnat inputs:key={}, file={}'
msg = msg.format(tags[3], yaml_name)
LOGGER.error(msg)
raise AutoProcessorError(msg)
# Match the scan number or assessor number (e.g: scan1)
sobj = None
for obj in self.xnat_inputs[tags[2]]:
if tags[3] == obj['name']:
sobj = obj
break
if sobj is None:
msg = 'invalid override:key={}, file={}'
msg = msg.format(key, yaml_name)
LOGGER.error(msg)
raise AutoProcessorError(msg)
if tags[4] == 'resources':
if tags[6] == 'fmatch':
# Match the resource name
robj = None
for obj in sobj['resources']:
if tags[5] == obj['varname']:
robj = obj
break
if robj is None:
msg = 'invalid override:key={}, file={}'
LOGGER.error(msg)
raise AutoProcessorError(msg)
msg = 'overriding fmatch:key={}, val={}'
msg = msg.format(key, val)
LOGGER.debug(msg)
robj['fmatch'] = val
else:
msg = 'invalid override:key={}, file={}'
msg = msg.format(key, yaml_name)
LOGGER.error(msg)
raise AutoProcessorError(msg)
else:
LOGGER.info('overriding:{}:{}'.format(tags[4], str(val)))
obj[tags[4]] = val
elif key.startswith('attrs'):
# change value in self.attrs
if tags[-1] in list(self.attrs.keys()):
self.attrs[tags[-1]] = val
else:
msg = 'key not found in attrs:key={}, file={}'
msg = msg.format(tags[-1], yaml_name)
LOGGER.error(msg)
raise AutoProcessorError(msg)
else:
msg = 'invalid override:key={}, file={}'
msg = msg.format(key, yaml_name)
LOGGER.error(msg)
raise AutoProcessorError(msg)
def _read_yaml(self, yaml_source):
"""
Method to parse the processor arguments and their default values.
:param yaml_source: YamlDoc object containing the yaml file contents
"""
if yaml_source.source_type is None:
raise AutoProcessorError('Empty yaml source provided')
doc = yaml_source.contents
# Set Inputs from Yaml
self._check_default_keys(yaml_source.source_id, doc)
self.attrs = doc.get('attrs')
self.command = doc.get('command')
inputs = doc.get('inputs')
self.xnat_inputs = inputs.get('xnat')
for key, value in list(inputs.get('default').items()):
# If value is a key in command
k_str = '{{{}}}'.format(key)
if k_str in self.command:
self.user_overrides[key] = value
else:
if isinstance(value, bool) and value is True:
self.extra_user_overrides[key] = ''
elif value and value != 'None':
self.extra_user_overrides[key] = value
# Getting proctype from Yaml
self.proctype, self.version = self.xnat.get_proctype(
self.user_overrides.get('spider_path'),
self.attrs.get('suffix', None))
# Set attributes:
self.spider_path = self.user_overrides.get('spider_path')
self.name = self.proctype
# Override template
if doc.get('jobtemplate'):
_tmp = doc.get('jobtemplate')
# Make sure we have the full path
if not os.path.isabs(_tmp):
# If only filename, we assume it is same folder as default
_tmp = os.path.join(os.path.dirname(self.job_template), _tmp)
# Override it
self.job_template = os.path.join(_tmp)
def _check_default_keys(self, source_id, doc):
""" Static method to raise error if key not found in dictionary from
yaml file.
:param source_id: dictionary containing source_type -> string,\
source_id -> string, document -> yaml document
:param key: key to check in the doc
"""
# first level
for key in ['inputs', 'command', 'attrs']:
self._raise_yaml_error_if_no_key(doc, source_id, key)
# Second level in inputs and attrs:
inputs = doc.get('inputs')
attrs = doc.get('attrs')
for _doc, key in [(inputs, 'default'), (inputs, 'xnat'),
(attrs, 'memory'),
(attrs, 'walltime')]:
self._raise_yaml_error_if_no_key(_doc, source_id, key)
# third level for default:
default = doc.get('inputs').get('default')
for key in ['spider_path']:
self._raise_yaml_error_if_no_key(default, source_id, key)
@ staticmethod
def _raise_yaml_error_if_no_key(doc, source_id, key):
"""Method to raise an execption if the key is not in the dict
:param doc: dict to check
:param source_id: YAML source identifier string for logging
:param key: key to search
"""
if key not in list(doc.keys()):
err = 'YAML source {} does not have {} defined. See example.'
raise AutoProcessorError(err.format(source_id, key))
def parse_session(self, csess, sessions, pets=None):
"""
Method to run the processor parser on this session, in order to
calculate the pattern matches for this processor and the sessions
provided
:param csess: the active session. For non-longitudinal studies, this is
the session that the pattern matching is performed on. For longitudinal
studies, this is the 'current' session from which all prior sessions
are numbered for the purposes of pattern matching
:param sessions: the full, time-ordered list of sessions that should be
considered for longitudinal studies.
:return: None
"""
return self.parser.parse_session(csess, sessions, pets)
def get_proctype(self):
return self.name
# ***** Names still need fixing! *****
def get_assessor_input_types(self):
"""
Enumerate the assessor input types for this. The default implementation
returns an empty collection; override this method if you are inheriting
from a non-yaml processor.
:return: a list of input assessor types
"""
assessor_inputs = [i for i in list(self.parser.inputs.values()) if i['artefact_type'] == 'assessor']
assessors = [i['types'] for i in assessor_inputs]
return list(itertools.chain.from_iterable(assessors))
# TODO: BenM/assessor_of_assessor/this method is no longer suitable for
# execution on a single assessor, as it generates commands for the whole
# session. In any case, the command should be written out to the xnat
# assessor schema so that it can simply be launched, rather than performing
# this reconstruction each time the dax launch command is called
def get_cmds(self, assr, jobdir):
"""Method to generate the spider command for cluster job.
:param assessor: pyxnat assessor object
:param jobdir: jobdir where the job's output will be generated
:return: command to execute the spider in the job script
"""
# TODO: BenM/assessor_of_assessors/parse each scan / assessor and
# any select statements and generate one or more corresponding commands
# self.parser.generate_command(cassr, )
# combine the user overrides with the input parameters for each
# distinct command
commands = []
variable_set = self.parser.get_variable_set(assr)
combined_params = {}
for k, v in list(variable_set.items()):
combined_params[k] = v
for k, v in list(self.user_overrides.items()):
combined_params[k] = v
cmd = self.command.format(**combined_params)
for key, value in list(self.extra_user_overrides.items()):
cmd = '{} --{} {}'.format(cmd, key, value)
# TODO: BenM/assessor_of_assessor/each assessor is separate and
# has a different label; change the code to fetch the label from
# the assessor
# Add assr and jobdir:
assr_full_name =\
assessor_utils.full_label_from_assessor(assr)
if ' -a ' not in cmd and ' --assessor ' not in cmd:
cmd = '{} -a {}'.format(cmd, assr_full_name)
if ' -d ' not in cmd:
cmd = '{} -d {}'.format(cmd, jobdir)
commands.append(cmd)
return commands
class MoreAutoProcessor(AutoProcessor):
""" More Auto Processor class for AutoSpider using YAML files"""
def __init__(self, xnat, yaml_source, user_inputs=None,
singularity_imagedir=None, job_template='~/job_template.txt'):
"""
Entry point for the auto processor
:param yaml_file: yaml file defining the processor
:return: None
"""
# Load outputs
self.outputs = dict()
# Save location of singularity imagedir
self.singularity_imagedir = singularity_imagedir
# Set the template to the global default, it could be overwritten by
# processor yaml
self.job_template = job_template
super(MoreAutoProcessor, self).__init__(xnat, yaml_source, user_inputs)
def _read_yaml(self, yaml_source):
"""
Method to read the processor arguments and there default value.
:param yaml_file: path to yaml file defining the processor
"""
if yaml_source.source_type is None:
raise AutoProcessorError('Empty yaml source provided')
doc = yaml_source.contents
# Set Inputs from Yaml
self._check_default_keys(yaml_source.source_id, doc)
self.attrs = doc.get('attrs')
self.command = doc.get('command')
# Set Inputs from Yaml
inputs = doc.get('inputs')
self.xnat_inputs = inputs.get('xnat')
for key, value in list(inputs.get('default').items()):
# If value is a key in command
k_str = '{{{}}}'.format(key)
if k_str in self.command:
self.user_overrides[key] = value
else:
if isinstance(value, bool) and value is True:
self.extra_user_overrides[key] = ''
elif value and value != 'None':
self.extra_user_overrides[key] = value
# Container path, prepend singularity imagedir
self.container_path = inputs.get('default').get('container_path')
if ((self.container_path.endswith('.simg') or
self.container_path.endswith('.img') or
self.container_path.endswith('.sif')) and
not os.path.isabs(self.container_path) and
self.singularity_imagedir):
self.container_path = os.path.join(
self.singularity_imagedir, self.container_path)
# Overwrite container_path for building script
self.user_overrides['container_path'] = self.container_path
# Getting proctype and version from Yaml
if doc.get('procversion'):
self.version = doc.get('procversion')
else:
self.version = self.parse_procversion()
if doc.get('procname'):
procname = doc.get('procname')
else:
procname = self.parse_procname()
if doc.get('proctype'):
self.proctype = doc.get('proctype')
else:
self.proctype = '{}_v{}'.format(
procname, self.version.split('.')[0]
)
suffix = self.attrs.get('suffix', None)
if suffix:
if suffix[0] != '_':
suffix = '_{}'.format(suffix)
suffix = re.sub('[^a-zA-Z0-9]', '_', suffix)
if suffix[-1] == '_':
suffix = suffix[:-1]
self.proctype = '{}{}'.format(self.proctype, suffix)
# Set attributes:
self.name = self.proctype
# Set Outputs from Yaml
self.outputs = doc.get('outputs')
# Override template
if doc.get('jobtemplate'):
_tmp = doc.get('jobtemplate')
# Make sure we have the full path
if not os.path.isabs(_tmp):
# If only filename, we assume it is same folder as default
_tmp = os.path.join(os.path.dirname(self.job_template), _tmp)
# Override it
self.job_template = os.path.join(_tmp)
def _check_default_keys(self, source_id, doc):
""" Static method to raise error if key not found in dictionary from
yaml file.
:param yaml_file: path to yaml file defining the processor
:param doc: doc dictionary extracted from the yaml file
:param key: key to check in the doc
"""
# first level
for key in ['inputs', 'command', 'attrs', 'outputs']:
self._raise_yaml_error_if_no_key(doc, source_id, key)
# Second level in inputs and attrs:
inputs = doc.get('inputs')
attrs = doc.get('attrs')
for _doc, key in [(inputs, 'default'), (inputs, 'xnat'),
(attrs, 'memory'),
(attrs, 'walltime')]:
self._raise_yaml_error_if_no_key(_doc, source_id, key)
# third level for default:
default = doc.get('inputs').get('default')
for key in ['container_path']:
self._raise_yaml_error_if_no_key(default, source_id, key)
def parse_procname(self):
tmp = self.container_path
tmp = tmp.split('://')[-1]
tmp = tmp.rsplit('/')[-1]
if len(re.split('/*[_:]v[0-9]/*', tmp)) > 1:
tmp = re.split('/*[_:]v[0-9]/*', tmp)[0]
if tmp.startswith('Spider_'):
tmp = tmp[len('Spider_'):]
return tmp
def parse_procversion(self):
tmp = self.container_path
tmp = tmp.split('://')[-1]
tmp = tmp.rsplit('/')[-1]
if tmp.endswith('.img'):
tmp = tmp.split('.img')[0]
elif tmp.endswith('.simg'):
tmp = tmp.split('.simg')[0]
elif tmp.endswith('.py'):
tmp = tmp.split('.py')[0]
if len(re.split('/*_v[0-9]/*', tmp)) > 1:
tmp = tmp.split('_v')[-1].replace('_', '.')
elif len(re.split('/*:v[0-9]/*', tmp)) > 1:
tmp = tmp.split(':v')[-1].replace('_', '.')
return tmp
def build_cmds(self, assr, assr_label, sessions, jobdir, resdir):
"""Method to generate the spider command for cluster job.
:param jobdir: jobdir where the job's output will be generated
:return: command to execute the spider in the job script
"""
assr_inputs = XnatUtils.get_assessor_inputs(assr, sessions)
# Make every input a list, so we can iterate later
for k in assr_inputs:
if not isinstance(assr_inputs[k], list):
assr_inputs[k] = [assr_inputs[k]]
# Find values for the xnat inputs
var2val, input_list = self.parser.find_inputs(
assr, sessions, assr_inputs)
# Append other stuff
for k, v in list(self.user_overrides.items()):
var2val[k] = v
for k, v in list(self.extra_user_overrides.items()):
var2val[k] = v
# Include the assessor label
var2val['assessor'] = assr_label
# Handle xnat attributes
for attr_in in self.xnat_inputs.get('attrs', list()):
_var = attr_in['varname']
_attr = attr_in['attr']
_obj = attr_in['object']
_val = ''
if _obj == 'subject':
_val = assr.parent().parent().attrs.get(_attr)
elif _obj == 'session':
_val = assr.parent().attrs.get(_attr)
elif _obj == 'scan':
_ref = attr_in['ref']
_refval = [a.rsplit('/', 1)[1] for a in assr_inputs[_ref]]
_val = ','.join(
[assr.parent().scan(r).attrs.get(_attr) for r in _refval])
elif _obj == 'assessor':
if 'ref' in attr_in:
_ref = attr_in['ref']
_refval = [a.rsplit('/', 1)[1] for a in assr_inputs[_ref]]
_val = ','.join(
[assr.parent().assessor(r).attrs.get(_attr) for r in _refval])
else:
_val = assr.attrs.get(_attr)
else:
LOGGER.error('invalid YAML')
err = 'YAML File:contains invalid attribute:{}'
raise AutoProcessorError(err.format(_attr))
if _val == '':
raise NeedInputsException('Missing ' + _attr)
else:
var2val[_var] = _val
# Handle edits
edit_res = assr.out_resource(task.EDITS_RESOURCE)
if edit_res.exists():
file_list = edit_res.files().get()
assr_path = '/projects/{}/subjects/{}/experiments/{}/assessors/{}'.format(
assr.parent().parent().parent().label(),
assr.parent().parent().label(),
assr.parent().label(),
assr.label()
)
for edit_in in self.xnat_inputs.get('edits', list()):
_fpref = edit_in['fpref']
_var = edit_in['varname']
# Filter files that match prefix
cur_list = [f for f in file_list if f.startswith(_fpref)]
if cur_list:
# Sort and grab the last file
_val = sorted(cur_list)[-1]
# Build full uri
_uri = '{}/data{}/out/resources/{}/files/{}'.format(
assr._intf.host,
assr_path,
task.EDITS_RESOURCE,
_val)
# Append to inputs to be downloaded
input_list.append({
'fdest': _fpref,
'ftype': 'FILE',
'fpath': _uri,
'ddest': ''
})
# Set the value for command text
var2val[_var] = '/INPUTS/'+_fpref
else:
# None found
var2val[_var] = ''
else:
for edit_in in self.xnat_inputs.get('edits', list()):
var2val[edit_in['varname']] = ''
# Build the command text
dstdir = os.path.join(resdir, assr_label)
assr_dir = os.path.join(jobdir, assr_label)
cmd = self.build_text(
var2val, input_list, assr_dir, dstdir,
assr._intf.host, assr._intf.user)
return [cmd]
def build_text(self, var2val, input_list, jobdir, dstdir, host, user):
# Initialize commands
cmd = '\n\n'
# Append the list of inputs, URL-encoding the fpath to handle special chars in URLs
cmd += 'INLIST=(\n'
for cur in input_list:
cur['fpath'] = requests.utils.quote(cur['fpath'],safe=":/")
cmd += '{fdest},{ftype},{fpath},{ddest}\n'.format(**cur)
cmd += ')\n\n'
# Append the list on outputs
cmd += 'OUTLIST=(\n'
for cur in self.outputs:
cmd += '{path},{type},{resource}\n'.format(**cur)
cmd += ')\n\n'
# Append other paths
cmd += 'VERSION={}\n'.format(self.version)
cmd += 'JOBDIR=$(mktemp -d "{}.XXXXXXXXX") || '.format(jobdir)
cmd += '{ echo "mktemp failed"; exit 1; }\n'
cmd += 'INDIR=$JOBDIR/INPUTS\n'
cmd += 'OUTDIR=$JOBDIR/OUTPUTS\n'
cmd += 'DSTDIR={}\n\n'.format(dstdir)
cmd += 'CONTAINERPATH={}\n\n'.format(self.container_path)
cmd += 'XNATHOST={}\n\n'.format(host)
cmd += 'XNATUSER={}\n\n'.format(user)
# Append the main command
cmd += 'MAINCMD=\"'
cmd += self.command.format(**var2val)
cmd += '\"\n'
return cmd
def processors_by_type(proc_list):
"""
Organize the processor types and return a list of session processors
first, then scan
:param proc_list: List of Processor classes from the DAX settings file
:return: Lists of processors by type
"""
auto_proc_list = list()
# Build list of processors by type
if proc_list is not None:
for proc in proc_list:
if isinstance(proc, Processor_v3):
auto_proc_list.append(proc)
elif issubclass(proc.__class__, AutoProcessor):
auto_proc_list.append(proc)
else:
LOGGER.warn('unknown processor type: %s' % proc)
return auto_proc_list
def load_from_yaml(xnat, filepath, user_inputs=None,
singularity_imagedir=None, job_template=None):
"""
Load processor from yaml
:param filepath: path to yaml file
:return: processor
"""
yaml_obj = yaml_doc.YamlDoc().from_file(filepath)
# Load file based on yaml version and data type
if yaml_obj.contents.get('inputs').get('xnat').get('sessions', False):
print('FOUND SGP')
# This must be a subjgenproc
return SgpProcessor(
xnat, filepath, user_inputs, singularity_imagedir, job_template)
elif yaml_obj.contents.get('procyamlversion', '') == '3.0.0-dev.0':
LOGGER.debug('loading as Processor_v3:{}'.format(filepath))
return Processor_v3(
xnat, filepath, user_inputs, singularity_imagedir, job_template)
elif yaml_obj.contents.get('moreauto'):
return MoreAutoProcessor(xnat, yaml_obj, user_inputs,
singularity_imagedir, job_template)
else:
return AutoProcessor(xnat, yaml_obj, user_inputs)
|
{
"content_hash": "affa3bb86d52adb6e2b311f9344cefbd",
"timestamp": "",
"source": "github",
"line_count": 883,
"max_line_length": 108,
"avg_line_length": 37.731596828992075,
"alnum_prop": 0.5418254944923012,
"repo_name": "VUIIS/dax",
"id": "ca4067f6b7aeb4b9971a27ffdf9cb8398d793ded",
"size": "33317",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "dax/processors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "MATLAB",
"bytes": "6017"
},
{
"name": "Python",
"bytes": "1109343"
}
],
"symlink_target": ""
}
|
from unittest import mock
from django.core.urlresolvers import reverse
from taiga.projects import services
from taiga.base.utils import json
from .. import factories as f
import pytest
pytestmark = pytest.mark.django_db
def test_get_members_from_bulk():
data = [{"role_id": "1", "email": "member1@email.com"},
{"role_id": "1", "email": "member2@email.com"}]
members = services.get_members_from_bulk(data, project_id=1)
assert len(members) == 2
assert members[0].email == "member1@email.com"
assert members[1].email == "member2@email.com"
def test_create_members_in_bulk():
with mock.patch("taiga.projects.services.members.db") as db:
data = [{"role_id": "1", "email": "member1@email.com"},
{"role_id": "1", "email": "member2@email.com"}]
members = services.create_members_in_bulk(data, project_id=1)
db.save_in_bulk.assert_called_once_with(members, None, None)
def test_api_create_bulk_members(client):
project = f.ProjectFactory()
john = f.UserFactory.create()
joseph = f.UserFactory.create()
tester = f.RoleFactory(project=project, name="Tester")
gamer = f.RoleFactory(project=project, name="Gamer")
f.MembershipFactory(project=project, user=project.owner, is_admin=True)
url = reverse("memberships-bulk-create")
data = {
"project_id": project.id,
"bulk_memberships": [
{"role_id": tester.pk, "email": john.email},
{"role_id": gamer.pk, "email": joseph.email},
]
}
client.login(project.owner)
response = client.json.post(url, json.dumps(data))
assert response.status_code == 200
assert response.data[0]["email"] == john.email
assert response.data[1]["email"] == joseph.email
def test_api_create_bulk_members_without_enough_memberships_private_project_slots_one_project(client):
user = f.UserFactory.create(max_memberships_private_projects=3)
project = f.ProjectFactory(owner=user, is_private=True)
role = f.RoleFactory(project=project, name="Test")
f.MembershipFactory(project=project, user=user, is_admin=True)
url = reverse("memberships-bulk-create")
data = {
"project_id": project.id,
"bulk_memberships": [
{"role_id": role.pk, "email": "test1@test.com"},
{"role_id": role.pk, "email": "test2@test.com"},
{"role_id": role.pk, "email": "test3@test.com"},
{"role_id": role.pk, "email": "test4@test.com"},
]
}
client.login(user)
response = client.json.post(url, json.dumps(data))
assert response.status_code == 400
assert "reached your current limit of memberships for private" in response.data["_error_message"]
def test_api_create_bulk_members_for_admin_without_enough_memberships_private_project_slots_one_project(client):
owner = f.UserFactory.create(max_memberships_private_projects=3)
user = f.UserFactory.create()
project = f.ProjectFactory(owner=owner, is_private=True)
role = f.RoleFactory(project=project, name="Test")
f.MembershipFactory(project=project, user=user, is_admin=True)
url = reverse("memberships-bulk-create")
data = {
"project_id": project.id,
"bulk_memberships": [
{"role_id": role.pk, "email": "test1@test.com"},
{"role_id": role.pk, "email": "test2@test.com"},
{"role_id": role.pk, "email": "test3@test.com"},
{"role_id": role.pk, "email": "test4@test.com"},
]
}
client.login(user)
response = client.json.post(url, json.dumps(data))
assert response.status_code == 400
assert "reached your current limit of memberships for private" in response.data["_error_message"]
def test_api_create_bulk_members_with_enough_memberships_private_project_slots_multiple_projects(client):
user = f.UserFactory.create(max_memberships_private_projects=6)
project = f.ProjectFactory(owner=user, is_private=True)
role = f.RoleFactory(project=project, name="Test")
f.MembershipFactory(project=project, user=user, is_admin=True)
other_project = f.ProjectFactory(owner=user)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
url = reverse("memberships-bulk-create")
data = {
"project_id": project.id,
"bulk_memberships": [
{"role_id": role.pk, "email": "test1@test.com"},
{"role_id": role.pk, "email": "test2@test.com"},
{"role_id": role.pk, "email": "test3@test.com"},
{"role_id": role.pk, "email": "test4@test.com"},
]
}
client.login(user)
response = client.json.post(url, json.dumps(data))
assert response.status_code == 200
def test_api_create_bulk_members_without_enough_memberships_public_project_slots_one_project(client):
user = f.UserFactory.create(max_memberships_public_projects=3)
project = f.ProjectFactory(owner=user, is_private=False)
role = f.RoleFactory(project=project, name="Test")
f.MembershipFactory(project=project, user=user, is_admin=True)
url = reverse("memberships-bulk-create")
data = {
"project_id": project.id,
"bulk_memberships": [
{"role_id": role.pk, "email": "test1@test.com"},
{"role_id": role.pk, "email": "test2@test.com"},
{"role_id": role.pk, "email": "test3@test.com"},
{"role_id": role.pk, "email": "test4@test.com"},
]
}
client.login(user)
response = client.json.post(url, json.dumps(data))
assert response.status_code == 400
assert "reached your current limit of memberships for public" in response.data["_error_message"]
def test_api_create_bulk_members_with_enough_memberships_public_project_slots_multiple_projects(client):
user = f.UserFactory.create(max_memberships_public_projects=6)
project = f.ProjectFactory(owner=user, is_private=False)
role = f.RoleFactory(project=project, name="Test")
f.MembershipFactory(project=project, user=user, is_admin=True)
other_project = f.ProjectFactory(owner=user)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
url = reverse("memberships-bulk-create")
data = {
"project_id": project.id,
"bulk_memberships": [
{"role_id": role.pk, "email": "test1@test.com"},
{"role_id": role.pk, "email": "test2@test.com"},
{"role_id": role.pk, "email": "test3@test.com"},
{"role_id": role.pk, "email": "test4@test.com"},
]
}
client.login(user)
response = client.json.post(url, json.dumps(data))
assert response.status_code == 200
def test_api_create_bulk_members_with_extra_text(client, outbox):
project = f.ProjectFactory()
tester = f.RoleFactory(project=project, name="Tester")
f.MembershipFactory(project=project, user=project.owner, is_admin=True)
url = reverse("memberships-bulk-create")
invitation_extra_text = "this is a not so random invitation text"
data = {
"project_id": project.id,
"bulk_memberships": [
{"role_id": tester.pk, "email": "john@email.com"},
],
"invitation_extra_text": invitation_extra_text
}
client.login(project.owner)
response = client.json.post(url, json.dumps(data))
assert response.status_code == 200
assert response.data[0]["email"] == "john@email.com"
message = outbox[0]
assert len(outbox) == 1
assert message.to == ["john@email.com"]
assert "this is a not so random invitation text" in message.body
def test_api_resend_invitation(client, outbox):
invitation = f.create_invitation(user=None)
f.MembershipFactory(project=invitation.project, user=invitation.project.owner, is_admin=True)
url = reverse("memberships-resend-invitation", kwargs={"pk": invitation.pk})
client.login(invitation.project.owner)
response = client.post(url)
assert response.status_code == 204
assert len(outbox) == 1
assert outbox[0].to == [invitation.email]
def test_api_invite_existing_user(client, outbox):
"Should create the invitation linked to that user"
user = f.UserFactory.create()
role = f.RoleFactory.create()
f.MembershipFactory(project=role.project, user=role.project.owner, is_admin=True)
client.login(role.project.owner)
url = reverse("memberships-list")
data = {"role": role.pk, "project": role.project.pk, "email": user.email}
response = client.json.post(url, json.dumps(data))
assert response.status_code == 201, response.data
assert len(outbox) == 1
assert user.memberships.count() == 1
message = outbox[0]
assert message.to == [user.email]
assert "Added to the project" in message.subject
def test_api_create_invalid_membership_email_failing(client):
"Should not create the invitation linked to that user"
user = f.UserFactory.create()
role = f.RoleFactory.create()
client.login(role.project.owner)
url = reverse("memberships-list")
data = {"role": role.pk, "project": role.project.pk}
response = client.json.post(url, json.dumps(data))
assert response.status_code == 400, response.data
assert user.memberships.count() == 0
def test_api_create_invalid_membership_role_doesnt_exist_in_the_project(client):
"Should not create the invitation linked to that user"
user = f.UserFactory.create()
role = f.RoleFactory.create()
project = f.ProjectFactory.create()
client.login(project.owner)
url = reverse("memberships-list")
data = {"role": role.pk, "project": project.pk, "email": user.email}
response = client.json.post(url, json.dumps(data))
assert response.status_code == 400, response.data
assert response.data["role"][0] == "Invalid role for the project"
assert user.memberships.count() == 0
def test_api_create_membership(client):
membership = f.MembershipFactory(is_admin=True)
role = f.RoleFactory.create(project=membership.project)
user = f.UserFactory.create()
client.login(membership.user)
url = reverse("memberships-list")
data = {"role": role.pk, "project": role.project.pk, "email": user.email}
response = client.json.post(url, json.dumps(data))
assert response.status_code == 201
assert response.data["user_email"] == user.email
def test_api_create_membership_without_enough_memberships_private_project_slots_one_projects(client):
user = f.UserFactory.create(max_memberships_private_projects=1)
project = f.ProjectFactory(owner=user, is_private=True)
role = f.RoleFactory(project=project, name="Test")
f.MembershipFactory(project=project, user=user, is_admin=True)
client.login(user)
url = reverse("memberships-list")
data = {"role": role.pk, "project": project.pk, "email": "test@test.com"}
response = client.json.post(url, json.dumps(data))
assert response.status_code == 400
assert "reached your current limit of memberships for private" in response.data["_error_message"]
def test_api_create_membership_with_enough_memberships_private_project_slots_multiple_projects(client):
user = f.UserFactory.create(max_memberships_private_projects=5)
project = f.ProjectFactory(owner=user, is_private=True)
role = f.RoleFactory(project=project, name="Test")
f.MembershipFactory(project=project, user=user, is_admin=True)
other_project = f.ProjectFactory(owner=user)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
client.login(user)
url = reverse("memberships-list")
data = {"role": role.pk, "project": project.pk, "email": "test@test.com"}
response = client.json.post(url, json.dumps(data))
assert response.status_code == 201
def test_api_create_membership_without_enough_memberships_public_project_slots_one_projects(client):
user = f.UserFactory.create(max_memberships_public_projects=1)
project = f.ProjectFactory(owner=user, is_private=False)
role = f.RoleFactory(project=project, name="Test")
f.MembershipFactory(project=project, user=user, is_admin=True)
client.login(user)
url = reverse("memberships-list")
data = {"role": role.pk, "project": project.pk, "email": "test@test.com"}
response = client.json.post(url, json.dumps(data))
assert response.status_code == 400
assert "reached your current limit of memberships for public" in response.data["_error_message"]
def test_api_create_membership_with_enough_memberships_public_project_slots_multiple_projects(client):
user = f.UserFactory.create(max_memberships_public_projects=5)
project = f.ProjectFactory(owner=user, is_private=False)
role = f.RoleFactory(project=project, name="Test")
f.MembershipFactory(project=project, user=user, is_admin=True)
other_project = f.ProjectFactory(owner=user)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
f.MembershipFactory.create(project=other_project)
client.login(user)
url = reverse("memberships-list")
data = {"role": role.pk, "project": project.pk, "email": "test@test.com"}
response = client.json.post(url, json.dumps(data))
assert response.status_code == 201
def test_api_edit_membership(client):
membership = f.MembershipFactory(is_admin=True)
client.login(membership.user)
url = reverse("memberships-detail", args=[membership.id])
data = {"email": "new@email.com"}
response = client.json.patch(url, json.dumps(data))
assert response.status_code == 200
def test_api_change_owner_membership_to_no_admin_return_error(client):
project = f.ProjectFactory()
membership_owner = f.MembershipFactory(project=project, user=project.owner, is_admin=True)
membership = f.MembershipFactory(project=project, is_admin=True)
url = reverse("memberships-detail", args=[membership_owner.id])
data = {"is_admin": False}
client.login(membership.user)
response = client.json.patch(url, json.dumps(data))
assert response.status_code == 400
assert 'is_admin' in response.data
def test_api_delete_membership(client):
membership = f.MembershipFactory(is_admin=True)
client.login(membership.user)
url = reverse("memberships-detail", args=[membership.id])
response = client.json.delete(url)
assert response.status_code == 400
f.MembershipFactory(is_admin=True, project=membership.project)
url = reverse("memberships-detail", args=[membership.id])
response = client.json.delete(url)
assert response.status_code == 204
def test_api_delete_membership_without_user(client):
membership_owner = f.MembershipFactory(is_admin=True)
membership_without_user_one = f.MembershipFactory(project=membership_owner.project, user=None)
f.MembershipFactory(project=membership_owner.project, user=None)
client.login(membership_owner.user)
url = reverse("memberships-detail", args=[membership_without_user_one.id])
response = client.json.delete(url)
assert response.status_code == 204
|
{
"content_hash": "83a97348747727ffcdd9443e0e1443ac",
"timestamp": "",
"source": "github",
"line_count": 416,
"max_line_length": 112,
"avg_line_length": 37.33413461538461,
"alnum_prop": 0.6821840190586569,
"repo_name": "curiosityio/taiga-docker",
"id": "c878bd458dd4ccad000fee84e0b06e0f80bbf07a",
"size": "15531",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "taiga-back/taiga-back/tests/integration/test_memberships.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "186988"
},
{
"name": "JavaScript",
"bytes": "2007"
},
{
"name": "Nginx",
"bytes": "4140"
},
{
"name": "Python",
"bytes": "2793020"
},
{
"name": "Shell",
"bytes": "1392"
}
],
"symlink_target": ""
}
|
from gluuwebui import app
from flask import request, redirect, url_for, Response
import json
import requests
import os
import datetime
api_base = app.config["API_SERVER_URL"]
class APIError(Exception):
"""Raise an exception whenever the API returns an error code"""
def __init__(self, msg, code, reason, params=""):
Exception.__init__(self)
self.msg = msg
self.code = code
self.reason = reason
self.params = params # a dict of invalid parameters from API response
def __str__(self):
return "{0} API server returned Code: {1} Reason: {2} {3}".format(
self.msg, self.code, self.reason, self.params)
@app.errorhandler(APIError)
def api_error(error):
resp = dict({'message': str(error)})
return Response(json.dumps(resp), status=400, mimetype="application/json")
def root_dir(): # pragma: no cover
return os.path.abspath(os.path.dirname(__file__))
def get_file(filename): # pragma: no cover
try:
src = os.path.join(root_dir(), filename)
return open(src).read()
except IOError as exc:
return str(exc)
def api_get(req):
try:
r = requests.get(api_base + req)
if r.status_code != 200:
raise APIError('There was an issue fetching your data',
r.status_code, reason(r))
return r.json()
except requests.ConnectionError:
raise APIError('No response from API Server', 500, 'Connection Error')
def generate_curl(req, method, data=None):
"""Function that will generate a curl command for the input request object
Params:
req (requests.request) object for which curl command is to be generated
Returns:
command (string) - a string which forms the curl command of the request
"""
command = "curl {uri} -X {method} -d {data}"
uri = api_base + req
if isinstance(data, dict):
data_str = " -d ".join(["%s='%s'" % (k, v) for k, v in data.items()])
else:
data_str = str(data)
return command.format(uri=uri, method=method, data=data_str)
def append_history(req, method, data, status):
"""Function that would append the command to the config-history.log file"""
history = os.path.join(root_dir(), "static/config-history.log")
with open(history, 'a') as logfile:
dt = datetime.datetime.now()
logfile.write(dt.strftime('%d %b %Y, %H:%M:%S\n'))
logfile.write(generate_curl(req, method, data))
logfile.write("\n")
logfile.write("RESPONSE CODE: {0} \n".format(status))
def api_post(req, data):
"""Function to send post requests to the API
@param req (string) the resource name to request
@param data (dict) the post form data as a dict from json
"""
r = requests.post(api_base + req, data=data)
append_history(req, 'POST', data, r.status_code)
if r.status_code > 210:
try:
params = r.json()['params']
invalidParams = "=> "+" ".join("{0}: {1}".format(k, v)
for k, v in params.items())
except KeyError:
invalidParams = ""
raise APIError('Could not create a new {0}'.format(req),
r.status_code, reason(r), invalidParams)
return r.json()
def api_delete(resource, id, forced=None):
"""Fucntion that sends the delete requests to the API.
@param resource (string) the resource to request
@param id (string) the id of the resource to be deleted
"""
url = api_base + '{0}/{1}'.format(resource, id)
if forced:
url += "?force_rm={0}".format(forced)
r = requests.delete(url)
append_history(resource, 'DELETE', None, r.status_code)
if r.status_code != 204:
raise APIError("The {0} with id {1} couldn't be deleted.".format(
resource, id), r.status_code, reason(r))
data = {'message': 'Deleted {0} with id {1}'.format(resource, id)}
return data
def reason(res):
try:
return res.json()['message']
except (AttributeError, TypeError):
return res.reason
def json_response(data, status=200):
return Response(json.dumps(data), status=status,
mimetype="application/json")
@app.route("/")
def index():
content = get_file('static/templates/index.html')
return Response(content, mimetype="text/html")
@app.route("/templates/<filename>")
def template(filename):
content = get_file('static/templates/{0}'.format(filename))
return Response(content, mimetype="text/html")
@app.route("/js/<filename>")
def js(filename):
content = get_file('static/js/{0}'.format(filename))
return Response(content, mimetype="text/js")
@app.route("/css/<filename>")
def css(filename):
content = get_file('static/css/{0}'.format(filename))
return Response(content, mimetype="text/css")
@app.route("/img/<filename>")
def img(filename):
content = get_file('static/img/{0}'.format(filename))
# @todo: mimetype accuracy
extension = filename.split(".")[-1]
return Response(content, mimetype="image/"+extension)
@app.route("/nodes", methods=['GET'])
@app.route("/nodes/<node_type>", methods=['GET', 'POST', 'PUT', 'DELETE'])
def represent_node(node_type=None):
if request.method == 'POST': # Initiate create new node
resp = api_post('nodes/{0}'.format(node_type),
json.loads(request.data))
return json_response(resp)
elif request.method == 'DELETE':
name = node_type # node_type for a delete request actually has name
resp = api_delete('nodes', name)
return json_response(resp)
elif request.method == 'PUT':
name = node_type # node_type request actually has name
url = api_base + "nodes/{0}".format(name)
r = requests.put(url)
if r.status_code != 202:
raise APIError("Node Deployment retry failed for node: {0}".format(
name), r.status_code, reason(r))
return json_response(r.json())
if node_type:
resp = api_get("nodes/{0}".format(node_type))
else:
resp = api_get("nodes")
return json_response(resp)
@app.route("/providers", methods=['GET'])
@app.route("/providers/<driver>", methods=['GET', 'POST', 'DELETE'])
def represent_provider(driver=None):
if request.method == 'POST':
resp = api_post('providers/{0}'.format(driver),
json.loads(request.data))
return json_response(resp)
elif request.method == 'DELETE':
pro_id = driver
resp = api_delete('providers', pro_id)
return json_response(resp)
if driver: # for GETthe driver acts as the id
resp = api_get('providers/{0}'.format(driver))
else:
resp = api_get('providers')
return json_response(resp)
@app.route("/clusters", methods=['GET', 'POST'])
@app.route("/clusters/<cluster_id>", methods=['GET', 'DELETE'])
def represent_cluster(cluster_id=None):
if request.method == 'POST':
resp = api_post('clusters', json.loads(request.data))
return json_response(resp)
elif request.method == 'DELETE':
resp = api_delete('clusters', cluster_id)
return json_response(resp)
if cluster_id:
resp = api_get('clusters/{0}'.format(cluster_id))
else:
resp = api_get('clusters')
return json_response(resp)
@app.route('/containers', methods=['GET'])
@app.route('/containers/<ctype>', methods=['GET', 'POST', 'DELETE'])
def represent_containers(ctype=None):
if request.method == 'POST':
resp = api_post('containers/{0}'.format(ctype),
json.loads(request.data))
return json_response(resp)
elif request.method == 'DELETE':
id = ctype # for DELETE the ctype is the id
force_rm = request.args.get('force_rm')
resp = api_delete('containers', id, force_rm)
return json_response(resp)
if ctype: # for GET ctype acts as the id
resp = api_get('containers/{0}'.format(ctype))
else:
resp = api_get('containers')
return json_response(resp)
@app.route('/scale-containers/<ctype>/<count>', methods=['POST', 'DELETE'])
def scale_containers(ctype, count):
if request.method == 'POST':
resp = api_post('scale-containers/{0}/{1}'.format(ctype, count), {})
elif request.method == 'DELETE':
url = api_base + 'scale-containers/{0}/{1}'.format(ctype, count)
r = requests.delete(url)
if r.status_code != 202:
raise APIError(
"The descale request for {0} {1} containers failed.".format(
count, ctype
), r.status_code, reason(r)
)
resp = r.json()
return json_response(resp)
def clean_keystring(key):
'''Helper function to remove white spaces from the copy-pasted license
keys in the webform'''
return key.strip().replace('\n', '').replace(' ', '')
@app.route("/license_keys", methods=['GET', 'POST'])
@app.route("/license_keys/<lic_id>", methods=['GET', 'PUT', 'DELETE'])
def represent_keys(lic_id=None):
if request.method == 'POST': # Add a new credential
data = json.loads(request.data)
data['public_key'] = clean_keystring(data['public_key'])
resp = api_post('license_keys', data)
return json_response(resp)
elif request.method == 'PUT':
url = api_base + "license_keys/{0}".format(lic_id)
newdata = json.loads(request.data)
if 'id' in newdata.keys():
del newdata['id']
# Clean the License keys during update time too
if 'public_key' in newdata:
newdata['public_key'] = clean_keystring(newdata['public_key'])
r = requests.put(url, data=newdata)
if r.status_code != 200:
raise APIError("License update failed for ID: {0}".format(lic_id),
r.status_code, reason(r))
return json_response(r.json())
elif request.method == 'DELETE':
resp = api_delete('license_keys', lic_id)
return json_response(resp)
if lic_id:
res = api_get('license_keys/{0}'.format(lic_id))
else:
res = api_get('license_keys')
return json_response(res)
@app.route('/container_logs', methods=['GET'])
@app.route('/container_logs/<log_id>', methods=['GET', 'DELETE'])
@app.route('/container_logs/<log_id>/<action>', methods=['GET'])
def represent_container_logs(log_id=None, action=None):
if request.method == 'DELETE':
resp = api_delete('container_logs', log_id)
return json_response(resp)
if log_id and action:
resp = api_get('container_logs/{0}/{1}'.format(log_id, action))
elif log_id:
resp = api_get('container_logs/{0}'.format(log_id))
else:
resp = api_get('container_logs')
return json_response(resp)
|
{
"content_hash": "96cabbbe34572444f3e6d8c4964fd65b",
"timestamp": "",
"source": "github",
"line_count": 319,
"max_line_length": 79,
"avg_line_length": 34.12852664576803,
"alnum_prop": 0.6047579682189768,
"repo_name": "GluuFederation/gluu-webui",
"id": "405fdb3a6517706dac4506faf713894b608489c4",
"size": "10887",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gluuwebui/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "738"
},
{
"name": "CSS",
"bytes": "1711"
},
{
"name": "HTML",
"bytes": "39039"
},
{
"name": "JavaScript",
"bytes": "92444"
},
{
"name": "Python",
"bytes": "21078"
}
],
"symlink_target": ""
}
|
"""Auto-generated file, do not edit by hand. AI metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_AI = PhoneMetadata(id='AI', country_code=1, international_prefix='011',
general_desc=PhoneNumberDesc(national_number_pattern='[2589]\\d{9}', possible_number_pattern='\\d{7}(?:\\d{3})?', possible_length=(10,), possible_length_local_only=(7,)),
fixed_line=PhoneNumberDesc(national_number_pattern='2644(?:6[12]|9[78])\\d{4}', example_number='2644612345', possible_length=(10,), possible_length_local_only=(7,)),
mobile=PhoneNumberDesc(national_number_pattern='264(?:235|476|5(?:3[6-9]|8[1-4])|7(?:29|72))\\d{4}', example_number='2642351234', possible_length=(10,), possible_length_local_only=(7,)),
toll_free=PhoneNumberDesc(national_number_pattern='8(?:00|33|44|55|66|77|88)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='8002123456', possible_length=(10,)),
premium_rate=PhoneNumberDesc(national_number_pattern='900[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='9002123456', possible_length=(10,)),
shared_cost=PhoneNumberDesc(),
personal_number=PhoneNumberDesc(national_number_pattern='5(?:00|22|33|44|66|77|88)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='5002345678', possible_length=(10,)),
voip=PhoneNumberDesc(),
pager=PhoneNumberDesc(),
uan=PhoneNumberDesc(),
voicemail=PhoneNumberDesc(),
no_international_dialling=PhoneNumberDesc(),
national_prefix='1',
national_prefix_for_parsing='1',
leading_digits='264')
|
{
"content_hash": "4dc9e33dd9921713fa3da7b095e3c95b",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 190,
"avg_line_length": 83.10526315789474,
"alnum_prop": 0.7042431918936035,
"repo_name": "vicky2135/lucious",
"id": "e301ab97370ae618820ddb5145c6c6e11b16ce3b",
"size": "1579",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oscar/lib/python2.7/site-packages/phonenumbers/data/region_AI.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "896683"
},
{
"name": "C++",
"bytes": "52230"
},
{
"name": "CSS",
"bytes": "1169533"
},
{
"name": "HTML",
"bytes": "1104983"
},
{
"name": "JavaScript",
"bytes": "1055140"
},
{
"name": "Makefile",
"bytes": "145238"
},
{
"name": "Python",
"bytes": "55993261"
},
{
"name": "Shell",
"bytes": "40487"
}
],
"symlink_target": ""
}
|
import unittest
import gevent
from scales.asynchronous import AsyncResult
class AsyncUtilTestCase(unittest.TestCase):
def testWhenAllSuccessful(self):
ar1 = AsyncResult()
ar2 = AsyncResult()
ar_agg = AsyncResult.WhenAll((ar1, ar2))
self.assertEqual(ar_agg.ready(), False)
ar1.set(1)
gevent.sleep(0)
self.assertEqual(ar_agg.ready(), False)
ar2.set(2)
gevent.sleep(0)
self.assertEqual(ar_agg.ready(), True)
self.assertEqual(ar_agg.value, [1,2])
def testWhenAnySuccessful(self):
ar1 = AsyncResult()
ar2 = AsyncResult()
ar_agg = AsyncResult.WhenAny((ar1, ar2))
self.assertEqual(ar_agg.ready(), False)
ar1.set('hi')
gevent.sleep(0)
self.assertEqual(ar_agg.value, 'hi')
def testWhenAnyFailure(self):
ar1 = AsyncResult()
ar2 = AsyncResult()
ar_agg = AsyncResult.WhenAny((ar1, ar2))
ar1.set_exception(Exception())
gevent.sleep(0)
ar2.set(1)
gevent.sleep(0)
self.assertEqual(ar_agg.value, 1)
def testWhenAllFailure(self):
ar1 = AsyncResult()
ar2 = AsyncResult()
ar_agg = AsyncResult.WhenAll((ar1, ar2))
ar1.set(1)
gevent.sleep(0)
ar2.set_exception(Exception())
gevent.sleep(0)
self.assertIsNotNone(ar_agg.exception)
def testUnwrapAlreadyDone(self):
ar_outer = AsyncResult()
ar_inner = AsyncResult()
ar_outer.set(ar_inner)
ar_inner.set('test')
ar_unwrapped = ar_outer.Unwrap()
self.assertEqual(ar_unwrapped.get(), 'test')
def testUnwrapLater(self):
ar_outer = AsyncResult()
ar_inner = AsyncResult()
def set_ar_inner():
gevent.sleep(.01)
ar_outer.set(ar_inner)
gevent.sleep(.01)
ar_inner.set('test')
gevent.spawn(set_ar_inner)
ar_unwrapped = ar_outer.Unwrap()
self.assertFalse(ar_unwrapped.ready())
self.assertEqual(ar_unwrapped.get(), 'test')
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "b38885bfcec26f260e49c39261274c2d",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 48,
"avg_line_length": 23.426829268292682,
"alnum_prop": 0.6496616345653305,
"repo_name": "steveniemitz/scales",
"id": "c783033ed855620f3a4aca6d53a8ab64c1d8e4da",
"size": "1921",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/scales/test_async.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "223553"
},
{
"name": "Shell",
"bytes": "60"
},
{
"name": "Thrift",
"bytes": "525"
}
],
"symlink_target": ""
}
|
from app import app
|
{
"content_hash": "8e45c2bc11f28661a4f137fc5cf84df8",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 19,
"avg_line_length": 20,
"alnum_prop": 0.8,
"repo_name": "kumaranzone/myflaskblogger",
"id": "dc0fb4e4f7b842558ae6d600bb5f4b4a622f6bcf",
"size": "39",
"binary": false,
"copies": "23",
"ref": "refs/heads/master",
"path": "runp-heroku.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Perl",
"bytes": "344"
},
{
"name": "Python",
"bytes": "145594"
},
{
"name": "Shell",
"bytes": "46"
}
],
"symlink_target": ""
}
|
"""Support for sensors."""
from __future__ import annotations
from fjaraskupan import Device, State
from homeassistant.components.number import NumberEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import TIME_MINUTES
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import DeviceInfo, Entity, EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from . import DeviceState, async_setup_entry_platform
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up number entities dynamically through discovery."""
def _constructor(device_state: DeviceState) -> list[Entity]:
return [
PeriodicVentingTime(
device_state.coordinator, device_state.device, device_state.device_info
),
]
async_setup_entry_platform(hass, config_entry, async_add_entities, _constructor)
class PeriodicVentingTime(CoordinatorEntity[State], NumberEntity):
"""Periodic Venting."""
_attr_max_value: float = 59
_attr_min_value: float = 0
_attr_step: float = 1
_attr_entity_category = EntityCategory.CONFIG
_attr_unit_of_measurement = TIME_MINUTES
def __init__(
self,
coordinator: DataUpdateCoordinator[State],
device: Device,
device_info: DeviceInfo,
) -> None:
"""Init number entities."""
super().__init__(coordinator)
self._device = device
self._attr_unique_id = f"{device.address}-periodic-venting"
self._attr_device_info = device_info
self._attr_name = f"{device_info['name']} Periodic Venting"
@property
def value(self) -> float | None:
"""Return the entity value to represent the entity state."""
if data := self.coordinator.data:
return data.periodic_venting
return None
async def async_set_value(self, value: float) -> None:
"""Set new value."""
await self._device.send_periodic_venting(int(value))
self.coordinator.async_set_updated_data(self._device.state)
|
{
"content_hash": "bc3105aea110041b3cf75bfb3e6505ec",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 87,
"avg_line_length": 33.289855072463766,
"alnum_prop": 0.6861123204179365,
"repo_name": "GenericStudent/home-assistant",
"id": "eecb0b3b8e1bb61e46bb715127bdd46f185e1ccf",
"size": "2297",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/fjaraskupan/number.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
}
|
import email.utils
import traceback
import hashlib
import logging
import json
import requests
from datapackage import DataPackage
from tableschema_sql import Storage
from os_api_cache import get_os_cache
from .callbacks import *
from .model_registry import ModelRegistry
from .config import get_engine
from .fdp_utils import fdp_to_model
from .db_utils import database_name, table_name_for_package
from .row_processor import RowProcessor
def noop(*args, **kw):
pass
class FDPLoader(object):
"""
Utility class for loading FDPs to the DB
"""
def __init__(self, engine=None):
if engine is None:
self.engine = get_engine()
else:
self.engine = engine
self.package = None
self.model = None
self.model_name = None
self.dpo = None
self.datapackage_name = None
self.fullname = None
self.registry = ModelRegistry()
self.last_package_descriptor = None
self.last_loading_success = None
self.callback = noop
def check_hashes(self, resource):
logging.info('Checking hashes of currently loaded data')
current_schema_hash = self.last_package_descriptor\
.get('resources', ({},))[0]\
.get('_schema_hash')
logging.info('Loaded resource descriptor hash is %s', current_schema_hash)
new_schema_hash = dict((k, v)
for k, v in resource.descriptor.items()
if not k.startswith('_'))
new_schema_hash['_model'] = self.dpo.descriptor.get('model', {})
new_schema_hash = json.dumps(new_schema_hash, sort_keys=True, ensure_ascii=True)
new_schema_hash = new_schema_hash.encode('ascii')
new_schema_hash = hashlib.md5(new_schema_hash).hexdigest()
logging.info('Loading resource descriptor hash is %s', new_schema_hash)
current_data_hash = self.last_package_descriptor \
.get('resources', ({},))[0] \
.get('_data_hash')
logging.info('Loaded resource data hash is %s', current_data_hash)
new_data_hash = None
remote_url = resource.source
if remote_url and remote_url.startswith('http'):
response = requests.head(remote_url)
new_data_hash = response.headers.get('etag')
logging.info('Loading resource data hash is %s', new_data_hash)
resource.descriptor['_schema_hash'] = new_schema_hash
resource.descriptor['_data_hash'] = new_data_hash
ret = (current_schema_hash != new_schema_hash) or\
(current_data_hash != new_data_hash) or\
(not self.last_loading_success)
if ret:
logging.info('Looks like stuff changed, loading data')
else:
logging.info('Looks like nothing major changed, skipping data load')
return ret
def status_update(self, **kwargs):
if self.model_name is not None:
try:
_name, _, _package, _model, _dataset, \
_author, _loading_status, _loaded = \
self.registry.get_raw(self.model_name)
if self.last_package_descriptor is None:
self.last_package_descriptor = _package
if self.last_loading_success is None:
self.last_loading_success = _loading_status == STATUS_DONE
except KeyError:
_name = self.model_name
_package = {}
_model = {}
_dataset = ''
_author = ''
_loading_status = None
_loaded = False
self.last_package_descriptor = {}
self.last_loading_success = False
if self.model is not None:
_model = self.model
if self.dpo is not None:
_package = self.dpo.descriptor
if self.datapackage_name is not None:
_dataset = self.datapackage_name
if self.fullname is not None:
_author = self.fullname
status = kwargs.get('status')
if status is not None:
_loading_status = status
_loaded = status == STATUS_DONE
self.registry.save_model(_name, self.package, _package,
_model, _dataset, _author,
_loading_status, _loaded)
self.callback(**kwargs)
def load_fdp_to_db(self, package, callback=noop):
"""
Load an FDP to the database, create a babbage model and save it as well
:param package: URL for the datapackage.json
:param callback: callback to use to send progress updates
"""
self.callback = callback
self.package = package
# Load and validate the datapackage
self.status_update(status=STATUS_LOADING_DATAPACKAGE)
self.dpo = DataPackage(package)
self.status_update(status=STATUS_VALIDATING_DATAPACKAGE)
self.dpo.validate()
self.status_update(status=STATUS_LOADING_RESOURCE)
resource = self.dpo.resources[0]
schema = resource.descriptor['schema']
# Use the cube manager to get the table name
self.datapackage_name = self.dpo.descriptor['name']
datapackage_owner = self.dpo.descriptor['owner']
datapackage_author = self.dpo.descriptor['author']
# Get the full name from the author field, and rewrite it without the email
self.fullname, email_addr = email.utils.parseaddr(datapackage_author)
email_addr = email_addr.split('@')[0] + '@not.shown'
self.dpo.descriptor['author'] = '{0} <{1}>'.format(self.fullname, email_addr)
self.dpo.descriptor.setdefault('private', True)
self.model_name = "{0}:{1}".format(datapackage_owner, self.datapackage_name)
table_name = table_name_for_package(datapackage_owner, self.datapackage_name)
try:
all_fields = set()
field_translation = {}
# Process schema - slugify field names
for field in schema['fields']:
name = database_name(field['name'], all_fields)
all_fields.add(name)
translated_field = {
'name': name,
'type': field['type']
}
field_translation[field['name']] = translated_field
storage_schema = {
'fields': [
{
'type': f['type'],
'name': field_translation[f['name']]['name'],
'format': f.get('format', 'default')
}
for f in schema['fields']
],
# Babbage likes just one primary key
'primaryKey': '_id'
}
# Add Primary key to schema
storage_schema['fields'].insert(0, {
'name': '_id',
'type': 'integer'
})
# Create Babbage Model
self.status_update(status=STATUS_CREATING_BABBAGE_MODEL)
self.model = fdp_to_model(self.dpo, table_name, resource, field_translation)
if self.check_hashes(resource):
# Create indexes
indexes = set()
primary_keys = schema.get('primaryKey', [])
for dim in self.dpo.descriptor.get('model', {}).get('dimensions',{}).values():
attributes = dim.get('attributes', {})
for attribute in attributes.values():
source = attribute.get('source')
if source in primary_keys:
indexes.add((field_translation[source]['name'],))
labelfor = attribute.get('labelfor')
if labelfor is not None:
labelfor = attributes.get(labelfor, {})
labelfor_source = labelfor.get('source')
if labelfor_source in primary_keys:
indexes.add((field_translation[labelfor_source]['name'],
field_translation[source]['name'],))
indexes = list(indexes)
logging.error('INDEXES: %r', indexes)
#
# if dim['label'] in primary_keys:
# key_field = dim['attributes'][dim['key_attribute']]['label']
# key_field = field_translation[key_field]['name']
# indexes.append((key_field,))
#
# label_field = dim['attributes'].get(dim.get('label_attribute'), {}).get('label')
# if label_field is not None:
# label_field = field_translation[label_field]['name']
# if label_field != key_field:
# indexes.append((key_field, label_field))
# Load 1st resource data into DB
# We use the prefix name so that JTS-SQL doesn't load all table data into memory
storage = Storage(self.engine, prefix=table_name)
faux_table_name = ''
if faux_table_name in storage.buckets:
self.status_update(status=STATUS_DELETING_TABLE)
storage.delete(faux_table_name)
self.status_update(status=STATUS_CREATING_TABLE)
indexes_fields = None
if indexes:
indexes_fields = [indexes]
storage.create(faux_table_name, storage_schema, indexes_fields=indexes_fields)
self.status_update(status=STATUS_LOADING_DATA_READY)
row_processor = RowProcessor(resource.iter(keyed=True), self.status_update,
schema, self.dpo.descriptor)
storage.write(faux_table_name, row_processor.iter())
cache = get_os_cache()
if cache is not None:
logging.info('Clearing cache for context=%s', self.model_name)
cache.clear(self.model_name)
response = {
'model_name': self.model_name,
'babbage_model': self.model,
'package': self.dpo.descriptor
}
self.status_update(status=STATUS_DONE, data=response)
except Exception as e:
logging.exception('LOADING FAILED')
self.status_update(status=STATUS_FAIL, error=traceback.format_exc())
return False
return True
|
{
"content_hash": "8922d5ae661b3a9ddb3e9e963bee4fc7",
"timestamp": "",
"source": "github",
"line_count": 261,
"max_line_length": 102,
"avg_line_length": 41.310344827586206,
"alnum_prop": 0.5355221665739195,
"repo_name": "openspending/babbage.fiscal-data-package",
"id": "93b138d32c8fa1b1ca8e3bcb4f759e777439f4ca",
"size": "10782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "babbage_fiscal/loader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "37953"
}
],
"symlink_target": ""
}
|
"""The tests for Arcam FMJ Receiver control device triggers."""
import pytest
from homeassistant.components.arcam_fmj.const import DOMAIN
import homeassistant.components.automation as automation
from homeassistant.components.device_automation import DeviceAutomationType
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
from tests.components.blueprint.conftest import stub_blueprint_populate # noqa: F401
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_triggers(hass, device_reg, entity_reg):
"""Test we get the expected triggers from a arcam_fmj."""
config_entry = MockConfigEntry(domain=DOMAIN, data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
identifiers={(DOMAIN, "host", 1234)},
)
entity_reg.async_get_or_create(
"media_player", DOMAIN, "5678", device_id=device_entry.id
)
expected_triggers = [
{
"platform": "device",
"domain": DOMAIN,
"type": "turn_on",
"device_id": device_entry.id,
"entity_id": "media_player.arcam_fmj_5678",
},
]
triggers = await async_get_device_automations(
hass, DeviceAutomationType.TRIGGER, device_entry.id
)
# Test triggers are either arcam_fmj specific or media_player entity triggers
triggers = await async_get_device_automations(
hass, DeviceAutomationType.TRIGGER, device_entry.id
)
for expected_trigger in expected_triggers:
assert expected_trigger in triggers
for trigger in triggers:
assert trigger in expected_triggers or trigger["domain"] == "media_player"
async def test_if_fires_on_turn_on_request(hass, calls, player_setup, state):
"""Test for turn_on and turn_off triggers firing."""
state.get_power.return_value = None
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": player_setup,
"type": "turn_on",
},
"action": {
"service": "test.automation",
"data_template": {
"some": "{{ trigger.entity_id }}",
"id": "{{ trigger.id }}",
},
},
}
]
},
)
await hass.services.async_call(
"media_player",
"turn_on",
{"entity_id": player_setup},
blocking=True,
)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == player_setup
assert calls[0].data["id"] == 0
|
{
"content_hash": "7aa815d7b034a45f4a0f73499adbdbff",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 85,
"avg_line_length": 31.10909090909091,
"alnum_prop": 0.581531268264173,
"repo_name": "rohitranjan1991/home-assistant",
"id": "fe2507e3d8e8d982d25493cc398662894b3f7f8e",
"size": "3422",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/arcam_fmj/test_device_trigger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1051086"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
}
|
from bee import *
from bee.segments import *
import libcontext
from libcontext.socketclasses import *
from libcontext.pluginclasses import *
from Spyder import Coordinate
class setPos(worker):
setPos = antenna("push", "Coordinate")
pos = variable("Coordinate")
connect(setPos, pos)
@modifier
def do_setPos(self):
axis = self.get_matrix().get_proxy("AxisSystem")
axis.origin = Coordinate(self.pos.x, self.pos.y, self.pos.z)
axis.commit()
trigger(pos, do_setPos)
def set_get_matrix(self, function):
self.get_matrix = function
def place(self):
libcontext.socket(("entity", "bound", "matrix"), socket_single_required(self.set_get_matrix))
|
{
"content_hash": "7476da8ae3d811247eeddddcd8cc0d7f",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 101,
"avg_line_length": 27.423076923076923,
"alnum_prop": 0.6760168302945302,
"repo_name": "agoose77/hivesystem",
"id": "c63ea469c737be90cadb385493de4595d3918276",
"size": "713",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dragonfly/scene/bound/setPos.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "2491478"
},
{
"name": "Shell",
"bytes": "1164"
}
],
"symlink_target": ""
}
|
import numpy as np
import Html
class MatrixDiagnosticWebpage:
def __init__(self, solv, lyr, mesh):
self.html = ''
self.solver = solv
self.lyr = lyr
self.mesh = mesh
self.maxWebPageSize = 100
def createWebPage(self):
matrixSize= self.mesh.solveTemperatureNodeCount()
if matrixSize > self.maxWebPageSize:
print "Web page skipped because problem size is " + matrixSize + " which is larger than limit " + self.maxWebPageSize
return
print "Creating web page"
np.set_printoptions(threshold='nan', linewidth=10000)
f= open('result.html', 'w')
self.webpage()
f.write(self.html)
f.close()
def webpage(self):
h = Html.Html()
matrix= ''
rhsStr= ''
xhtml= ''
col= 0
cols= '* '
temperatureStartNode= 0
temperatureEndNode= self.mesh.solveTemperatureNodeCount()
dirichletStartNode= temperatureEndNode
dirichletEndNode= dirichletStartNode + self.mesh.boundaryDirichletNodeCount(self.lyr)
rowType = ''
for n in range(0, self.solver.NumGlobalElements):
nodeType= '?'
if ((n >= temperatureStartNode) and (n < temperatureEndNode)):
nodeType= 'matl'
else:
if ((n >= dirichletStartNode) and (n < dirichletEndNode)):
nodeType = 'diri'
rowType = rowType + "<td>" + nodeType + "</td>"
rowX = ''
for n in range(0, self.solver.NumGlobalElements):
x = self.mesh.getXAtNode(n)
rowX = rowX + "<td>" + str(x) + "</td>"
rowY = ''
for n in range(0, self.solver.NumGlobalElements):
y = self.mesh.getYAtNode(n)
rowY = rowY + "<td>" + str(y) + "</td>"
# Create matrix table
for x in range(0, self.solver.NumGlobalElements):
rhsStr = rhsStr + "<td>" + str("%.3f" % self.solver.bs[x]) + "</td>"
# xhtml = xhtml + "<td>" + str("%.3f" % self.solver.x[x]) + "</td>"
matrix_row = ''
for y in range(0, self.solver.NumGlobalElements):
if self.solver.As[x,y] != 0.0:
elt= str("%.3f" % self.solver.As[x,y])
else:
elt= '.'
matrix_row = matrix_row + "<td>" + elt + "</td>"
matrix= matrix + "<tr>" + matrix_row + "</tr>"
cols = cols + "<td>" + str(col) + "</td>"
col = col + 1
matrix = "<table>" + matrix + "</table>"
mostCommon= self.solver.nonzeroMostCommonCount()
# Create vector table
vectors = "<tr><td><b>col</b></td>" + cols + "</tr>"
vectors = vectors + "<tr><td><b>X</b></td>" + rowX + "</tr>"
vectors = vectors + "<tr><td><b>Y</b></td>" + rowY + "</tr>"
vectors = vectors + "<tr><td><b>Type</b></td>" + rowType + "</tr>"
vectors = vectors + "<tr><td><b>rhs</b></td>" + rhsStr + "</tr>"
vectors = vectors + "<tr><td><b>lhs</b></td>" + xhtml + "</tr>"
vectors = "<table>" + vectors + "</table>"
# Counts
counts = "<tr><td>BodyNodeCount</td><td>" + str(self.solver.BodyNodeCount) + "</td></tr>"
counts += "<tr><td>TopEdgeNodeCount</td><td>" + str(self.solver.TopEdgeNodeCount) + "</td></tr>"
counts += "<tr><td>RightEdgeNodeCount</td><td>" + str(self.solver.RightEdgeNodeCount) + "</td></tr>"
counts += "<tr><td>BottomEdgeNodeCount</td><td>" + str(self.solver.BottomEdgeNodeCount) + "</td></tr>"
counts += "<tr><td>LeftEdgeNodeCount</td><td>" + str(self.solver.LeftEdgeNodeCount) + "</td></tr>"
counts += "<tr><td>TopLeftCornerNodeCount</td><td>" + str(self.solver.TopLeftCornerNodeCount) + "</td></tr>"
counts += "<tr><td>TopRightCornerNodeCount</td><td>" + str(self.solver.TopRightCornerNodeCount) + "</td></tr>"
counts += "<tr><td>BottomRightCornerNodeCount</td><td>" + str(self.solver.BottomRightCornerNodeCount) + "</td></tr>"
counts += "<tr><td>BoundaryNodeCount</td><td>" + str(self.solver.BoundaryNodeCount) + "</td></tr>"
counts += "<tr><td>Total NodeCount</td><td>" + str(self.solver.totalNodeCount()) + "</td></tr>"
counts += "<tr><td>Matrix Size</td><td>" + str(self.solver.NumGlobalElements) + "</td></tr>"
counts += "Number of independent nodes in G matrix= " + str(self.mesh.nodeGcount) + "<br/>"
counts += "Number of independent nodes in GF matrix= " + str(self.mesh.nodeGFcount) + "<br/>"
counts += "Number of independent nodes in GB matrix= " + str(self.mesh.nodeGBcount) + "<br/>"
counts += "Number of independent nodes in D matrix= " + str(self.mesh.nodeDcount) + "<br/>"
counts += "Total number of independent nodes= " + str(self.mesh.nodeCount) + "<br/>"
counts += "Most Common number of nonzero matrix entries per row= " + str(mostCommon) + "<br/>"
counts = "<table>" + counts + "</table>"
# Description
descr = """
A matrix is in sections:
<table border='2'>
<tr>
<td>GF</td><tr><td>GB</td><td>0</td>
</tr>
</td>
<td>D</td>
</tr>
<tr><td>D^T</td><td>0</td></tr>
</table>
<table>
<tr><td>G</td><td>Transconductance matrix</td>
<td>The number of rows in G is self.nodeGcount .
The first set of rows GF is for the square mesh elements.
The second set of rows GB is for the boundary condition voltage source nodes.
</td>
</tr>
<tr><td>B</td><td>Voltage sources</td>
<td>The number of rows is the number of boundary condition mesh cells.
</td>
</tr>
<tr><td>D^T</td><td>D Transpose</td><td></td></tr>
<tr><td>C</td><td>Zeros</td><td></td></tr>
</table>
<pre>
G is in two sections, which are the upper left GF (for field) and GB (for boundary)
The analysis is of the form Ax = b
For rows in b corresponding to G,
b is the known value of the current (constant power in thermal circuits) sources
For rows in b corresponding to D, (constant temperature boundary conditions)
b is the known value of temperature at the boundary.
The number of rows in D is self.nodeDcount
The number of rows in G is self.nodeGcount
The number of rows in GF is self.nodeGFcount
The number of rows in GB is self.nodeGBcount
The total number of rows in A is self.nodeCount
The solution to the matrix is the vector x
For rows in x corresponding to G, these are voltages (temperature)
For rows in x corresponding to D, these are currents (power flow) in the boundary condition.
For energy balance in steady state, the current into the constant-temperature boundary condition
must equal the current from the constant-power thermal sources.
The index of the last nodes in the G submatrix for the field plus one is the number
of nodes in the field GF. Add the boundary nodes GB to G.
Also count the number of boundary sources, which is the size of the D matrix.
</pre>
"""
# Create web page
head = h.title("Matrix output")
body = h.h1("Ax = b")
body += h.h3("A Matrix")
body += h.pre(matrix)
body += h.h3("Vectors")
body += h.pre(vectors)
body += h.h3("Counts")
body += h.pre(counts) + descr
self.html= h.html(h.head(head) + h.body(body))
|
{
"content_hash": "11b0f5cd6650a587f915f4b74513e2b2",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 123,
"avg_line_length": 41.3859649122807,
"alnum_prop": 0.6020912816165042,
"repo_name": "tomacorp/thermapythia",
"id": "34705dc10ed2eee8c20e29e3422d501fa98909fa",
"size": "7077",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "thermpy/MatrixDiagnostic.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AGS Script",
"bytes": "5464"
},
{
"name": "JavaScript",
"bytes": "9020"
},
{
"name": "Python",
"bytes": "403634"
},
{
"name": "Shell",
"bytes": "5098"
}
],
"symlink_target": ""
}
|
from flask import render_template
class DreamCtl:
def __init__(self):
pass
@staticmethod
def index():
return render_template('dream/index.html')
|
{
"content_hash": "410b5ac993e5eff1bfb16862da08b6c0",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 50,
"avg_line_length": 16.09090909090909,
"alnum_prop": 0.6271186440677966,
"repo_name": "jhbez/ProjectV",
"id": "f76a7f8ac59c5f7a1b5c032f89fd063566d89ec0",
"size": "800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/v/dream/controller/dreamCtl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "380467"
},
{
"name": "HTML",
"bytes": "117159"
},
{
"name": "JavaScript",
"bytes": "275035"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "PLpgSQL",
"bytes": "13603"
},
{
"name": "Python",
"bytes": "201373"
}
],
"symlink_target": ""
}
|
"""Signals that the given finalized deal is ready to serve.
By default, deals are set ready to serve as soon as they're finalized. For
programmatic guaranteed deals, bidders can opt out of this feature by asking
their account manager. This is recommended for programmatic guaranteed deals in
order to ensure that bidders have creatives prepared to be used in placing bids
once the deal is serving. Use finalizedDeals.addCreative to associate creatives
with a programmatic guaranteed deal.
"""
import argparse
import os
import pprint
import sys
sys.path.insert(0, os.path.abspath('../../..'))
from googleapiclient.errors import HttpError
import util
_FINALIZED_DEALS_NAME_TEMPLATE = 'buyers/%s/finalizedDeals/%s'
DEFAULT_BUYER_RESOURCE_ID = 'ENTER_BUYER_RESOURCE_ID_HERE'
DEFAULT_FINALIZED_DEAL_RESOURCE_ID = 'ENTER_DEAL_RESOURCE_ID_HERE'
def main(marketplace, args):
finalized_deal_name = _FINALIZED_DEALS_NAME_TEMPLATE % (
args.account_id, args.deal_id)
print(f'Setting finalized deal with name "{finalized_deal_name}" as ready '
'to serve:')
try:
# Construct and execute the request.
response = marketplace.buyers().finalizedDeals().setReadyToServe(
deal=finalized_deal_name).execute()
except HttpError as e:
print(e)
sys.exit(1)
pprint.pprint(response)
if __name__ == '__main__':
try:
service = util.get_service(version='v1')
except IOError as ex:
print(f'Unable to create marketplace service - {ex}')
print('Did you specify the key file in util.py?')
sys.exit(1)
parser = argparse.ArgumentParser(
description='Signals that a finalized deal is ready to serve.')
# Required fields.
parser.add_argument(
'-a', '--account_id', default=DEFAULT_BUYER_RESOURCE_ID,
help=('The resource ID of the buyers resource for which the finalized '
'deal is being set ready to serve. This will be used to '
'construct the name used as a path parameter for the '
'finalizedDeals.setReadyToServe request.'))
parser.add_argument(
'-d', '--deal_id', default=DEFAULT_FINALIZED_DEAL_RESOURCE_ID,
help=('The resource ID of the buyers.finalizedDeals resource that is '
'being set ready to serve. This will be used to construct the '
'name used as a path parameter for the '
'finalizedDeals.setReadyToServe request.'))
main(service, parser.parse_args())
|
{
"content_hash": "98e23aecf3079cd0d60dd4babe4b894d",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 79,
"avg_line_length": 35.394366197183096,
"alnum_prop": 0.6812574612017509,
"repo_name": "googleads/authorized-buyers-marketplace-api-samples",
"id": "65795fb4841b6a9727b8eaadf0780a28eb73fac1",
"size": "3131",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "python/v1/buyers/finalized_deals/set_ready_to_serve_finalized_deals.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "317860"
},
{
"name": "CSS",
"bytes": "637"
},
{
"name": "Java",
"bytes": "231642"
},
{
"name": "PHP",
"bytes": "169281"
},
{
"name": "Python",
"bytes": "148287"
},
{
"name": "Ruby",
"bytes": "166333"
}
],
"symlink_target": ""
}
|
import asyncio
import datetime
import sqlite3
import unittest
import uuid
from cloudhands.burst.agent import message_handler
from cloudhands.burst.membership import AcceptedAgent
import cloudhands.common
from cloudhands.common.connectors import initialise
from cloudhands.common.connectors import Registry
from cloudhands.common.schema import Appliance
from cloudhands.common.schema import CatalogueChoice
from cloudhands.common.schema import CatalogueItem
from cloudhands.common.schema import Component
from cloudhands.common.schema import IPAddress
from cloudhands.common.schema import Label
from cloudhands.common.schema import Membership
from cloudhands.common.schema import NATRouting
from cloudhands.common.schema import Node
from cloudhands.common.schema import Organisation
from cloudhands.common.schema import Provider
from cloudhands.common.schema import ProviderReport
from cloudhands.common.schema import Registration
from cloudhands.common.schema import SoftwareDefinedNetwork
from cloudhands.common.schema import State
from cloudhands.common.schema import Touch
from cloudhands.common.schema import User
from cloudhands.common.states import MembershipState
from cloudhands.common.states import RegistrationState
class AgentTesting(unittest.TestCase):
def setUp(self):
""" Populate test database"""
self.session = Registry().connect(sqlite3, ":memory:").session
initialise(self.session)
self.session.add_all((
Organisation(
uuid=uuid.uuid4().hex,
name="TestOrg"),
Provider(
uuid=uuid.uuid4().hex,
name="cloudhands.jasmin.vcloud.phase04.cfg"),
User(handle="testuser", uuid=uuid.uuid4().hex),
))
self.session.commit()
org, user = (
self.session.query(Organisation).one(),
self.session.query(User).one(),
)
self.session.add(
Membership(
uuid=uuid.uuid4().hex,
model=cloudhands.common.__version__,
organisation=org,
role="user")
)
self.session.commit()
self.mship = self.session.query(Membership).one()
accepted = self.session.query(
MembershipState).filter(
MembershipState.name == "accepted").one()
now = datetime.datetime.utcnow()
act = Touch(artifact=self.mship, actor=user, state=accepted, at=now)
self.session.add(act)
self.session.commit()
def tearDown(self):
""" Every test gets its own in-memory database """
r = Registry()
r.disconnect(sqlite3, ":memory:")
class AcceptedAgentTesting(AgentTesting):
def test_handler_registration(self):
q = asyncio.Queue()
agent = AcceptedAgent(q, args=None, config=None)
for typ, handler in agent.callbacks:
message_handler.register(typ, handler)
self.assertEqual(
agent.touch_to_active,
message_handler.dispatch(AcceptedAgent.MembershipActivated)
)
self.assertEqual(
agent.touch_to_previous,
message_handler.dispatch(AcceptedAgent.MembershipNotActivated)
)
def test_job_query_and_transmit_needs_registration(self):
q = AcceptedAgent.queue(None, None, loop=None)
agent = AcceptedAgent(q, args=None, config=None)
jobs = agent.jobs(self.session)
self.assertEqual(0, len(jobs))
def test_job_query_and_transmit(self):
reg = Registration(
uuid=uuid.uuid4().hex,
model=cloudhands.common.__version__)
user = self.session.query(User).one()
valid = self.session.query(
RegistrationState).filter(
RegistrationState.name=="pre_user_ldappublickey").one()
now = datetime.datetime.utcnow()
act = Touch(artifact=reg, actor=user, state=valid, at=now)
self.session.add_all((reg, act))
self.session.commit()
q = AcceptedAgent.queue(None, None, loop=None)
agent = AcceptedAgent(q, args=None, config=None)
jobs = agent.jobs(self.session)
self.assertEqual(1, len(jobs))
q.put_nowait(jobs[0])
self.assertEqual(1, q.qsize())
job = q.get_nowait()
self.assertEqual(1, len(job.artifact.changes))
def test_queue_creation(self):
self.assertIsInstance(
AcceptedAgent.queue(None, None, loop=None),
asyncio.Queue
)
def test_msg_dispatch_and_touch_to_active(self):
mship = self.session.query(Membership).one()
active = self.session.query(MembershipState).filter(
MembershipState.name == "active").one()
q = AcceptedAgent.queue(None, None, loop=None)
agent = AcceptedAgent(q, args=None, config=None)
for typ, handler in agent.callbacks:
message_handler.register(typ, handler)
msg = AcceptedAgent.MembershipActivated(
mship.uuid, datetime.datetime.utcnow(),
"cloudhands.jasmin.vcloud.phase04.cfg")
rv = message_handler(msg, self.session)
self.assertIsInstance(rv, Touch)
self.assertIs(rv.state, active)
def test_msg_dispatch_and_touch_to_previous(self):
mship = self.session.query(Membership).one()
accepted = self.session.query(
MembershipState).filter(
MembershipState.name == "accepted").one()
q = AcceptedAgent.queue(None, None, loop=None)
agent = AcceptedAgent(q, args=None, config=None)
for typ, handler in agent.callbacks:
message_handler.register(typ, handler)
msg = AcceptedAgent.MembershipNotActivated(
mship.uuid, datetime.datetime.utcnow(),
"cloudhands.jasmin.vcloud.phase04.cfg")
rv = message_handler(msg, self.session)
self.assertIsInstance(rv, Touch)
self.assertIs(rv.state, accepted)
|
{
"content_hash": "4b3d30f0948534197042b87e54df2ef0",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 76,
"avg_line_length": 36.21212121212121,
"alnum_prop": 0.6532217573221757,
"repo_name": "cedadev/cloudhands-burst",
"id": "75deba414460f6c764a9329603ead416c6eaf4bd",
"size": "6016",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudhands/burst/test/test_membership.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "253320"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
from django.core.exceptions import ValidationError
from lists.models import Item, List
class ItemModelTest(TestCase):
def test_default_text(self):
item = Item()
self.assertEqual(item.text, '')
def test_item_is_related_to_list(self):
list_ = List.objects.create()
item = Item()
item.list = list_
item.save()
self.assertIn(item, list_.item_set.all())
def test_cannot_save_empty_list_items(self):
list_ = List.objects.create()
item = Item(list=list_, text='')
with self.assertRaises(ValidationError):
item.save()
item.full_clean()
class ListModelTest(TestCase):
def test_get_absolute_url(self):
list_ = List.objects.create()
self.assertEqual(list_.get_absolute_url(), '/lists/%d/' % (list_.id,))
def test_duplicate_items_are_invalid(self):
list_ = List.objects.create()
Item.objects.create(list=list_, text='bla')
with self.assertRaises(ValidationError):
item = Item(list=list_, text='bla')
item.full_clean()
def test_CAN_save_same_item_to_different_lists(self):
list1 = List.objects.create()
list2 = List.objects.create()
Item.objects.create(list=list1, text='bla')
item = Item(list=list2, text='bla')
item.full_clean() # should not raise
def test_list_ordering(self):
list1 = List.objects.create()
item1 = Item.objects.create(list=list1, text='i1')
item2 = Item.objects.create(list=list1, text='item 2')
item3 = Item.objects.create(list=list1, text='3')
self.assertEqual(
list(Item.objects.all()),
[item1, item2, item3]
)
def test_string_representation(self):
item = Item(text='some text')
self.assertEqual(str(item), 'some text')
|
{
"content_hash": "3ea0a687b14ab6984782cbb5afaac48d",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 78,
"avg_line_length": 32.8448275862069,
"alnum_prop": 0.6062992125984252,
"repo_name": "rocity/the-testing-goat",
"id": "c519967de8a096cf6aa8a3851135805cfc5f96ca",
"size": "1905",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "superlists/lists/tests/test_models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "33"
},
{
"name": "HTML",
"bytes": "2068"
},
{
"name": "Python",
"bytes": "28058"
}
],
"symlink_target": ""
}
|
from nova.tests.image import fake
from nova.tests.integrated.v3 import api_sample_base
class ServersSampleBase(api_sample_base.ApiSampleTestBaseV3):
def _post_server(self):
subs = {
'image_id': fake.get_valid_image_id(),
'host': self._get_host(),
'glance_host': self._get_glance_host()
}
response = self._do_post('servers', 'server-post-req', subs)
subs = self._get_regexes()
return self._verify_response('server-post-resp', subs, response, 202)
class ServersSampleJsonTest(ServersSampleBase):
sample_dir = 'servers'
def test_servers_post(self):
return self._post_server()
def test_servers_get(self):
uuid = self.test_servers_post()
response = self._do_get('servers/%s' % uuid)
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
subs['hypervisor_hostname'] = r'[\w\.\-]+'
subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
self._verify_response('server-get-resp', subs, response, 200)
def test_servers_list(self):
uuid = self._post_server()
response = self._do_get('servers')
subs = self._get_regexes()
subs['id'] = uuid
self._verify_response('servers-list-resp', subs, response, 200)
def test_servers_details(self):
uuid = self._post_server()
response = self._do_get('servers/detail')
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
subs['hypervisor_hostname'] = r'[\w\.\-]+'
subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
self._verify_response('servers-details-resp', subs, response, 200)
class ServersSampleXmlTest(ServersSampleJsonTest):
ctype = 'xml'
class ServersSampleAllExtensionJsonTest(ServersSampleJsonTest):
all_extensions = True
class ServersSampleAllExtensionXmlTest(ServersSampleXmlTest):
all_extensions = True
class ServersActionsJsonTest(ServersSampleBase):
sample_dir = 'servers'
def _test_server_action(self, uuid, action,
subs={}, resp_tpl=None, code=202):
subs.update({'action': action,
'glance_host': self._get_glance_host()})
response = self._do_post('servers/%s/action' % uuid,
'server-action-%s' % action.replace('_',
'-'),
subs)
if resp_tpl:
subs.update(self._get_regexes())
self._verify_response(resp_tpl, subs, response, code)
else:
self.assertEqual(response.status, code)
self.assertEqual(response.read(), "")
def test_server_resize(self):
self.flags(allow_resize_to_same_host=True)
uuid = self._post_server()
self._test_server_action(uuid, "resize",
{"id": 2,
"host": self._get_host()})
return uuid
def test_server_confirm_resize(self):
uuid = self.test_server_resize()
self._test_server_action(uuid, "confirm_resize")
class ServersActionsXmlTest(ServersActionsJsonTest):
ctype = 'xml'
|
{
"content_hash": "5a5724ccc7ddb3a3e7d082e752d8e26a",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 77,
"avg_line_length": 34.51578947368421,
"alnum_prop": 0.5611466910643489,
"repo_name": "imsplitbit/nova",
"id": "d807490cba8d1275f5cd949b98c0778e74ee42dc",
"size": "3955",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/integrated/v3/test_servers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13518591"
},
{
"name": "Shell",
"bytes": "16950"
}
],
"symlink_target": ""
}
|
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import pymc3 as pm
from scipy.stats import mode, chisquare
from sklearn.metrics import confusion_matrix, accuracy_score
import lasagne
from utils import load_dataset
import theano
from utils import run_advi
sns.set_style('white')
sns.set_context('talk')
"""
In this file we build the Bayesian Convolutional NN to be trained on the MNIST data set
"""
class GaussWeights(object):
"""
The priors act as regularizers here to try and keep the weights of the ANN small.
It’s mathematically equivalent to putting a L2 loss term that penalizes large weights into the objective function
"""
def __init__(self):
self.count = 0
def __call__(self, shape):
self.count += 1
return pm.Normal('w%d' % self.count, mu=0, sd=.1,
testval=np.random.normal(size=shape).astype(np.float64),
shape=shape)
def build_ann_conv(init):
"""
Build the Convolutional neural net to be trained with the ADVI algorithm
:param init: can pass in a function init which has to return a Theano expression
to be used as the weight and bias matrices
:return:
"""
network = lasagne.layers.InputLayer(shape=(None, 1, 28, 28),
input_var=input_var)
network = lasagne.layers.Conv2DLayer(
network, num_filters=32, filter_size=(5, 5),
nonlinearity=lasagne.nonlinearities.tanh,
W=init)
# Max-pooling layer of factor 2 in both dimensions:
network = lasagne.layers.MaxPool2DLayer(network, pool_size=(2, 2))
# Another convolution with 32 5x5 kernels, and another 2x2 pooling:
network = lasagne.layers.Conv2DLayer(
network, num_filters=32, filter_size=(5, 5),
nonlinearity=lasagne.nonlinearities.tanh,
W=init)
network = lasagne.layers.MaxPool2DLayer(network,
pool_size=(2, 2))
n_hid2 = 256
network = lasagne.layers.DenseLayer(
network, num_units=n_hid2,
nonlinearity=lasagne.nonlinearities.tanh,
b=init,
W=init
)
# Finally, we'll add the fully-connected output layer, of 10 softmax units:
network = lasagne.layers.DenseLayer(
network, num_units=10,
nonlinearity=lasagne.nonlinearities.softmax,
b=init,
W=init
)
prediction = lasagne.layers.get_output(network)
return pm.Categorical('out',
prediction,
observed=target_var)
if __name__ == "__main__":
print("Loading data...")
X_train, y_train, X_val, y_val, X_test, y_test = load_dataset()
# Building a theano.shared variable
input_var = theano.shared(X_train[:500, ...].astype(np.float64))
target_var = theano.shared(y_train[:500, ...].astype(np.float64))
with pm.Model() as neural_network_conv:
likelihood = build_ann_conv(GaussWeights())
v_params, trace, ppc, y_pred = run_advi(likelihood, X_train, y_train, input_var, X_test, y_test, target_var)
print('Accuracy on test data = {}%'.format(accuracy_score(y_test, y_pred) * 100))
miss_class = np.where(y_test != y_pred)[0]
corr_class = np.where(y_test == y_pred)[0]
preds = pd.DataFrame(ppc['out']).T
chis = preds.apply(lambda x: chisquare(x).statistic, axis='columns')
sns.distplot(chis.loc[miss_class].dropna(), label='Error')
sns.distplot(chis.loc[corr_class].dropna(), label='Correct')
plt.legend()
sns.despine()
plt.xlabel('Chi-Square statistic')
plt.show()
|
{
"content_hash": "236f415f6592e2d1e0b1c041ab7246fe",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 117,
"avg_line_length": 32.203539823008846,
"alnum_prop": 0.6361637812585875,
"repo_name": "guilherme-pombo/Bayesian-Deep-Learning",
"id": "e8cc675c3b0cd1b6cd84d200f0fe3fecadd5e314",
"size": "3641",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/bayesian_cnn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18036"
}
],
"symlink_target": ""
}
|
"""Tests for tensorflow.python.ops.op_def_library."""
from absl.testing import parameterized
import numpy as np
from google.protobuf import text_format
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import tensor_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.framework import _op_def_util
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class OpDefUtilTest(test_util.TensorFlowTestCase, parameterized.TestCase):
@parameterized.parameters([
("any", "Foo", "Foo"),
("any", 12, 12),
("any", {2: 3}, {2: 3}),
("string", "Foo", "Foo"),
("string", b"Foo", b"Foo"),
("int", 12, 12),
("int", 12.3, 12),
("float", 12, 12.0),
("float", 12.3, 12.3),
("bool", True, True),
("shape", tensor_shape.TensorShape([3]), tensor_shape.TensorShape([3])),
("shape", [3], tensor_shape.TensorShape([3])),
("type", dtypes.int32, dtypes.int32),
("type", np.int32, dtypes.int32),
("type", "int32", dtypes.int32),
("tensor", tensor_pb2.TensorProto(dtype=types_pb2.DataType.DT_FLOAT),
tensor_pb2.TensorProto(dtype=types_pb2.DataType.DT_FLOAT)),
("tensor", "dtype: DT_FLOAT",
tensor_pb2.TensorProto(dtype=types_pb2.DataType.DT_FLOAT)),
("list(any)", [1, "foo", 7.3, dtypes.int32],
[1, "foo", 7.3, dtypes.int32]),
("list(any)", (1, "foo"), [1, "foo"]),
("list(string)", ["foo", "bar"], ["foo", "bar"]),
("list(string)", ("foo", "bar"), ["foo", "bar"]),
("list(string)", iter("abcd"), ["a", "b", "c", "d"]),
("list(int)", (1, 2.3), [1, 2]),
("list(float)", (1, 2.3), [1.0, 2.3]),
("list(bool)", [True, False], [True, False]),
]) # pyformat: disable
def testConvert(self, attr_type, value, expected):
result = _op_def_util.ConvertPyObjectToAttributeType(value, attr_type)
# Check that we get the expected value(s).
self.assertEqual(expected, result)
# Check that we get the expected type(s).
self.assertEqual(type(expected), type(result))
if isinstance(result, list):
for expected_item, result_item in zip(expected, result):
self.assertEqual(type(expected_item), type(result_item))
@parameterized.parameters([
("string", 12),
("int", "foo"),
("float", "foo"),
("bool", 1),
("dtype", None),
("shape", 12.0),
("tensor", [1, 2, 3]),
("list(any)", 12),
("list(int)", [1, "two"]),
("list(string)", [1, "two"]),
("tensor", "string that is not a text-formatted TensorProto"),
])
def testConvertError(self, attr_type, value):
with self.assertRaisesRegex(TypeError, "Failed to convert value"):
_op_def_util.ConvertPyObjectToAttributeType(value, attr_type)
# Test AttrValueToPyObject(). Note: this test also exercises the code in
# DataTypeToPyObject() and TensorShapeToPyObject(), since those are used
# when the AttrValue contains a DataType or TensorShape.
@parameterized.parameters([
("s: 'foo'", "foo"),
("i: 5", 5),
("f: 8", 8.0),
("b: True", True),
("type: DT_INT32", dtypes.int32),
("shape { dim: [{size: 3}, {size: 4}] }",
tensor_shape.TensorShape([3, 4])),
("list { }", []),
("list { s: [] }", []),
("list { s: ['a', 'b', 'c'] }", ["a", "b", "c"]),
("list { i: [1, 2, 3] }", [1, 2, 3]),
("list { f: [2.0, 4.0] }", [2.0, 4.0]),
]) # pyformat: disable
def testAttrValueToPyObject(self, pbtxt, expected):
proto = attr_value_pb2.AttrValue()
text_format.Parse(pbtxt, proto)
result = _op_def_util.SerializedAttrValueToPyObject(
proto.SerializeToString())
self.assertEqual(expected, result)
@parameterized.parameters([
"", # Empty value (oneof not set)
"tensor {}", # 'TensorProto' not supported (yet).
"func {}", # 'func' not supported.
"placeholder: ''", # 'placeholder' not supported.
"list { tensor [{}] }", # 'TensorProto' not supported (yet).
"list { func [{}] }", # 'func' not supported.
]) # pyformat: disable
def testAttrValueToPyObjectError(self, pbtxt):
proto = attr_value_pb2.AttrValue()
text_format.Parse(pbtxt, proto)
with self.assertRaises((TypeError, ValueError)):
_op_def_util.SerializedAttrValueToPyObject(proto.SerializeToString())
if __name__ == "__main__":
googletest.main()
|
{
"content_hash": "2c2838c5de619d2308cef7c7acf3bb64",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 78,
"avg_line_length": 38.725,
"alnum_prop": 0.5879061760275447,
"repo_name": "Intel-Corporation/tensorflow",
"id": "84be0a37f907d271f583da62540467f697e73fac",
"size": "5336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/framework/op_def_util_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7481"
},
{
"name": "C",
"bytes": "183416"
},
{
"name": "C++",
"bytes": "24549804"
},
{
"name": "CMake",
"bytes": "160888"
},
{
"name": "Go",
"bytes": "849081"
},
{
"name": "HTML",
"bytes": "681293"
},
{
"name": "Java",
"bytes": "307123"
},
{
"name": "Jupyter Notebook",
"bytes": "1833659"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37393"
},
{
"name": "Objective-C",
"bytes": "7037"
},
{
"name": "Objective-C++",
"bytes": "64142"
},
{
"name": "Protocol Buffer",
"bytes": "218430"
},
{
"name": "Python",
"bytes": "21875003"
},
{
"name": "Shell",
"bytes": "337846"
},
{
"name": "TypeScript",
"bytes": "849555"
}
],
"symlink_target": ""
}
|
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.25
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1UserInfo(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'extra': 'dict(str, list[str])',
'groups': 'list[str]',
'uid': 'str',
'username': 'str'
}
attribute_map = {
'extra': 'extra',
'groups': 'groups',
'uid': 'uid',
'username': 'username'
}
def __init__(self, extra=None, groups=None, uid=None, username=None, local_vars_configuration=None): # noqa: E501
"""V1UserInfo - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._extra = None
self._groups = None
self._uid = None
self._username = None
self.discriminator = None
if extra is not None:
self.extra = extra
if groups is not None:
self.groups = groups
if uid is not None:
self.uid = uid
if username is not None:
self.username = username
@property
def extra(self):
"""Gets the extra of this V1UserInfo. # noqa: E501
Any additional information provided by the authenticator. # noqa: E501
:return: The extra of this V1UserInfo. # noqa: E501
:rtype: dict(str, list[str])
"""
return self._extra
@extra.setter
def extra(self, extra):
"""Sets the extra of this V1UserInfo.
Any additional information provided by the authenticator. # noqa: E501
:param extra: The extra of this V1UserInfo. # noqa: E501
:type: dict(str, list[str])
"""
self._extra = extra
@property
def groups(self):
"""Gets the groups of this V1UserInfo. # noqa: E501
The names of groups this user is a part of. # noqa: E501
:return: The groups of this V1UserInfo. # noqa: E501
:rtype: list[str]
"""
return self._groups
@groups.setter
def groups(self, groups):
"""Sets the groups of this V1UserInfo.
The names of groups this user is a part of. # noqa: E501
:param groups: The groups of this V1UserInfo. # noqa: E501
:type: list[str]
"""
self._groups = groups
@property
def uid(self):
"""Gets the uid of this V1UserInfo. # noqa: E501
A unique value that identifies this user across time. If this user is deleted and another user by the same name is added, they will have different UIDs. # noqa: E501
:return: The uid of this V1UserInfo. # noqa: E501
:rtype: str
"""
return self._uid
@uid.setter
def uid(self, uid):
"""Sets the uid of this V1UserInfo.
A unique value that identifies this user across time. If this user is deleted and another user by the same name is added, they will have different UIDs. # noqa: E501
:param uid: The uid of this V1UserInfo. # noqa: E501
:type: str
"""
self._uid = uid
@property
def username(self):
"""Gets the username of this V1UserInfo. # noqa: E501
The name that uniquely identifies this user among all active users. # noqa: E501
:return: The username of this V1UserInfo. # noqa: E501
:rtype: str
"""
return self._username
@username.setter
def username(self, username):
"""Sets the username of this V1UserInfo.
The name that uniquely identifies this user among all active users. # noqa: E501
:param username: The username of this V1UserInfo. # noqa: E501
:type: str
"""
self._username = username
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1UserInfo):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1UserInfo):
return True
return self.to_dict() != other.to_dict()
|
{
"content_hash": "4418127dc5644ab3ba77f87fd2741be1",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 174,
"avg_line_length": 29.181372549019606,
"alnum_prop": 0.5696287586091047,
"repo_name": "kubernetes-client/python",
"id": "5ca1277c96abe0a11d807eb425300736a5fb34f0",
"size": "5970",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v1_user_info.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "356"
},
{
"name": "Python",
"bytes": "11454299"
},
{
"name": "Shell",
"bytes": "43108"
}
],
"symlink_target": ""
}
|
import sys
from optparse import OptionParser
from cbagent.collectors.active_tasks import ActiveTasks
from cbagent.collectors.iostat import IO
from cbagent.collectors.latency import Latency
from cbagent.collectors.observe import ObserveLatency
from cbagent.collectors.net import Net
from cbagent.collectors.ns_server import NSServer
from cbagent.collectors.secondary_stats import SecondaryStats
from cbagent.collectors.secondary_debugstats import SecondaryDebugStats
from cbagent.collectors.secondary_latency import SecondaryLatencyStats
from cbagent.collectors.n1ql_stats import N1QLStats
from cbagent.collectors.ps import PS
from cbagent.collectors.typeperf import TypePerf
from cbagent.collectors.sync_gateway import SyncGateway
from cbagent.collectors.xdcr_lag import XdcrLag
from cbagent.settings import Settings
def main():
parser = OptionParser(prog="cbagent")
parser.add_option("--at", action="store_true", dest="active_tasks",
help="Active tasks")
parser.add_option("--io", action="store_true", dest="iostat",
help="iostat")
parser.add_option("--l", action="store_true", dest="latency",
help="Latency")
parser.add_option("--o", action="store_true", dest="observe",
help="Observe latency")
parser.add_option("--n", action="store_true", dest="net",
help="Net")
parser.add_option("--ns", action="store_true", dest="ns_server",
help="ns_server")
parser.add_option("--secondary", action="store_true", dest="secondary_stats",
help="secondary_stats")
parser.add_option("--secondarylatency", action="store_true", dest="secondary_latency",
help="secondary_latency")
parser.add_option("--secondarydebugstats", action="store_true", dest="secondary_debugstats",
help="secondary_debugstats")
parser.add_option("--n1ql", action="store_true", dest="n1ql_stats",
help="n1ql_stats")
parser.add_option("--ps", action="store_true", dest="ps",
help="ps CPU, RSS and VSIZE")
parser.add_option("--sg", action="store_true", dest="sync_gateway",
help="Sync Gateway")
parser.add_option("--x", action="store_true", dest="xdcr_lag",
help="XDCR lag")
options, args = parser.parse_args()
if not args:
sys.exit("No configuration provided")
if options.active_tasks:
collector = ActiveTasks
elif options.iostat:
collector = IO
elif options.latency:
collector = Latency
elif options.observe:
collector = ObserveLatency
elif options.net:
collector = Net
elif options.ns_server:
collector = NSServer
elif options.secondary_stats:
collector = SecondaryStats
elif options.n1ql_stats:
collector = N1QLStats
elif options.ps:
collector = PS
elif options.typeperf:
collector = TypePerf
elif options.sync_gateway:
collector = SyncGateway
elif options.xdcr_lag:
collector = XdcrLag
else:
sys.exit("No collector selected")
settings = Settings()
settings.read_cfg(args[0])
collector = collector(settings)
collector.update_metadata()
collector.collect()
if __name__ == '__main__':
main()
|
{
"content_hash": "d0fc190dece9144778ee1211547a71cc",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 96,
"avg_line_length": 37.34065934065934,
"alnum_prop": 0.6486168334314303,
"repo_name": "EricACooper/perfrunner",
"id": "3c21f59d67548a8e22bde9a7210300558f7a1988",
"size": "3398",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "cbagent/__main__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1262"
},
{
"name": "Go",
"bytes": "3718"
},
{
"name": "Inno Setup",
"bytes": "20478"
},
{
"name": "Makefile",
"bytes": "518"
},
{
"name": "Python",
"bytes": "837748"
},
{
"name": "Shell",
"bytes": "13179"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.